@Override
 public IFile createFile(String path) {
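   // Resolve the path to an absolute location via the plugin context, then split it into parent directory and file name.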
   File file = new File(context.absPath(path));
   String name = file.getName();
   String parentPath = file.getParent();
   try {
     VfsUtil.createDirectories(parentPath);
   } catch (IOException e) {
     Flog.error("Create directories error %s", e);
     context.errorMessage("The Floobits plugin was unable to create directories for file.");
     return null;
   }
   VirtualFile parent = LocalFileSystem.getInstance().findFileByPath(parentPath);
   if (parent == null) {
     Flog.error("Virtual file is null? %s", parentPath);
     return null;
   }
   VirtualFile newFile;
   try {
     newFile = parent.findOrCreateChildData(context, name);
   } catch (Throwable e) {
     Flog.error("Create file error %s", e);
     context.errorMessage(
         String.format("The Floobits plugin was unable to create a file: %s.", path));
     return null;
   }
   return new FileImpl(newFile);
 }
Example #2
  @Test
  public void testConstructAssertion() throws KBApiException {
    System.out.println("Running testConstructAssertion");
    // "(SomeAirlineEquipmentLogFn Plane-APITest)", "(flyingDoneBySomething-Move FlightXYZ-APITest
    // Plane-APITest)"
    KBPredicate p = KBPredicateImpl.get("flyingDoneBySomething-Move");
    KBIndividual flight = KBIndividualImpl.get("FlightXYZ-APITest");
    KBIndividual plane = KBIndividualImpl.get("Plane-APITest");
    List<Object> arguments = new ArrayList<Object>();
    arguments.add(flight);
    arguments.add(plane);

    Context ctx = ContextImpl.get("(SomeAirlineEquipmentLogFn Plane-APITest)");
    Fact a = new FactImpl(ctx, p, arguments.toArray());
    assertTrue("Unable to get a fact from " + p + " " + arguments, a instanceof Fact);
    // System.out.println("Fact string: " + a.toString());

    // With date

    ContextImpl airlineLog = ContextImpl.get("SomeAirlineLogMt");
    KBPredicate start = TestConstants.kbapitc.startingDate;

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy MM dd HH:mm");
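    // Create a fact recording the flight's starting date in SomeAirlineLogMt, then read the Date argument back and compare it.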
    try {
      Date d = sdf.parse("2014 03 15 8:05");
      Fact wellA = new FactImpl(airlineLog, start, flight, d);
      Date checkd = wellA.<Date>getArgument(2);
      assertEquals(checkd, d);
    } catch (ParseException pe) {
      // ignore -- @todo shouldn't a ParseException here be a test failure?
    }
  }
Example #3
 private synchronized void checkQuorumWhenAdded(final String nodeID, final long start) {
   if (clusterMap.containsKey(nodeID)) {
     checkQuorum();
   } else {
     vertx.setTimer(
         200,
         tid -> {
           // This can block on a monitor so it needs to run as a worker
           vertx.executeBlockingInternal(
               () -> {
                 if (System.currentTimeMillis() - start > 10000) {
                   log.warn("Timed out waiting for group information to appear");
                 } else if (!stopped) {
                   ContextImpl context = vertx.getContext();
                   try {
                     // Remove any context we have here (from the timer) otherwise will screw
                     // things up when verticles are deployed
                     ContextImpl.setContext(null);
                     checkQuorumWhenAdded(nodeID, start);
                   } finally {
                     ContextImpl.setContext(context);
                   }
                 }
                 return null;
               },
               null);
         });
   }
 }
Example #4
 // Add the deployment to an internal list of deploymentIDs - these will be executed when a quorum
 // is attained
 private void addToHADeployList(
     final String verticleName,
     final DeploymentOptions deploymentOptions,
     final Handler<AsyncResult<String>> doneHandler) {
   toDeployOnQuorum.add(
       () -> {
         ContextImpl ctx = vertx.getContext();
         try {
           ContextImpl.setContext(null);
           deployVerticle(verticleName, deploymentOptions, doneHandler);
         } finally {
           ContextImpl.setContext(ctx);
         }
       });
 }
Example #5
    public void doUndeploy(
        ContextImpl undeployingContext, Handler<AsyncResult<Void>> completionHandler) {

      if (!children.isEmpty()) {
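        // Undeploy all child deployments first; the last child's callback re-enters doUndeploy, which then takes the else branch.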
        final int size = children.size();
        AtomicInteger childCount = new AtomicInteger();
        for (Deployment childDeployment : new HashSet<>(children)) {
          childDeployment.doUndeploy(
              undeployingContext,
              ar -> {
                children.remove(childDeployment);
                if (ar.failed()) {
                  reportFailure(ar.cause(), undeployingContext, completionHandler);
                } else if (childCount.incrementAndGet() == size) {
                  // All children undeployed
                  doUndeploy(undeployingContext, completionHandler);
                }
              });
        }
      } else {
        undeployed = true;
        context.runOnContext(
            v -> {
              Future<Void> stopFuture = new FutureResultImpl<>();
              stopFuture.setHandler(
                  ar -> {
                    deployments.remove(id);
                    context.runCloseHooks(
                        ar2 -> {
                          if (ar2.failed()) {
                            // Log error but we report success anyway
                            log.error("Failed to run close hook", ar2.cause());
                          }
                          if (ar.succeeded()) {
                            reportSuccess(null, undeployingContext, completionHandler);
                          } else {
                            reportFailure(ar.cause(), undeployingContext, completionHandler);
                          }
                        });
                  });
              try {
                verticle.stop(stopFuture);
              } catch (Throwable t) {
                stopFuture.setFailure(t);
              }
            });
      }
    }
Example #6
 private VirtualFile getVirtualFile(String relPath) {
   VirtualFile fileByPath = instance.findFileByPath(context.absPath(relPath));
   if (fileByPath != null && fileByPath.isValid()) {
     return fileByPath;
   }
   return null;
 }
Example #7
 // Process the failover of a deployment
 private void processFailover(JsonObject failedVerticle) {
   if (failDuringFailover) {
     throw new VertxException("Oops!");
   }
   // This method must block until the failover is complete - i.e. the verticle is successfully
   // redeployed
   final String verticleName = failedVerticle.getString("verticle_name");
   final CountDownLatch latch = new CountDownLatch(1);
   final AtomicReference<Throwable> err = new AtomicReference<>();
   // Now deploy this verticle on this node
   ContextImpl ctx = vertx.getContext();
   if (ctx != null) {
     // We could be on main thread in which case we don't want to overwrite tccl
     ContextImpl.setContext(null);
   }
   JsonObject options = failedVerticle.getJsonObject("options");
   try {
     doDeployVerticle(
         verticleName,
         new DeploymentOptions(options),
         result -> {
           if (result.succeeded()) {
             log.info("Successfully redeployed verticle " + verticleName + " after failover");
           } else {
             log.error("Failed to redeploy verticle after failover", result.cause());
             err.set(result.cause());
           }
           latch.countDown();
           Throwable t = err.get();
           if (t != null) {
             throw new VertxException(t);
           }
         });
   } finally {
     if (ctx != null) {
       ContextImpl.setContext(ctx);
     }
   }
   try {
     if (!latch.await(120, TimeUnit.SECONDS)) {
       throw new VertxException("Timed out waiting for redeploy on failover");
     }
   } catch (InterruptedException e) {
     throw new IllegalStateException(e);
   }
 }
Example #8
 // Undeploy any HA deploymentIDs now there is no quorum
 private void undeployHADeployments() {
   for (String deploymentID : deploymentManager.deployments()) {
     Deployment dep = deploymentManager.getDeployment(deploymentID);
     if (dep != null) {
       if (dep.deploymentOptions().isHa()) {
         ContextImpl ctx = vertx.getContext();
         try {
           ContextImpl.setContext(null);
           deploymentManager.undeployVerticle(
               deploymentID,
               result -> {
                 if (result.succeeded()) {
                   log.info(
                       "Successfully undeployed HA deployment "
                           + deploymentID
                           + "-"
                           + dep.verticleIdentifier()
                           + " as there is no quorum");
                   addToHADeployList(
                       dep.verticleIdentifier(),
                       dep.deploymentOptions(),
                       result1 -> {
                         if (result1.succeeded()) {
                           log.info(
                               "Successfully redeployed verticle "
                                   + dep.verticleIdentifier()
                                   + " after quorum was re-attained");
                         } else {
                           log.error(
                               "Failed to redeploy verticle "
                                   + dep.verticleIdentifier()
                                   + " after quorum was re-attained",
                               result1.cause());
                         }
                       });
                 } else {
                   log.error("Failed to undeploy deployment on lost quorum", result.cause());
                 }
               });
         } finally {
           ContextImpl.setContext(ctx);
         }
       }
     }
   }
 }
Example #9
 @Override
 public void goToLastHighlight() {
   FlooHandler handler = context.getFlooHandler();
   if (handler == null) {
     return;
   }
   handler.editorEventHandler.goToLastHighlight();
 }
Example #10
  @Test
  public void testAddAndGetSentence() throws KBApiException {
    System.out.println("Running testAddAndGetSentence");
    KBIndividualImpl i = KBIndividualImpl.findOrCreate("SomeRandom-LS");
    i.isInstanceOf(KBCollectionImpl.get("LogicalSchema"));
    KBPredicateImpl p = KBPredicateImpl.get("meaningSentenceOfSchema");
    List<Object> l = new ArrayList<Object>();
    l.add(KBPredicateImpl.get("isa"));
    l.add(i);
    l.add(p);
    Sentence s = new SentenceImpl(l.toArray());
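
     // Assert the (isa ...) sentence built above as the second argument of a meaningSentenceOfSchema fact in UniversalVocabularyMt, then read it back via getFacts.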

    i.addFact(ContextImpl.get("UniversalVocabularyMt"), p, 1, s);

    Collection<Fact> lfs = i.getFacts(p, 1, ContextImpl.get("UniversalVocabularyMt"));
    KBIndividual iback = lfs.iterator().next().<KBIndividual>getArgument(1);
    Sentence a = lfs.iterator().next().<Sentence>getArgument(2);
    assertTrue("Didn't get sentence", (a instanceof Sentence));
  }
Example #11
 private void doDeploy(
     Verticle verticle,
     DeploymentOptions options,
     ContextImpl currentContext,
     Handler<AsyncResult<String>> completionHandler) {
   if (options.isMultiThreaded() && !options.isWorker()) {
     throw new IllegalArgumentException("If multi-threaded then must be worker too");
   }
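    // Each deployment runs on its own context: a worker context (optionally multi-threaded) or a fresh event-loop context.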
   ContextImpl context =
       options.isWorker()
           ? vertx.createWorkerContext(options.isMultiThreaded())
           : vertx.createEventLoopContext();
   String deploymentID = UUID.randomUUID().toString();
   DeploymentImpl deployment = new DeploymentImpl(deploymentID, context, verticle);
   context.setDeployment(deployment);
   Deployment parent = currentContext.getDeployment();
   if (parent != null) {
     parent.addChild(deployment);
   }
   JsonObject conf = options.getConfig() == null ? null : options.getConfig().copy(); // Copy it
   context.runOnContext(
       v -> {
         try {
           verticle.setVertx(vertx);
           verticle.setConfig(conf);
           verticle.setDeploymentID(deploymentID);
           Future<Void> startFuture = new FutureResultImpl<>();
           verticle.start(startFuture);
           startFuture.setHandler(
               ar -> {
                 if (ar.succeeded()) {
                   deployments.put(deploymentID, deployment);
                   reportSuccess(deploymentID, currentContext, completionHandler);
                 } else {
                   reportFailure(ar.cause(), currentContext, completionHandler);
                 }
               });
         } catch (Throwable t) {
           reportFailure(t, currentContext, completionHandler);
         }
       });
 }
Example #12
 @Override
 public IFile findFileByPath(String path) {
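   // Resolve to an absolute path and wrap the VirtualFile only if it exists and is still valid.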
   String absPath = context.absPath(path);
   if (absPath == null) {
     return null;
   }
   VirtualFile fileByPath = instance.findFileByPath(absPath);
   if (fileByPath != null && fileByPath.isValid()) {
     return new FileImpl(fileByPath);
   }
   return null;
 }
Example #13
 /* (non-Javadoc)
  * @see com.cyc.kb.KBTerm#provablyNotInstanceOf(java.lang.String, java.lang.String)
  */
 @Override
 public boolean provablyNotInstanceOf(String colStr, String ctxStr) {
   ContextImpl ctx;
   KBCollectionImpl col;
   try {
     ctx = ContextImpl.get(ctxStr);
     col = KBCollectionImpl.get(colStr);
   } catch (KBApiException kae) {
     throw new IllegalArgumentException(kae.getMessage(), kae);
   }
   return provablyNotInstanceOf(col, ctx);
 }
Example #14
 @Override
 public InputStream getData(String query) {
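   // Stream the raw request body; wrapAndThrow rethrows the IOException, so the trailing return only satisfies the compiler.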
   contentStream = context.getDocBuilder().getReqParams().getContentStream();
   if (contentStream == null)
     throw new DataImportHandlerException(SEVERE, "No stream available. The request has no body");
   try {
     return in = contentStream.getStream();
   } catch (IOException e) {
     DataImportHandlerException.wrapAndThrow(SEVERE, e);
     return null;
   }
 }
Example #15
  @Test
  public void testFactFactories()
      throws KBTypeException, CreateException, ParseException, DeleteException {

    // Only GAFs can be Fact.class
    System.out.println("Running testFactFactories");
    System.out.println("Testing that only GAFs can be Facts");
    try {
      @SuppressWarnings("deprecation")
      Fact f = FactImpl.get(KBObjectImpl.getCore(TestConstants.flyingRule));
    } catch (KBObjectNotFoundException kboe) {
      // @todo isn't this an error/failure???
      System.out.println("Got Exception: " + kboe.toString());
    }

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy MM dd HH:mm");
    Context airlineLogMt = ContextImpl.findOrCreate("SomeAirlineLogMt");
    KBIndividual flight = KBIndividualImpl.get("FlightXYZ-APITest");

    Date d = sdf.parse("2014 03 15 10:20");
    SentenceImpl s = new SentenceImpl(TestConstants.kbapitc.endingDate, flight, d);
    Assertion a = AssertionImpl.get(s, airlineLogMt);
    @SuppressWarnings("deprecation")
    Fact flDate = FactImpl.get(KBObjectImpl.getCore(a));

    System.out.println("Testing HLID Factory");
    String hlid = a.getId();
    Fact flDate1 = FactImpl.get(hlid);
    assertTrue("Fact not equal to one from HLID", flDate1 == flDate);

    // Get based on formula and context strings
    System.out.println("Testing factory get(formulaStr, ctxStr)");
    Fact flDate2 = FactImpl.get(s.toString(), airlineLogMt.toString());
    assertTrue("Fact not equal to one from formulaStr, ctxStr", flDate2 == flDate);

    System.out.println("Testing factory get(formula, ctx)");
    Fact flDate3 = FactImpl.get(s, airlineLogMt);
    assertTrue("Fact not equal to one from formula, ctx", flDate3 == flDate);

    FirstOrderCollectionImpl flying2Col = FirstOrderCollectionImpl.get("Flying-Move");
    KBIndividual flight2 =
        KBIndividualImpl.findOrCreate("FlightABC-APITest", flying2Col, airlineLogMt);

    SentenceImpl s2 = new SentenceImpl(TestConstants.kbapitc.endingDate, flight2, d);
    FactImpl fl2Date = FactImpl.findOrCreate(s2, airlineLogMt);
    fl2Date.delete();

    Fact fl3Date = FactImpl.findOrCreate(s2.toString(), airlineLogMt.toString());
    assertFalse("Probably failed to assert, HLID is empty string", fl2Date.getId().equals(""));
  }
Example #16
  @Test
  public void testAddList() throws KBApiException {
    System.out.println("Running testAddList");

    // FIXME: this test is erroring again in RCyc 4.0q. - nwinant, 2015-07-03
    assumeCycSessionRequirement(NotResearchCycRequirement.NOT_RESEARCHCYC);

    List<String> cities = new ArrayList<String>();
    cities.add("CityOfLosAngelesCA");
    cities.add("CityOfBrusselsBelgium");
    cities.add("CityOfCairoEgypt");

    SentenceImpl s =
        new SentenceImpl(
            BinaryPredicateImpl.get("flightDestinationList"),
            KBIndividualImpl.get("FlyingAPlane-APITest"),
            cities);
    Fact f = new FactImpl(ContextImpl.get("BaseKB"), s);
    assertTrue("Failed to get a Fact", f instanceof Fact);
  }
Example #17
  public String getPathForDoc(Document document) {
    if (document == null) {
      return null;
    }
    VirtualFile virtualFile = FileDocumentManager.getInstance().getFile(document);
    if (virtualFile == null) {
      return null;
    }
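
    // Defensive: a NullPointerException from getPath() is treated as "no path", and only paths shared in the workspace are returned.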

    String path;
    try {
      path = virtualFile.getPath();
    } catch (NullPointerException e) {
      return null;
    }
    if (!context.isShared(path)) {
      return null;
    }

    return path;
  }
Example #18
 @Override
 public IDoc getDocument(String relPath) {
   IFile fileByPath = findFileByPath(context.absPath(relPath));
   return getDocument(fileByPath);
 }
Example #19
  @SuppressWarnings("unchecked")
  private void buildDocument(
      VariableResolverImpl vr,
      DocWrapper doc,
      Map<String, Object> pk,
      DataConfig.Entity entity,
      boolean isRoot,
      ContextImpl parentCtx) {

    EntityProcessorWrapper entityProcessor = getEntityProcessor(entity);

    ContextImpl ctx =
        new ContextImpl(
            entity,
            vr,
            null,
            pk == null ? Context.FULL_DUMP : Context.DELTA_DUMP,
            session,
            parentCtx,
            this);
    entityProcessor.init(ctx);
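    // Publish this entity's context in the thread-local so other components running on this thread can see it.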
    Context.CURRENT_CONTEXT.set(ctx);

    if (requestParameters.start > 0) {
      writer.log(SolrWriter.DISABLE_LOGGING, null, null);
    }

    if (verboseDebug) {
      writer.log(SolrWriter.START_ENTITY, entity.name, null);
    }

    int seenDocCount = 0;

    try {
      while (true) {
        if (stop.get()) return;
        if (importStatistics.docCount.get() > (requestParameters.start + requestParameters.rows))
          break;
        try {
          seenDocCount++;

          if (seenDocCount > requestParameters.start) {
            writer.log(SolrWriter.ENABLE_LOGGING, null, null);
          }

          if (verboseDebug && entity.isDocRoot) {
            writer.log(SolrWriter.START_DOC, entity.name, null);
          }
          if (doc == null && entity.isDocRoot) {
            doc = new DocWrapper();
            ctx.setDoc(doc);
            DataConfig.Entity e = entity;
            while (e.parentEntity != null) {
              addFields(
                  e.parentEntity, doc, (Map<String, Object>) vr.resolve(e.parentEntity.name), vr);
              e = e.parentEntity;
            }
          }

          Map<String, Object> arow = entityProcessor.nextRow();
          if (arow == null) {
            break;
          }

          // Support for start parameter in debug mode
          if (entity.isDocRoot) {
            if (seenDocCount <= requestParameters.start) continue;
            if (seenDocCount > requestParameters.start + requestParameters.rows) {
              LOG.info("Indexing stopped at docCount = " + importStatistics.docCount);
              break;
            }
          }

          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_OUT, entity.name, arow);
          }
          importStatistics.rowsCount.incrementAndGet();
          if (doc != null) {
            handleSpecialCommands(arow, doc);
            addFields(entity, doc, arow, vr);
          }
          if (entity.entities != null) {
            vr.addNamespace(entity.name, arow);
            for (DataConfig.Entity child : entity.entities) {
              buildDocument(vr, doc, child.isDocRoot ? pk : null, child, false, ctx);
            }
            vr.removeNamespace(entity.name);
          }
          /* The child entities would have changed the CURRENT_CONTEXT. So when they
           * are done, set it back to the old one. */
          Context.CURRENT_CONTEXT.set(ctx);

          if (entity.isDocRoot) {
            if (stop.get()) return;
            if (!doc.isEmpty()) {
              boolean result = writer.upload(doc);
              doc = null;
              if (result) {
                importStatistics.docCount.incrementAndGet();
              } else {
                importStatistics.failedDocCount.incrementAndGet();
              }
            }
          }

        } catch (DataImportHandlerException e) {
          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, e);
          }
          if (e.getErrCode() == DataImportHandlerException.SKIP_ROW) {
            continue;
          }
          if (isRoot) {
            if (e.getErrCode() == DataImportHandlerException.SKIP) {
              importStatistics.skipDocCount.getAndIncrement();
              doc = null;
            } else {
              LOG.error("Exception while processing: " + entity.name + " document : " + doc, e);
            }
            if (e.getErrCode() == DataImportHandlerException.SEVERE) throw e;
          } else throw e;
        } catch (Throwable t) {
          if (verboseDebug) {
            writer.log(SolrWriter.ENTITY_EXCEPTION, entity.name, t);
          }
          throw new DataImportHandlerException(DataImportHandlerException.SEVERE, t);
        } finally {
          if (verboseDebug) {
            writer.log(SolrWriter.ROW_END, entity.name, null);
            if (entity.isDocRoot) writer.log(SolrWriter.END_DOC, null, null);
            Context.CURRENT_CONTEXT.remove();
          }
        }
      }
    } finally {
      if (verboseDebug) {
        writer.log(SolrWriter.END_ENTITY, null, null);
      }
      entityProcessor.destroy();
    }
  }
Example #20
 /* (non-Javadoc)
  * @see com.cyc.kb.Relation#addArgGenl(int, java.lang.String, java.lang.String)
  */
 @Override
 public Relation addArgGenl(int argPos, String colStr, String ctxStr)
     throws KBTypeException, CreateException {
   return addArgGenl(argPos, KBCollectionImpl.get(colStr), ContextImpl.get(ctxStr));
 }
Example #21
 private void runAThread(ThreadedEntityProcessorWrapper epw, EntityRow rows, String currProcess)
     throws Exception {
   currentEntityProcWrapper.set(epw);
   epw.threadedInit(context);
   initEntity();
   try {
     epw.init(rows);
     DocWrapper docWrapper = this.docWrapper;
     Context.CURRENT_CONTEXT.set(context);
     for (; ; ) {
       if (DocBuilder.this.stop.get()) break;
       try {
         Map<String, Object> arow = epw.nextRow();
         if (arow == null) {
           break;
         } else {
           importStatistics.rowsCount.incrementAndGet();
           if (docWrapper == null && entity.isDocRoot) {
             docWrapper = new DocWrapper();
             context.setDoc(docWrapper);
             DataConfig.Entity e = entity.parentEntity;
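              // Copy the fields of each ancestor row onto the newly created root document.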
             for (EntityRow row = rows;
                 row != null && e != null;
                 row = row.tail, e = e.parentEntity) {
               addFields(e, docWrapper, row.row, epw.resolver);
             }
           }
           if (docWrapper != null) {
             handleSpecialCommands(arow, docWrapper);
             addFields(entity, docWrapper, arow, epw.resolver);
           }
           if (entity.entities != null) {
             EntityRow nextRow = new EntityRow(arow, rows, entity.name);
             for (DataConfig.Entity e : entity.entities) {
               epw.children.get(e).run(docWrapper, currProcess, nextRow);
             }
           }
         }
         if (entity.isDocRoot) {
           LOG.info("a row on docroot" + docWrapper);
           if (!docWrapper.isEmpty()) {
             LOG.info("adding a doc " + docWrapper);
             boolean result = writer.upload(docWrapper);
             docWrapper = null;
             if (result) {
               importStatistics.docCount.incrementAndGet();
             } else {
               importStatistics.failedDocCount.incrementAndGet();
             }
           }
         }
       } catch (DataImportHandlerException dihe) {
         exception = dihe;
         if (dihe.getErrCode() == SKIP_ROW || dihe.getErrCode() == SKIP) {
           importStatistics.skipDocCount.getAndIncrement();
            exception = null; // should not propagate up
           continue;
         }
         if (entity.isDocRoot) {
           if (dihe.getErrCode() == DataImportHandlerException.SKIP) {
             importStatistics.skipDocCount.getAndIncrement();
              exception = null; // should not propagate up
           } else {
             LOG.error(
                 "Exception while processing: " + entity.name + " document : " + docWrapper,
                 dihe);
           }
           if (dihe.getErrCode() == DataImportHandlerException.SEVERE) throw dihe;
         } else {
            // If this is not the docRoot then the execution has happened in the same
            // thread, so propagate up; it will be handled at the docRoot.
           entityEnded.set(true);
           throw dihe;
         }
         entityEnded.set(true);
       }
     }
   } finally {
     epw.destroy();
     currentEntityProcWrapper.remove();
     Context.CURRENT_CONTEXT.remove();
   }
 }