Example #1
  /**
   * Classifies the base ontology with Pellet's IncrementalClassifier, then adds the logical
   * axioms of each ontology in ontDir one file at a time and re-classifies incrementally,
   * reporting elapsed times.
   */
  private void getPelletIncrementalClassifierRunTime(String baseOnt, String ontDir)
      throws Exception {
    System.out.println("Using Pellet Incremental Classifier...");
    GregorianCalendar start = new GregorianCalendar();
    File ontFile = new File(baseOnt);
    IRI documentIRI = IRI.create(ontFile);
    OWLOntology baseOntology = OWL.manager.loadOntology(documentIRI);
    IncrementalClassifier classifier = new IncrementalClassifier(baseOntology);
    classifier.classify();
    System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
    System.out.println("Time taken for base ontology (millis): " + Util.getElapsedTime(start));
    File ontDirPath = new File(ontDir);
    File[] allFiles = ontDirPath.listFiles();
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    addTripsBaseOntologies(manager);
    int count = 1;
    for (File owlFile : allFiles) {
      IRI owlDocumentIRI = IRI.create(owlFile);
      OWLOntology ontology = manager.loadOntologyFromOntologyDocument(owlDocumentIRI);
      Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
      // Apply each axiom through Pellet's OWL.manager (the manager that owns baseOntology),
      // so the incremental classifier is notified of the change.
      for (OWLLogicalAxiom axiom : axioms) {
        OWL.manager.applyChange(new AddAxiom(baseOntology, axiom));
      }

      System.out.println("\nLogical axioms: " + baseOntology.getLogicalAxiomCount());
      System.out.println(count + "  file: " + owlFile.getName());
      // Re-classify incrementally after adding this file's axioms.
      classifier.classify();
      manager.removeOntology(ontology);
      count++;
    }
    System.out.println("\nTotal time taken (millis): " + Util.getElapsedTime(start));
  }
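
Both runtime examples measure wall-clock time through Util.getElapsedTime(start), which is not part of this listing. A minimal sketch, assuming the helper simply returns the milliseconds elapsed since the given GregorianCalendar, could look like this:

  // Hypothetical helper, not taken from the original project.
  public static long getElapsedTime(GregorianCalendar start) {
    return System.currentTimeMillis() - start.getTimeInMillis();
  }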
Example #2
  /**
   * Classifies the base ontology with ELK, then incrementally adds the logical axioms of each
   * ontology in ontDir, flushes the reasoner, and re-runs inference, reporting the total time.
   */
  public void getELKIncrementalRuntime(String baseOnt, String ontDir) throws Exception {
    GregorianCalendar start = new GregorianCalendar();
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    IRI documentIRI = IRI.create(new File(baseOnt));
    OWLOntology baseOntology = manager.loadOntology(documentIRI);
    System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());

    OWLReasonerFactory reasonerFactory = new ElkReasonerFactory();
    OWLReasoner reasoner = reasonerFactory.createReasoner(baseOntology);
    reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
    File[] files = new File(ontDir).listFiles();
    int count = 0;
    OWLOntologyManager manager2 = OWLManager.createOWLOntologyManager();
    addTripsBaseOntologies(manager2);
    for (File file : files) {
      System.out.println("File name: " + file.getName());
      documentIRI = IRI.create(file);
      OWLOntology ontology = manager2.loadOntology(documentIRI);
      Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
      // Add this file's axioms to the base ontology, then let the buffering reasoner pick up
      // the pending changes before re-running inference.
      manager.addAxioms(baseOntology, axioms);
      reasoner.flush();
      System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());

      // From the ELK wiki, it seems ABox reasoning will trigger TBox reasoning
      reasoner.precomputeInferences(InferenceType.CLASS_ASSERTIONS);

      manager2.removeOntology(ontology);
      count++;
      System.out.println("Done with " + count);
    }
    reasoner.dispose();
    System.out.println("Time taken (millis): " + Util.getElapsedTime(start));
  }
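
Both examples also call addTripsBaseOntologies(manager) before loading the per-file ontologies; its body is not shown here. A hypothetical sketch, assuming it only registers IRI mappers so that imported base ontologies resolve to local copies (the IRI and path below are placeholders, not the actual TRIPS values), might be:

  // Hypothetical sketch using org.semanticweb.owlapi.util.SimpleIRIMapper (OWL API 3.x style).
  private void addTripsBaseOntologies(OWLOntologyManager manager) {
    IRI ontologyIRI = IRI.create("http://example.org/trips-base.owl");   // placeholder IRI
    IRI documentIRI = IRI.create(new File("ontologies/trips-base.owl")); // placeholder local copy
    manager.addIRIMapper(new SimpleIRIMapper(ontologyIRI, documentIRI));
  }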
Example #3
  /**
   * Classifies the given ontology with a standalone reasoner (ELK) and compares the computed
   * results against the classification output stored in the Redis result nodes.
   */
  private void precomputeAndCheckResults(OWLOntology ontology) throws Exception {
    System.out.println("Not Normalizing");

    PropertyFileHandler propertyFileHandler = PropertyFileHandler.getInstance();
    HostInfo resultNodeHostInfo = propertyFileHandler.getResultNode();
    // port: 6489 for snapshot testing
    Jedis resultStore =
        new Jedis(
            resultNodeHostInfo.getHost(), resultNodeHostInfo.getPort(), Constants.INFINITE_TIMEOUT);
    // A second connection to the same result node, switched to Redis DB 2.
    Jedis resultStore2 =
        new Jedis(
            resultNodeHostInfo.getHost(), resultNodeHostInfo.getPort(), Constants.INFINITE_TIMEOUT);
    resultStore2.select(2);
    HostInfo localHostInfo = propertyFileHandler.getLocalHostInfo();
    Jedis localStore = new Jedis(localHostInfo.getHost(), localHostInfo.getPort());
    // Look up the host:port entries of the concept-ID node(s) registered in the local store.
    Set<String> idHosts =
        localStore.zrange(
            AxiomDistributionType.CONCEPT_ID.toString(),
            Constants.RANGE_BEGIN,
            Constants.RANGE_END);
    // currently there is only one ID node
    String[] idHostPort = idHosts.iterator().next().split(":");
    Jedis idReader =
        new Jedis(idHostPort[0], Integer.parseInt(idHostPort[1]), Constants.INFINITE_TIMEOUT);
    GregorianCalendar cal1 = new GregorianCalendar();
    try {
      // Several alternative reasoners (Pellet, HermiT, REL, jcel) were tried here previously;
      // ELK is used for the comparison below.

      OWLReasonerFactory reasonerFactory = new ElkReasonerFactory();
      OWLReasoner reasoner = reasonerFactory.createReasoner(ontology);
      reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);

      System.out.println("Reasoner completed in (millis): " + Util.getElapsedTime(cal1));

      System.out.println("Comparing results using ELK.....");
      rearrangeAndCompareResults(ontology, reasoner, resultStore, resultStore2, idReader);

      reasoner.dispose();
    } finally {
      localStore.disconnect();
      resultStore.disconnect();
      resultStore2.disconnect();
      idReader.disconnect();
    }
  }
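
None of the three methods is a complete program on its own. A minimal driver, assuming the enclosing class is named something like ReasonerTimings and using placeholder paths, could invoke the two timing runs like this:

  // Hypothetical driver; the class name and file paths are placeholders.
  public static void main(String[] args) throws Exception {
    String baseOnt = "ontologies/base.owl";   // placeholder base ontology
    String ontDir = "ontologies/increments";  // placeholder directory of incremental files
    ReasonerTimings timings = new ReasonerTimings();
    timings.getPelletIncrementalClassifierRunTime(baseOnt, ontDir);
    timings.getELKIncrementalRuntime(baseOnt, ontDir);
  }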