Example #1
 public void classifyOntology(OWLOntology ontology, OWLReasonerFactory factory) {
   // Use Manchester syntax whenever OWL objects are rendered to strings
   ToStringRenderer.getInstance().setRenderer(new ManchesterOWLSyntaxOWLObjectRendererImpl());
   OWLReasoner reasoner = factory.createNonBufferingReasoner(ontology);
   // reasoner.precomputeInferences(InferenceType.values());
   // Time the consistency check and the counting of unsatisfiable classes
   long time = System.currentTimeMillis();
   boolean isConsistent = reasoner.isConsistent();
   int numOfUnsatClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom().size();
   time = System.currentTimeMillis() - time;
   // Determine the DL expressivity and a short display name for the ontology
   DLExpressivityChecker checker = new DLExpressivityChecker(Collections.singleton(ontology));
   String e = checker.getDescriptionLogicName();
   String name = ontology.getOntologyID().getOntologyIRI().getFragment();
   logger.info(
       "ontology: "
           + name
           + ", reasoner: "
           + factory.getReasonerName()
           + ", expressivity: "
           + e
           + ", consistent: "
           + isConsistent
           + ", unsat classes: "
           + numOfUnsatClasses
           + ", time: "
           + time);
 }
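A minimal sketch of how this method might be invoked (the file path and the choice of StructuralReasonerFactory are assumptions for illustration; any other OWLReasonerFactory, such as HermiT or Pellet, could be passed instead):

  // Hypothetical caller: load an ontology and classify it with the OWL API's
  // built-in structural reasoner (swap in any other OWLReasonerFactory as needed)
  public void classifyExample() throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology =
        manager.loadOntologyFromOntologyDocument(new File("/path/to/ontology.owl"));
    classifyOntology(ontology, new StructuralReasonerFactory());
  }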
Example #2
   private void handleNotesOntologyChanged(List<OWLOntologyChange> changes) {
    try {
      OWLOntologyManager notesOntologyManager = notesOntology.getOWLOntologyManager();
      if (notesOntologyManager.getOntologyFormat(notesOntology)
          instanceof BinaryOWLOntologyDocumentFormat) {
        OWLAPIProjectDocumentStore documentStore =
            OWLAPIProjectDocumentStore.getProjectDocumentStore(project.getProjectId());
        List<OWLOntologyChangeData> infoList = new ArrayList<OWLOntologyChangeData>();
        for (OWLOntologyChange change : changes) {
          OWLOntologyChangeRecord rec = change.getChangeRecord();
          OWLOntologyChangeData info = rec.getData();
          infoList.add(info);
        }
        BinaryOWLOntologyDocumentSerializer serializer = new BinaryOWLOntologyDocumentSerializer();
        serializer.appendOntologyChanges(
            notesOntologyDocument,
            new OntologyChangeDataList(infoList, System.currentTimeMillis()));
      } else {
         // Not stored in binary format yet: convert it by saving in the binary OWL document format
        notesOntologyManager.saveOntology(notesOntology, new BinaryOWLOntologyDocumentFormat());
      }

    } catch (OWLOntologyStorageException e) {
      throw new RuntimeException(e);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
Example #3
  protected OWLOntology loadBigOntology(String filename) {
    // "bigontosdir" is expected to point to the directory that holds the ontologies,
    // including a trailing path separator, since the path is built by string concatenation
    File file = new File(System.getenv("bigontosdir") + filename + ".owl");
    OWLOntology ontology = null;
    try {
      ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(file);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    return ontology;
  }
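A hypothetical call, assuming the bigontosdir environment variable points to the ontology directory (with a trailing separator, as noted above) and that the directory contains a file named pizza.owl:

  // Hypothetical usage; the ontology name and the environment value are assumptions
  // export bigontosdir=/data/ontologies/   <- trailing separator required
  OWLOntology pizza = loadBigOntology("pizza");
  System.out.println("Loaded " + pizza.getAxiomCount() + " axioms");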
Example #4
  /** Serializes the A-box corresponding to a predicate into one or multiple files. */
  private static void serializePredicate(
      OWLOntology ontology,
      Ontology inputOntology,
      OBDAModel obdaModel,
      Predicate predicate,
      String outputFile,
      String format)
      throws Exception {
    final long startTime = System.currentTimeMillis();

    OWLAPI3Materializer materializer =
        new OWLAPI3Materializer(obdaModel, inputOntology, predicate, DO_STREAM_RESULTS);
    QuestOWLIndividualAxiomIterator iterator = materializer.getIterator();

    System.err.println("Starts writing triples into files.");

    int tripleCount = 0;
    int fileCount = 0;

    // outputFile is interpreted as an output directory: each batch of triples is
    // written via serializeTripleBatch(), using the sanitized predicate name as the file prefix
    String outputDir = outputFile;
    String filePrefix =
        Paths.get(outputDir, predicate.getName().replaceAll("[^a-zA-Z0-9]", "_") + "_").toString();

    while (iterator.hasNext()) {
      tripleCount +=
          serializeTripleBatch(
              ontology, iterator, filePrefix, predicate.getName(), fileCount, format);
      fileCount++;
    }

    System.out.println("NR of TRIPLES: " + tripleCount);
    System.out.println("VOCABULARY SIZE (NR of QUERIES): " + materializer.getVocabularySize());

    final long endTime = System.currentTimeMillis();
    final long time = endTime - startTime;
    System.out.println("Elapsed time to materialize: " + time + " {ms}");
  }
Example #5
  public void runWithSingleFile() {
    BufferedOutputStream output = null;
    BufferedWriter writer = null;

    try {
      final long startTime = System.currentTimeMillis();

      if (outputFile != null) {
        output = new BufferedOutputStream(new FileOutputStream(outputFile));
      } else {
        output = new BufferedOutputStream(System.out);
      }
      writer = new BufferedWriter(new OutputStreamWriter(output, "UTF-8"));

      OBDAModel obdaModel = loadMappingFile(mappingFile);

      OWLOntology ontology = null;
      OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
      OWLAPI3Materializer materializer = null;

      if (owlFile != null) {
        // Load the OWL ontology from the file, just as for a normal OWLReasoner
        ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));

        if (disableReasoning) {
          /*
           * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
           */
          ontology = extractDeclarations(manager, ontology);
        }

        Ontology onto = OWLAPI3TranslatorUtility.translate(ontology);
        obdaModel.declareAll(onto.getVocabulary());
        materializer = new OWLAPI3Materializer(obdaModel, onto, DO_STREAM_RESULTS);
      } else {
        ontology = manager.createOntology();
        materializer = new OWLAPI3Materializer(obdaModel, DO_STREAM_RESULTS);
      }

      // OBDAModelSynchronizer.declarePredicates(ontology, obdaModel);

      QuestOWLIndividualAxiomIterator iterator = materializer.getIterator();

      // Add every materialized assertion to the ontology before saving it
      while (iterator.hasNext()) {
        manager.addAxiom(ontology, iterator.next());
      }

      OWLOntologyFormat ontologyFormat = getOntologyFormat(format);

      manager.saveOntology(ontology, ontologyFormat, new WriterDocumentTarget(writer));

      System.err.println("NR of TRIPLES: " + materializer.getTriplesCount());
      System.err.println("VOCABULARY SIZE (NR of QUERIES): " + materializer.getVocabularySize());

      materializer.disconnect();
      if (outputFile != null) output.close();

      final long endTime = System.currentTimeMillis();
      final long time = endTime - startTime;
      System.out.println("Elapsed time to materialize: " + time + " {ms}");

    } catch (Exception e) {
      System.out.println("Error materializing ontology:");
      e.printStackTrace();
    }
  }