Example 1
  /**
   * Initializes the semantic container's components once the ontology URIs have been dispatched
   * by OwlDistributionInitializer.
   */
  public void semInit() throws ExceptionInInitializerError {
    if (isInitialized()) {
      /* Initialization of the local ontology. This ontology is not modified afterwards. */
      // Initialization of the local ontology's manager. Only used here.
      OWLOntologyManager localOntoManager = OWLManager.createOWLOntologyManager();
      OWLOntologyIRIMapper localOntoMapper =
          new SimpleIRIMapper(getOntologyIri(), getPhysicalIri());
      localOntoManager.addIRIMapper(localOntoMapper);
      // Ontology initialization
      try {
        // Create the local ontology.
        this.localOntology = localOntoManager.loadOntology(getPhysicalIri());
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
      // System.out.println("localOntology : " + localOntology + " ; pyhsicalIri : " +
      // getPhysicalIri().toString());

      /*
       * Initialization of the reasoner's ontology, which will contain the local ontology plus any
       * additional axioms. We create the instance and attach it to a manager and a reasoner that
       * are accessible through getters. The instance stays the same, but its content changes.
       */
      // Initialization of the reasoner ontology's manager
      this.manager = OWLManager.createOWLOntologyManager();
      // Create a second ontology.
      try {
        IRI ontoName = IRI.create(this.nameSpace);
        OWLOntologyID ontoId = new OWLOntologyID(ontoName);
        this.reasoningOntology = this.manager.createOntology(ontoId);
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
      // Reasoner initialization
      PelletReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();

      PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
      PelletOptions.USE_COMPLETION_QUEUE = true;

      // PelletReasoner reasoner = reasonerFactory.createReasoner(reasoningOntology);
      PelletReasoner reasoner = reasonerFactory.createNonBufferingReasoner(reasoningOntology);

      // add the reasoner as an ontology change listener
      this.manager.addOntologyChangeListener(reasoner);

      reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
      //      reasoner.precomputeInferences(InferenceType.CLASS_ASSERTIONS);
      //      reasoner.precomputeInferences(InferenceType.DATA_PROPERTY_HIERARCHY);
      //      reasoner.precomputeInferences(InferenceType.DISJOINT_CLASSES);
      //      reasoner.precomputeInferences(InferenceType.OBJECT_PROPERTY_HIERARCHY);
      //      reasoner.precomputeInferences(InferenceType.SAME_INDIVIDUAL);
      this.reasoner = reasoner;
    } else
      throw new ExceptionInInitializerError(
          "Ontology parameters not initialized; ontoParamsInit = "
              + Reflection.getCallerClass(2));

    // initOntoReasoner();
  }
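A side note on the pattern above: semInit registers a SimpleIRIMapper but then loads the ontology by its physical IRI, which bypasses the mapper. A minimal, self-contained sketch of how such a mapper is usually exercised, loading by the logical IRI (the example.org IRI and the local path are placeholders):

import java.io.File;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.util.SimpleIRIMapper;

public class IriMapperSketch {
  public static void main(String[] args) throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    IRI logicalIri = IRI.create("http://example.org/onto");
    IRI physicalIri = IRI.create(new File("local/onto.owl"));
    // The mapper redirects requests for the logical IRI to the local document.
    manager.addIRIMapper(new SimpleIRIMapper(logicalIri, physicalIri));
    // Loading by the logical IRI now resolves to the local copy.
    OWLOntology onto = manager.loadOntology(logicalIri);
    System.out.println(onto.getOntologyID());
  }
}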
 public OWLOntology getMergedOntology() {
   final IRI mergedOntologyIRI =
       IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged");
   final OWLOntologyManager mm = controller.getOWLOntologyManager();
   if (mm.contains(mergedOntologyIRI)) {
     return mm.getOntology(mergedOntologyIRI);
   } else {
     try {
       final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI);
       mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat());
       final String mergedOntologyFileName =
           mergedOntologyIRI
                   .toURI()
                   .toString()
                   .substring(mergedOntologyIRI.toURI().toString().lastIndexOf("/") + 1)
               + ".owl";
       mm.setOntologyDocumentIRI(
           mergedOntology,
           IRI.create(
               controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName));
       mm.applyChange(
           new AddImport(
               mergedOntology,
               mm.getOWLDataFactory()
                   .getOWLImportsDeclaration(
                       queryOntology.getOntologyID().getDefaultDocumentIRI())));
       return mergedOntology;
     } catch (OWLOntologyCreationException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
       return null;
     }
   }
 }
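The "merge" above is lazy: instead of copying axioms, the method creates a fresh ontology that owl:imports the query ontology. That core step, isolated as a hedged sketch (the example.org IRIs are placeholders):

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.AddImport;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLImportsDeclaration;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;

public class ImportsSketch {
  public static void main(String[] args) throws OWLOntologyCreationException {
    OWLOntologyManager m = OWLManager.createOWLOntologyManager();
    OWLOntology merged = m.createOntology(IRI.create("http://example.org/merged"));
    OWLImportsDeclaration imports =
        m.getOWLDataFactory().getOWLImportsDeclaration(IRI.create("http://example.org/source"));
    // Imports are applied as changes through the manager, not by mutating the ontology.
    m.applyChange(new AddImport(merged, imports));
    System.out.println(merged.getImportsDeclarations());
  }
}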
Example 3
 @Override
 protected boolean exec() {
   Timer t = new Timer();
   if (ontology == null) {
     lSigExtractor = null;
     return true;
   }
   if (integrateRangesFirst) {
     OWLNormalization4MORe normalization =
         new OWLNormalization4MORe(ontology, true, false, false);
     Set<OWLAxiom> axioms = normalization.getNormalizedOntology();
     try {
       OWLOntologyManager manager = ontology.getOWLOntologyManager();
       ontology = manager.createOntology();
       manager.addAxioms(ontology, axioms);
     } catch (OWLOntologyCreationException e) {
       e.printStackTrace();
       lSigExtractor = null;
       return true;
     }
   }
   lSigExtractor.findLsignature(ontology, fragment);
   if (!integrateRangesFirst) stats.updateNelkAxioms(lSigExtractor.nAxiomsInFragment());
   Logger_MORe.logDebug(
       t.duration() + "s to find Lsignature with integrateRangesFirst=" + integrateRangesFirst);
   return true;
 }
 private TaxonStore getStore(String targetStr, String prefixStr, String formatStr) {
   File targetFile = getSourceFile(targetStr);
   if (targetFile.exists()) targetFile.delete();
   if (OBOFORMATSTR.equals(formatStr)) {
     return new OBOStore(
         targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
   }
   if (OWLFORMATSTR.equals(formatStr)) {
     try {
       return new OWLStore(
           targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
     } catch (OWLOntologyCreationException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
   }
   // these source formats aren't storage formats (there's no ontology library for them) so the
   // store is implementation dependent (currently OBO)
    if (XREFFORMATSTR.equals(formatStr)
        || COLUMNFORMATSTR.equals(formatStr)
        || SYNONYMFORMATSTR.equals(formatStr)
        || ALLCOLUMNSFORMATSTR.equals(formatStr)) {
     return new OBOStore(
         targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
   }
   logger.error("Format " + formatStr + " not supported for merging");
   return null;
 }
  public static void main(String[] args) {
    try {
      SimpleRenderer renderer = new SimpleRenderer();
      renderer.setShortFormProvider(
          new DefaultPrefixManager("http://www.mindswap.org/ontologies/tambis-full.owl#"));
      ToStringRenderer.getInstance().setRenderer(renderer);
      OWLOntologyManager man = OWLManager.createOWLOntologyManager();
      OWLOntology ont =
          man.loadOntology(
              IRI.create(
                  "http://owl.cs.manchester.ac.uk/repository/download?ontology=http://www.cs.manchester.ac.uk/owl/ontologies/tambis-patched.owl"));

      System.out.println("Loaded!");
      OWLReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();
      OWLReasoner reasoner = reasonerFactory.createNonBufferingReasoner(ont);
      reasoner.getUnsatisfiableClasses();
      ExplanationBasedRootClassFinder rdr =
          new ExplanationBasedRootClassFinder(man, reasoner, reasonerFactory);
      for (OWLClass cls : rdr.getRootUnsatisfiableClasses()) System.out.println("ROOT! " + cls);
    } catch (TimeOutException e) {
      e.printStackTrace();
    } catch (ReasonerInterruptedException e) {
      e.printStackTrace();
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
  }
Example 6
  @Test
  public void test03() {
    File file = new File("TestData/EliminateTransTest03.owl");
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ontology;
    try {
      ontology = man.loadOntologyFromOntologyDocument(file);

      HornSHIQNormalizer normalizer = new HornSHIQNormalizer();
      OWLOntology normalizedOnt = normalizer.normalize(ontology);

      HornALCHIQTransNormalizer normalizer1 = new HornALCHIQTransNormalizer();
      OWLOntology normalizedOnt1 = normalizer1.normalize(normalizedOnt);

      HornALCHIQNormalizer normalizer2 = new HornALCHIQNormalizer();
      OWLOntology normalizedOnt2 = normalizer2.normalize(normalizedOnt1);
      man.saveOntology(
          normalizedOnt2, IRI.create(new File("TestData/EliminateTransTest03Norm.owl")));
    } catch (OWLOntologyCreationException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (OWLOntologyStorageException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
  }
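A note on the test above: it saves the normalized ontology but asserts nothing, so it can only fail via an exception. A minimal closing check, sketched under the assumption that JUnit's assertTrue is available and that reloading the saved file is acceptable here, could be appended at the end of the try block:

      OWLOntology roundTrip =
          OWLManager.createOWLOntologyManager()
              .loadOntologyFromOntologyDocument(new File("TestData/EliminateTransTest03Norm.owl"));
      // At minimum, the saved normalization should parse back and be non-empty.
      assertTrue(roundTrip.getAxiomCount() > 0);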
Example 7
  private void runWithSeparateFiles() {
    if (owlFile == null) {
      throw new NullPointerException("You have to specify an ontology file!");
    }

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance();
    try {
      ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));

      if (disableReasoning) {
        /*
         * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
         */
        ontology = extractDeclarations(manager, ontology);
      }

      Collection<Predicate> predicates = new ArrayList<>();

      for (OWLClass owlClass : ontology.getClassesInSignature()) {
        Predicate predicate = obdaDataFactory.getClassPredicate(owlClass.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString());
        predicates.add(predicate);
      }

      OBDAModel obdaModel = loadMappingFile(mappingFile);

      Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology);

      obdaModel.declareAll(inputOntology.getVocabulary());

      int numPredicates = predicates.size();

      int i = 1;
      for (Predicate predicate : predicates) {
        System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates));
        serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format);
        i++;
      }

    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
 public PartOfOntology(String directory, String file) {
   OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
   try {
     ontology = manager.loadOntologyFromOntologyDocument(new File(directory, file));
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
     log(LogLevel.DEBUG, "Failed to load file " + file + " from directory " + directory);
     return;
   }
   success = true;
 }
Example 9
 public LsignatureExtractorViaInverseRewritingLauncher(
     OWLOntology ontology, LogicFragment fragment) {
   this.ontology = null;
   try {
     manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
   this.fragment = fragment;
 }
Example 10
 public LsignatureExtractorLauncher(
     OWLOntology ontology, LogicFragment fragment, boolean integrateRangesFirst) {
   this.ontology = null;
   try {
     OWLOntologyManager manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
   this.fragment = fragment;
   this.integrateRangesFirst = integrateRangesFirst;
 }
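Examples 9 and 10 duplicate the same copy-the-ontology idiom (create an empty ontology on the source's manager, then add all axioms). A sketch of how it could be factored out; copyOntology is a name introduced here for illustration, not part of the original code:

  static OWLOntology copyOntology(OWLOntology source) throws OWLOntologyCreationException {
    // Reusing the source's manager keeps IRI mappers and configuration intact.
    OWLOntologyManager manager = source.getOWLOntologyManager();
    OWLOntology copy = manager.createOntology();
    manager.addAxioms(copy, source.getAxioms());
    return copy;
  }

Both constructors would then reduce to this.ontology = copyOntology(ontology) inside their try blocks.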
Example 11
  protected OWLOntology loadBigOntology(String filename) {

    File file = new File(System.getenv("bigontosdir") + filename + ".owl");
    OWLOntology ontology = null;
    try {
      ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(file);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    return ontology;
  }
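The method above concatenates System.getenv("bigontosdir") straight into a path, so a missing variable silently produces a file named "null<filename>.owl" and a null return. A hedged variant (loadBigOntologySafely is a hypothetical name) that fails fast and does not rely on the variable ending with a separator:

  protected OWLOntology loadBigOntologySafely(String filename) throws OWLOntologyCreationException {
    String dir = System.getenv("bigontosdir");
    if (dir == null) {
      throw new IllegalStateException("Environment variable bigontosdir is not set");
    }
    // new File(parent, child) inserts the path separator itself.
    File file = new File(dir, filename + ".owl");
    return OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(file);
  }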
Example 12
  @Override
  public boolean test() {
    // Load an example ontology.
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    try {
      ontology = manager.loadOntologyFromOntologyDocument(new File("model/mcs_ontology.owl"));
    } catch (OWLOntologyCreationException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    // We need a reasoner to do our query answering

    // These two lines are the only relevant difference between this code
    // and the original example
    // This example uses HermiT: http://hermit-reasoner.com/
    OWLReasoner reasoner = new Reasoner.ReasonerFactory().createReasoner(ontology);

    ShortFormProvider shortFormProvider = new SimpleShortFormProvider();
    // Create the DLQueryPrinter helper class. This will manage the
    // parsing of input and printing of results
    DLQueryPrinter dlQueryPrinter =
        new DLQueryPrinter(new DLQueryEngine(reasoner, shortFormProvider), shortFormProvider);
    // Enter the query loop. A user is expected to enter a class
    // expression on the command line.
    BufferedReader br = null;
    try {
      br = new BufferedReader(new InputStreamReader(System.in, "UTF-8"));
    } catch (UnsupportedEncodingException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    while (true) {
      System.out.println(
          "Type a class expression in Manchester Syntax and press Enter (or press x to exit):");
      String classExpression = null;
      try {
        classExpression = br.readLine();
      } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
      }
      // Check for exit condition
      if (classExpression == null || classExpression.equalsIgnoreCase("x")) {
        break;
      }
      dlQueryPrinter.askQuery(classExpression.trim());
      System.out.println();
    }
    return true;
  }
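A small note on the reader setup in the method above: on Java 7+, passing a java.nio.charset.StandardCharsets constant instead of the "UTF-8" string cannot throw UnsupportedEncodingException, so the catch block (and the risk of br staying null) disappears. A sketch:

    BufferedReader br =
        new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8));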
Example 13
  static {
    try {
      /** Configuration of the manager */
      manager.setSilentMissingImportsHandling(true);
      manager.addMissingImportListener(
          new MissingImportListener() {
            @Override
            public void importMissing(MissingImportEvent event) {
              log.warn("Missing import! URI was: {}", event.getImportedOntologyURI());
            }
          });
      manager.addOntologyChangeListener(
          new OWLOntologyChangeListener() {
            @Override
            public void ontologiesChanged(List<? extends OWLOntologyChange> arg0)
                throws OWLException {
              for (OWLOntologyChange change : arg0) {
                log.debug("{} TO {}", change, change.getOntology());
              }
            }
          });
      /** Loading test ontologies once for all */
      manager.loadOntologyFromOntologyDocument(FOAF_LOCATION.openStream());
      manager.loadOntologyFromOntologyDocument(DBPEDIA_LOCATION.openStream());
      // Test 1
      manager.loadOntologyFromOntologyDocument(TEST_1_LOCATION.openStream());
      manager.loadOntologyFromOntologyDocument(TEST_1_expected_LOCATION.openStream());
      // Test 2
      manager.loadOntologyFromOntologyDocument(TEST_2_LOCATION.openStream());
      manager.loadOntologyFromOntologyDocument(TEST_2_expected_LOCATION.openStream());
      // Test 3
      manager.loadOntologyFromOntologyDocument(TEST_3_LOCATION.openStream());
      manager.loadOntologyFromOntologyDocument(TEST_3_rules_LOCATION.openStream());
      manager.loadOntologyFromOntologyDocument(TEST_3_expected_LOCATION.openStream());

    } catch (OWLOntologyCreationException e) {
      log.error(
          "A {} has been thrown while creating the ontology. Message was: {}",
          e.getClass(),
          e.getLocalizedMessage());
    } catch (IOException e) {
      log.error(
          "A {} has been thrown while loading the ontology from the location {}",
          e.getClass(),
          FOAF_LOCATION.toString());
      log.error("Message was: {}", e.getLocalizedMessage());
    }
  }
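manager.setSilentMissingImportsHandling(true) is the manager-wide way to tolerate missing imports. Assuming this codebase is on OWL API 3.4 or later, the same effect is usually obtained per load through a loader configuration, sketched here against the FOAF stream used above:

      OWLOntologyLoaderConfiguration config =
          new OWLOntologyLoaderConfiguration()
              .setMissingImportHandlingStrategy(MissingImportHandlingStrategy.SILENT);
      // A document source wraps the stream, allowing the configuration overload to be used.
      manager.loadOntologyFromOntologyDocument(
          new StreamDocumentSource(FOAF_LOCATION.openStream()), config);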
Example 14
 @Override
 public boolean prepare() {
   // Create the manager
   manager = OWLManager.createOWLOntologyManager();
   // File with an existing ontology - make sure it's there!
   fClass = new File("model/mcs_ontology.owl");
   // Load the ontology from the file
   try {
     ontology = manager.loadOntologyFromOntologyDocument(fClass);
   } catch (OWLOntologyCreationException e) {
     // TODO Auto-generated catch block
     e.printStackTrace();
   }
   factory = new MyFactory(ontology);
   return true;
 }
Example 15
  @Override
  public boolean test() {
    // Check if the ontology contains any axioms
    System.out.println("Number of axioms: " + ontology.getAxiomCount());
    // Every ontology has a unique ID.
    System.out.println("Current Ontology ID: " + ontology.getOntologyID());
    // test of CRUD
    // test of Create
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    System.out.println("Create a new child ");
    factory.createChild("Nicola");
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    // test of Read
    Child c = factory.getChild("Nicola");
    System.out.println(c.getOwlIndividual());
    // TODO: test of Update

    // test of Delete
    c.delete();
    System.out.println("Number of children: " + factory.getAllChildInstances().size());

    // save ABox, TBox, RBox to separate files.
    try {
      ontlgAbox = manager.createOntology(ontology.getABoxAxioms(true));
      ontlgTbox = manager.createOntology(ontology.getTBoxAxioms(true));
      ontlgRbox = manager.createOntology(ontology.getRBoxAxioms(true));
    } catch (OWLOntologyCreationException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    try {
      manager.saveOntology(ontlgAbox, IRI.create(new File("individual/Abox.owl")));
      manager.saveOntology(ontlgTbox, IRI.create(new File("individual/Tbox.owl")));
      manager.saveOntology(ontlgRbox, IRI.create(new File("individual/Rbox.owl")));
    } catch (OWLOntologyStorageException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    return true;
  }
  public Set<OWLClass> getRootUnsatisfiableClasses() {
    //	    StructureBasedRootClassFinder srd = new StructureBasedRootClassFinder(this.baseReasoner);
    StructuralRootDerivedReasoner srd =
        new StructuralRootDerivedReasoner(this.manager, this.baseReasoner, this.reasonerFactory);
    Set<OWLClass> estimatedRoots = srd.getRootUnsatisfiableClasses();
    this.cls2JustificationMap = new HashMap<OWLClass, Set<Explanation>>();
    Set<OWLAxiom> allAxioms = new HashSet<OWLAxiom>();

    for (OWLOntology ont : this.baseReasoner.getRootOntology().getImportsClosure()) {
      allAxioms.addAll(ont.getLogicalAxioms());
    }

    for (OWLClass cls : estimatedRoots) {
      this.cls2JustificationMap.put(cls, new HashSet<Explanation>());
      System.out.println("POTENTIAL ROOT: " + cls);
    }
    System.out.println("Finding real roots from " + estimatedRoots.size() + " estimated roots");

    int done = 0;
    this.roots.addAll(estimatedRoots);
    for (OWLClass estimatedRoot : estimatedRoots) {
      try {
        PelletExplanationGenerator gen =
            new PelletExplanationGenerator(manager.createOntology(allAxioms));
        OWLDataFactory df = this.manager.getOWLDataFactory();
        Set<Explanation> expls =
            gen.getExplanations(df.getOWLSubClassOfAxiom(estimatedRoot, df.getOWLNothing()));
        cls2JustificationMap.get(estimatedRoot).addAll(expls);
        ++done;
        System.out.println("Done " + done);
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
    }
    for (OWLClass clsA : estimatedRoots) {
      for (OWLClass clsB : estimatedRoots)
        if (!clsA.equals(clsB)) {
          Set<Explanation> clsAExpls = cls2JustificationMap.get(clsA);
          Set<Explanation> clsBExpls = cls2JustificationMap.get(clsB);
          boolean clsARootForClsB = false;
          boolean clsBRootForClsA = false;

          for (Explanation clsAExpl : clsAExpls) {
            for (Explanation clsBExpl : clsBExpls)
              if (isRootFor(clsAExpl, clsBExpl)) {
                clsARootForClsB = true;
              } else if (isRootFor(clsBExpl, clsAExpl)) {
                clsBRootForClsA = true;
              }
          }

          if ((!clsARootForClsB) || (!clsBRootForClsA))
            if (clsARootForClsB) {
              this.roots.remove(clsB);
            } else if (clsBRootForClsA) this.roots.remove(clsA);
        }
    }

    return this.roots;
  }
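A performance note on the justification loop above: the ontology built from allAxioms does not change between iterations, so the PelletExplanationGenerator could be created once before the loop; this sketch assumes the generator is reusable across successive getExplanations calls:

    PelletExplanationGenerator gen =
        new PelletExplanationGenerator(manager.createOntology(allAxioms));
    OWLDataFactory df = this.manager.getOWLDataFactory();
    for (OWLClass estimatedRoot : estimatedRoots) {
      Set<Explanation> expls =
          gen.getExplanations(df.getOWLSubClassOfAxiom(estimatedRoot, df.getOWLNothing()));
      cls2JustificationMap.get(estimatedRoot).addAll(expls);
    }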
  public void testRenderAndParse() {
    List<File> renderedFiles = new ArrayList<File>();
    String dblocation = System.getProperty("java.io.tmpdir") + File.separator + "hgdbtest";
    HGDBOntologyManager manager = HGOntologyManagerFactory.getOntologyManager(dblocation);
    HGDBOntologyRepository repo = new HGDBOntologyRepository(dblocation);
    VersionManager versionManager = manager.getVersionManager();
    HyperGraph graph = manager.getOntologyRepository().getHyperGraph();

    //
    // IMPORT AND RENDER
    //
    try {
      // repo.dropHypergraph();
      repo.deleteAllOntologies();
      // System.out.println("Running GC");
      // CANNOT RUN GC nullHANDLE problem !!! repo.runGarbageCollector();
      URL ontologyUrl = this.getClass().getResource("/sampleOntology.owl");
      IRI targetIRI = ImportOntologies.importOntology(ontologyUrl, manager);
      // IRI targetIRI = ImportOntologies.importOntology(f2, manager);
      HGDBOntology o = (HGDBOntology) manager.loadOntologyFromOntologyDocument(targetIRI);
      VersionedOntology vo = versionManager.versioned(o.getAtomHandle());
      // MANIPULATE REMOVE CHANGED
      Object[] axioms = o.getAxioms().toArray();
      // remove roughly 10% of the axioms, committing after each 1% chunk
      for (int i = 0; i < axioms.length / 10; i++) {
        int j = i;
        for (; j < i + axioms.length / 100; j++) {
          if (j < axioms.length) {
            manager.applyChange(new RemoveAxiom(o, (OWLAxiom) axioms[j]));
          }
        }
        i = j;
        vo.commit("SameUser", " commit no " + i);
      }
      // RENDER VERSIONED ONTOLOGY, includes data

      // TODO...
      VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
      VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
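      // NOTE: no revision index is inserted between "Revision-" and ".xml" here, while the
      // parse step below reads "... Revision-" + 10 + ".xml"; the two names only match if a
      // revision index is added (as in the commented-out loop below).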
      File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + ".xml");
      renderedFiles.add(fx);
      Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
      r.render(vo, null, fwriter, c);
      //			for (int i = 0; i < vo.getArity(); i++)
      //			{
      //				VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
      //				//c.setLastRevisionIndex(i);
      //				VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      //				File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + i + ".xml");
      //				// File fx = new
      //				// File("C:\\_CiRM\\testontos\\CountyVersioned-Rev-"+ i +
      //				// ".vowlxml");
      //				renderedFiles.add(fx);
      //				// File fx = new File("C:\\_CiRM\\testontos\\1 csr-Rev-"+ i +
      //				// ".vowlxml");
      //				Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx),
      // Charset.forName("UTF-8"));
      //				// Full export
      //				r.render(vo, fwriter, c);
      //			}
      System.out.println("DELETE ALL ONTOLOGIES");
      repo.deleteAllOntologies();
      GarbageCollector gc = new GarbageCollector(repo);
      gc.runGarbageCollection(GarbageCollector.MODE_DELETED_ONTOLOGIES);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    } catch (IOException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (OWLRendererException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }

    //
    // PARSE
    //
    File f = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
    System.out.println("PARSING: " + f + " length: " + (f.length() / 1024) + " kB");
    OWLOntologyDocumentSource source = new FileDocumentSource(f);
    VOWLXMLParser parser = new VOWLXMLParser();
    OWLOntologyEx onto = new OWLTempOntologyImpl(manager, new OWLOntologyID());
    // must have onto for manager in super class
    VOWLXMLDocument versionedOntologyRoot = new VOWLXMLDocument(onto);
    //
    // Create VersionedOntology Revision 10
    try {
      parser.parse(graph, source, versionedOntologyRoot, new OWLOntologyLoaderConfiguration());
      System.out.println("PARSING FINISHED.");
    } catch (OWLOntologyChangeException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (UnloadableImportException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (OWLParserException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    } catch (IOException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
    }
    if (versionedOntologyRoot.getRenderConfig().heads().isEmpty()
        && versionedOntologyRoot
            .getRenderConfig()
            .roots()
            .contains(versionedOntologyRoot.getRenderConfig().firstRevision())) {
      OWLOntologyID ontologyID = versionedOntologyRoot.getRevisionData().getOntologyID();
      IRI documentIRI =
          IRI.create("hgdb://" + ontologyID.getDefaultDocumentIRI().toString().substring(7));
      HGPersistentHandle ontologyUUID =
          repo.getHyperGraph().getHandleFactory().makeHandle(versionedOntologyRoot.getOntologyID());
      try {
        System.out.println("Storing ontology data for : " + ontologyUUID);
        HGDBOntology o =
            manager
                .getOntologyRepository()
                .createOWLOntology(ontologyID, documentIRI, ontologyUUID);
        storeFromTo(versionedOntologyRoot.getRevisionData(), o);
      } catch (HGDBOntologyAlreadyExistsByDocumentIRIException e) {
        e.printStackTrace();
      } catch (HGDBOntologyAlreadyExistsByOntologyIDException e) {
        e.printStackTrace();
      } catch (HGDBOntologyAlreadyExistsByOntologyUUIDException e) {
        e.printStackTrace();
      }
      // Add version control with full matching history.
      System.out.println("Adding version control history to : " + ontologyUUID);
      //			VersionedOntology voParsed = new VersionedOntology(versionedOntologyRoot.getRevisions(),
      //					versionedOntologyRoot.getChangesets(), graph);
      //			VHGDBOntologyRepository vrepo = (VHGDBOntologyRepository)
      // manager.getOntologyRepository();
      //			System.out.println("Versioned Repository Contents: ");
      //			for (VersionedOntology vox : vrepo.getVersionControlledOntologies())
      //			{
      //				System.out.println("Versioned Ontology: " + vox.getWorkingSetData());
      //				System.out.println("Versioned Ontology Revs: " + vox.getNrOfRevisions());
      //			}
      //			//
      //			// Rendering FULL Versioned Ontology
      //			//
      //			System.out.println("Rendering full versioned ontology after parse and store: " +
      // ontologyUUID);
      //			VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      //			File fx = new File(TESTFILE.getAbsolutePath() + "FULL-afterParse.xml");
      //			Writer fwriter;
      //			try
      //			{
      //				fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
      //				r.render(voParsed, fwriter);
      //			}
      //			catch (IOException e)
      //			{
      //				e.printStackTrace();
      //			}
      //			catch (OWLRendererException e)
      //			{
      //				e.printStackTrace();
      //			}
    } else {
      System.out.println("ERROR: EXPECTING COMPLETE VERSIONED ONTOLOGY");
    }
    // }
  }
Example 18
  public OWLOntology findLsignature(
      OWLOntology ontology, LogicFragment fragment, Statistics stats) {
    Timer t = new Timer();
    this.stats = stats;
    Logger_MORe.logInfo("extracting " + fragment.toString() + "-signature");
    OWLOntology ret = null;
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    try {
      ret = manager.createOntology();
      manager.addAxioms(ret, ontology.getAxioms());
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    lSignatureClasses = new HashSet<OWLClass>();
    lSignatureOther = new HashSet<OWLEntity>();
    compSignatureClasses = new HashSet<OWLClass>();
    compSignatureOther = new HashSet<OWLEntity>();

    LsignatureExtractorLauncher elkSignatureExtractorLauncher = null;
    LsignatureExtractorLauncher elkSignatureExtractorIntegratingRangesLauncher = null;
    LsignatureExtractorViaInverseRewritingLauncher elkSignatureExtractorRewritingInversesLauncher =
        null;

    ForkJoinPool executor = new ForkJoinPool();
    elkSignatureExtractorLauncher =
        new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, false);
    executor.execute(elkSignatureExtractorLauncher);

    if (ret != null) {
      // otherwise we have nowhere to return the axioms in the normalised ontologies necessary to
      // really classify all the extra classes in the lSignature
      if (rewriteInverses) {
        elkSignatureExtractorRewritingInversesLauncher =
            new LsignatureExtractorViaInverseRewritingLauncher(ontology, LogicFragment.ELK);
        executor.execute(elkSignatureExtractorRewritingInversesLauncher);
      }
      if (integrateRanges) {
        elkSignatureExtractorIntegratingRangesLauncher =
            new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, true);
        executor.execute(elkSignatureExtractorIntegratingRangesLauncher);
      }

      // check the output of the normal ELKsignature and cancel the other threads if the lSig is the
      // whole signature
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());

      if (compSignatureClasses.isEmpty())
        cancelTasks(
            elkSignatureExtractorIntegratingRangesLauncher,
            elkSignatureExtractorRewritingInversesLauncher);
      else {
        if (elkSignatureExtractorRewritingInversesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorRewritingInversesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorViaInverseRewritingLauncher)
                      elkSignatureExtractorRewritingInversesLauncher)
                  .getOntology()
                  .getAxioms());
        }
        if (compSignatureClasses.isEmpty())
          cancelTasks(elkSignatureExtractorRewritingInversesLauncher);
        else if (elkSignatureExtractorIntegratingRangesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorIntegratingRangesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorLauncher) elkSignatureExtractorIntegratingRangesLauncher)
                  .getOntology()
                  .getAxioms());
        }
      }
      stats.updateLsignatureSize(lSignatureClasses.size(), true);
    } else {
      ret = ontology;
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
    }

    Logger_MORe.logInfo(lSignatureClasses.size() + " classes in lSignature");
    Logger_MORe.logDebug(lSignatureClasses.toString());
    Logger_MORe.logInfo(compSignatureClasses.size() + " classes in compSignature");

    // might be a good idea to try to isolate extra axioms in the normalisation/rewriting - is this
    // possible/worth the effort?
    // check the order in which we try to extend the lSignature with each of the rewritten
    // ontologies and consider whether one may be better than the other
    Logger_MORe.logDebug(t.duration() + "s to find Lsignature");

    return ret;
  }
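The launchers joined above follow the raw ForkJoinTask protocol: exec() performs the work and returns true on completion, executor.execute(...) schedules the task, and join() blocks until the result is available. The same pattern in miniature, with a hypothetical task (RecursiveTask is the idiomatic subclass when a value is returned):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveTask;

public class ForkJoinSketch {
  public static void main(String[] args) {
    ForkJoinPool executor = new ForkJoinPool();
    RecursiveTask<Integer> task =
        new RecursiveTask<Integer>() {
          @Override
          protected Integer compute() {
            return 6 * 7; // placeholder work
          }
        };
    executor.execute(task); // schedule asynchronously, as with the launchers above
    System.out.println(task.join()); // block until the result is ready
  }
}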
Example 19
    @Override
    protected boolean exec() {
      Timer t = new Timer();
      if (ontology == null) {
        extractor = null;
        return true;
      }
      IRI iri =
          IRI.create("http://www.cs.ox.ac.uk/isg/tools/MORe/ontologies/inverseRewritingModule.owl");
      extractor.findLsignature(ontology, LogicFragment.SHOIQ);
      if (containsNonInternalClasses(
          extractor
              .getCompSignature())) { // then the ontology goes beyond SHOIQ and we need to work
        // with a SHOIQ module rather than the whole ontology
        Set<OWLEntity> aux = getNonInternalClasses(extractor.getLsignature());
        if (aux.isEmpty()) {
          extractor = null;
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - empty SHOIQ-signature)");
          return true;
        }
        SyntacticLocalityModuleExtractor moduleExtractor =
            new SyntacticLocalityModuleExtractor(manager, ontology, ModuleType.BOT);
        try {
          //					ontology = manager.createOntology(iri);
          //					manager.addAxioms(ontology, moduleExtractor.extract(aux));
          ontology = moduleExtractor.extractAsOntology(aux, iri);
        } catch (OWLOntologyCreationException e1) {
          extractor = null;
          e1.printStackTrace();
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - exception creating a SHOIQ module)");
          return true;
        }
      }

      // if we get this far then we have a nonempty ontology (maybe module) that we need to
      // normalize and then rewrite
      OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, true, true);
      Rewriter rewriter =
          new Rewriter(normalization.getNormalizedOntology(), normalization.getSortedGCIs());
      if (manager.contains(iri)) manager.removeOntology(ontology);
      Set<OWLAxiom> rewrittenAxioms = rewriter.getRewrittenOntology();
      if (!rewriter.anyRewrittenRoles()) {
        extractor = null;
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - could not rewrite any roles)");
        return true;
      }
      try {
        ontology = manager.createOntology();
        manager.addAxioms(ontology, rewrittenAxioms);
        extractor = new LsignatureExtractor_reducedGreedyness();
        extractor.findLsignature(ontology, fragment);
      } catch (OWLOntologyCreationException e1) {
        extractor = null;
        e1.printStackTrace();
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - exception creating ontology for rewritten axioms)");
        return true;
      }
      Logger_MORe.logDebug(t.duration() + "s to find Lsignature with inverseRewriting");
      return true;
    }
Example 20
  @SuppressWarnings("unchecked")
  public String execute() {

    String sawsdlExt = ".sawsdl";
    String wsdlExt = ".wsdl";
    String sawadlExt = ".sawadl";
    String wadlExt = ".wadl";
    StringBuffer buf = new StringBuffer();

    SAXBuilder sbuilder = new SAXBuilder();
    Document doc = null;

    @SuppressWarnings("rawtypes")
    Map session = ActionContext.getContext().getSession();
    String importURL = "";
    errormsg = "";
    String filename = "";

    System.out.println("wsloc = " + wsloc);
    if (WSFile != null) System.out.println("OWLFile size = " + WSFile.getTotalSpace());

    try {
      XMLParser wsParser = null;
      session.remove("wsname");
      session.remove("wsdlparser");
      session.remove("wadlparser");
      if (wsloc.indexOf("http:") != -1) {
        importURL = wsloc;
        if (wsloc.endsWith(wsdlExt) || wsloc.endsWith(sawsdlExt)) {
          doc = sbuilder.build(importURL);
          wsParser = new SAWSDLParser(doc);
          session.put("wsdlparser", wsParser);
          session.remove("wadlparser");
          int start = importURL.lastIndexOf("/");
          filename = importURL.substring(start, importURL.length());
          session.put("wsname", filename);
        } else if (wsloc.endsWith(wadlExt) || wsloc.endsWith(sawadlExt)) {
          doc = sbuilder.build(importURL);
          wsParser = new WADLParser(doc);
          session.put("wadlparser", wsParser);
          session.remove("wsdlparser");
          int start = importURL.lastIndexOf("/");
          filename = importURL.substring(start, importURL.length());
          session.put("wsname", filename);
        }
      } else {
        if (WSFile != null) {
          if (wsloc.endsWith(wsdlExt) || wsloc.endsWith(sawsdlExt)) {
            doc = sbuilder.build(WSFile);
            wsParser = new SAWSDLParser(doc);
            session.put("wsdlparser", wsParser);
            filename = wsloc;
            session.put("wsname", filename);
          } else if (wsloc.endsWith(wadlExt) || wsloc.endsWith(sawadlExt)) {
            doc = sbuilder.build(WSFile);
            wsParser = new WADLParser(doc);
            session.put("wadlparser", wsParser);
            filename = wsloc;
            session.put("wsname", filename);
          } else {
            errormsg = "File is not wsdl or wadl file.";
          }
        } else {
          errormsg = "WSDL file lost.";
        }
      }

      if (wsParser == null) {
        errormsg = "WSDL is invalidate";
        return ERROR;
      }

      if (isWSDL(doc)) {

        boolean hasSAWSDLNS = false;
        @SuppressWarnings("rawtypes")
        List nameSpaces = doc.getRootElement().getAdditionalNamespaces();
        for (int i = 0; i < nameSpaces.size(); i++) {
          Namespace ns = (Namespace) nameSpaces.get(i);
          if (ns.getURI().equalsIgnoreCase(SAWSDLParser.sawsdlNS.getURI())) {
            hasSAWSDLNS = true;
            break;
          }
        }
        if (!hasSAWSDLNS) {
          doc.getRootElement().addNamespaceDeclaration(SAWSDLParser.sawsdlNS);
        }

        boolean wsdlV1 = ((SAWSDLParser) wsParser).isWsdlV1();
        if (wsdlV1) {
          LoadWSDLTree.loadWSDL((SAWSDLParser) wsParser, buf, filename);
        } else {
          // WSDL 2.0 is not implemented yet
        }
        innerTreeHtml = buf.toString();
        type = "wsdl";
      } else if (isWADL(doc)) {
        LoadWADLTree.loadWADL((WADLParser) wsParser, buf, wsloc);
        innerTreeHtml = buf.toString();
        type = "wadl";
      }

    } catch (IOException e) {
      e.printStackTrace();
      errormsg = e.toString();
    } catch (URISyntaxException e) {
      e.printStackTrace();
      errormsg = e.toString();
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      errormsg = e.toString();
    } catch (JDOMException e) {
      e.printStackTrace();
      errormsg = e.toString();
    } catch (Exception e) {
      e.printStackTrace();
      errormsg = e.toString();
    }

    System.out.println("errormsg = " + errormsg);

    System.out.println("draw finish");

    return SUCCESS;
  }
Example 21
  public static void main(String[] args) {

    try {
      // Create our ontology manager in the usual way.
      OWLOntologyManager manager = OWLManager.createOWLOntologyManager();

      // Load a copy of the people+pets ontology.  We'll load the ontology from the web (it's
      // actually located in the TONES ontology repository).
      IRI docIRI = IRI.create(DOCUMENT_IRI);
      // We load the ontology from a document - our IRI points to it directly
      OWLOntology ont = manager.loadOntologyFromOntologyDocument(docIRI);
      System.out.println("Loaded " + ont.getOntologyID());

      // We need to create an instance of OWLReasoner.  An OWLReasoner provides the basic
      // query functionality that we need, for example the ability to obtain the subclasses
      // of a class etc.  To do this we use a reasoner factory.

      // Create a reasoner factory.  In this case, we will use HermiT, but we could also
      // use FaCT++ (http://code.google.com/p/factplusplus/) or
      // Pellet (http://clarkparsia.com/pellet).
      // Note that (as of 03 Feb 2010) FaCT++ and Pellet OWL API 3.0.0 compatible libraries
      // were expected to be available in the near future.

      // For now, we'll use HermiT.
      // HermiT can be downloaded from http://hermit-reasoner.com
      // Make sure you get the HermiT library and add it to your class path.  You can then
      // instantiate the HermiT reasoner factory (this requires importing
      // org.semanticweb.HermiT.Reasoner).
      OWLReasonerFactory reasonerFactory = new Reasoner.ReasonerFactory();

      // We'll now create an instance of an OWLReasoner (the implementation being provided by
      // HermiT, as we're using the HermiT reasoner factory).  There are two categories of
      // reasoner, Buffering and NonBuffering.  In our case, we'll create the buffering reasoner,
      // which is the default kind of reasoner.
      // We'll also attach a progress monitor to the reasoner.  To do this we set up a
      // configuration that knows about a progress monitor.

      // Create a console progress monitor.  This will print the reasoner progress out to the
      // console.
      ConsoleProgressMonitor progressMonitor = new ConsoleProgressMonitor();
      // Specify the progress monitor via a configuration.  We could also specify other setup
      // parameters in the configuration, and different reasoners may accept their own defined
      // parameters this way.
      OWLReasonerConfiguration config = new SimpleConfiguration(progressMonitor);
      // Create a reasoner that will reason over our ontology and its imports closure.  Pass in the
      // configuration.
      OWLReasoner reasoner = reasonerFactory.createReasoner(ont, config);

      // Ask the reasoner to do all the necessary work now
      reasoner.precomputeInferences();

      // We can determine if the ontology is actually consistent (in this case, it should be).
      boolean consistent = reasoner.isConsistent();
      System.out.println("Consistent: " + consistent);
      System.out.println("\n");

      // We can easily get a list of unsatisfiable classes.  (A class is unsatisfiable if it
      // can't possibly have any instances).  Note that the getUnsatisfiableClasses method
      // is really just a convenience method for obtaining the classes that are equivalent
      // to owl:Nothing.  In our case there should be just one unsatisfiable class - "mad_cow"
      // We ask the reasoner for the unsatisfiable classes, which returns the bottom node
      // in the class hierarchy (an unsatisfiable class is a subclass of every class).
      Node<OWLClass> bottomNode = reasoner.getUnsatisfiableClasses();
      // This node contains owl:Nothing and all the classes that are equivalent to owl:Nothing -
      // i.e. the unsatisfiable classes.
      // We just want to print out the unsatisfiable classes excluding owl:Nothing, and we can
      // used a convenience method on the node to get these
      Set<OWLClass> unsatisfiable = bottomNode.getEntitiesMinusBottom();
      if (!unsatisfiable.isEmpty()) {
        System.out.println("The following classes are unsatisfiable: ");
        for (OWLClass cls : unsatisfiable) {
          System.out.println("    " + cls);
        }
      } else {
        System.out.println("There are no unsatisfiable classes");
      }
      System.out.println("\n");

      // Now we want to query the reasoner for all descendants of vegetarian.  Vegetarians are
      // defined in the
      // ontology to be animals that don't eat animals or parts of animals.
      OWLDataFactory fac = manager.getOWLDataFactory();
      // Get a reference to the vegetarian class so that we can ask the reasoner about it.
      // The full IRI of this class happens to be:
      // <http://owl.man.ac.uk/2005/07/sssw/people#vegetarian>
      OWLClass vegetarian =
          fac.getOWLClass(IRI.create("http://owl.man.ac.uk/2005/07/sssw/people#vegetarian"));

      // Now use the reasoner to obtain the subclasses of vegetarian.
      // We can ask for the direct subclasses of vegetarian or all of the (proper) subclasses of
      // vegetarian.
      // In this case we just want the direct ones (which we specify by the "true" flag).
      NodeSet<OWLClass> subClses = reasoner.getSubClasses(vegetarian, true);

      // The reasoner returns a NodeSet, which represents a set of Nodes.
      // Each node in the set represents a subclass of vegetarian.  A node contains classes,
      // where each class in the node is equivalent.  For example, if we asked for the
      // subclasses of some class A and got back a NodeSet containing two nodes {B, C} and {D},
      // then A would have two proper subclasses.  One of these subclasses would be equivalent
      // to the class D, and the other would be the class that is equivalent to class B and
      // class C.

      // In this case, we don't particularly care about the equivalences, so we will flatten this
      // set of sets and print the result
      Set<OWLClass> clses = subClses.getFlattened();
      System.out.println("Subclasses of vegetarian: ");
      for (OWLClass cls : clses) {
        System.out.println("    " + cls);
      }
      System.out.println("\n");

      // In this case, we should find that the classes cow, sheep and giraffe are vegetarian.
      // Note that in this ontology only the class cow had been stated to be a subclass of
      // vegetarian.  The fact that sheep and giraffe are subclasses of vegetarian was implicit
      // in the ontology (through other things we had said), and this illustrates why it is
      // important to use a reasoner for querying an ontology.

      // We can easily retrieve the instances of a class.  In this example we'll obtain the
      // instances of
      // the class pet.  This class has a full IRI of <http://owl.man.ac.uk/2005/07/sssw/people#pet>

      // We need to obtain a reference to this class so that we can ask the reasoner about it.
      OWLClass pet =
          fac.getOWLClass(IRI.create("http://owl.man.ac.uk/2005/07/sssw/people#pet"));
      // Ask the reasoner for the instances of pet
      NodeSet<OWLNamedIndividual> individualsNodeSet = reasoner.getInstances(pet, true);
      // The reasoner returns a NodeSet again.  This time the NodeSet contains individuals.
      // Again, we just want the individuals, so get a flattened set.
      Set<OWLNamedIndividual> individuals = individualsNodeSet.getFlattened();
      System.out.println("Instances of pet: ");
      for (OWLNamedIndividual ind : individuals) {
        System.out.println("    " + ind);
      }
      System.out.println("\n");

      // Again, it's worth noting that not all of the individuals that are returned were explicitly
      // stated
      // to be pets.

      // Finally, we can ask for the property values (property assertions in OWL speak) for a given
      // individual
      // and property.
      // Let's get the property values for the individual Mick, the full IRI of which is
      // <http://owl.man.ac.uk/2005/07/sssw/people#Mick>

      // Get a reference to the individual Mick
      OWLNamedIndividual mick =
          fac.getOWLNamedIndividual(IRI.create("http://owl.man.ac.uk/2005/07/sssw/people#Mick"));

      // Let's get the pets of Mick
      // Get hold of the has_pet property which has a full IRI of
      // <http://owl.man.ac.uk/2005/07/sssw/people#has_pet>
      OWLObjectProperty hasPet =
          fac.getOWLObjectProperty(IRI.create("http://owl.man.ac.uk/2005/07/sssw/people#has_pet"));

      // Now ask the reasoner for the has_pet property values for Mick
      NodeSet<OWLNamedIndividual> petValuesNodeSet = reasoner.getObjectPropertyValues(mick, hasPet);
      Set<OWLNamedIndividual> values = petValuesNodeSet.getFlattened();
      System.out.println("The has_pet property values for Mick are: ");
      for (OWLNamedIndividual ind : values) {
        System.out.println("    " + ind);
      }

      // Notice that Mick has a pet Rex, which wasn't asserted in the ontology.

      // Finally, let's print out the class hierarchy.
      // Get hold of the top node in the class hierarchy (containing owl:Thing)
      // Now print the hierarchy out
      Node<OWLClass> topNode = reasoner.getTopClassNode();
      print(topNode, reasoner, 0);

    } catch (UnsupportedOperationException exception) {
      System.out.println("Unsupported reasoner operation.");
    } catch (OWLOntologyCreationException e) {
      System.out.println("Could not load the pizza ontology: " + e.getMessage());
    }
  }
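The walkthrough ends by calling a print(...) helper that is not part of the excerpt. A plausible reconstruction (ours, not necessarily the original author's) that prints the inferred class hierarchy by recursing over direct subclasses:

  private static void print(Node<OWLClass> node, OWLReasoner reasoner, int depth) {
    if (node.isBottomNode()) {
      return; // skip owl:Nothing and its equivalents
    }
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("    ");
    }
    System.out.println(indent + node.getEntities().toString());
    for (Node<OWLClass> child :
        reasoner.getSubClasses(node.getRepresentativeElement(), true).getNodes()) {
      print(child, reasoner, depth + 1);
    }
  }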
  private IRI executeRule(final Rule r, final IRI inputIRI) {
    try {
      PelletOptions.USE_ANNOTATION_SUPPORT = true;

      PelletOptions.TREAT_ALL_VARS_DISTINGUISHED = controller.isTreatAllVariablesDistinguished();

      QueryEngineType type = (QueryEngineType) controller.getQueryEngineType();

      final QueryExecution qe;
      final ByteArrayOutputStream w = new ByteArrayOutputStream();

      final Query qSelect = getSelectExampleQuery(r.getQuery());

      // The input ontology is needed by both branches below, so load it before
      // choosing the query engine.
      final OWLOntology queryOntology = getInputOntologyForRule(inputIRI);

      if (type.toPellet() != null) {

        final PelletReasoner reasoner =
            PelletReasonerFactory.getInstance().createReasoner(queryOntology);

        log.info("Ontology size: " + reasoner.getKB().getInfo());

        final Dataset ds = kb2ds(reasoner.getKB());

        final QueryExecution qeSelect =
            SparqlDLExecutionFactory.create(qSelect, ds, null, type.toPellet());

        final ResultSet rs = qeSelect.execSelect();
        controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

        qe =
            SparqlDLExecutionFactory.create(
                r.getQuery(), kb2ds(reasoner.getKB()), null, type.toPellet());
        qe.execConstruct().write(w);
      } else {
        final ByteArrayOutputStream w2 = new ByteArrayOutputStream();
        final Model model = ModelFactory.createDefaultModel();
        try {
          controller
              .getOWLOntologyManager()
              .saveOntology(queryOntology, new TurtleOntologyFormat(), w2);
          model.read(new ByteArrayInputStream(w2.toByteArray()), "", "TURTLE");

          final QueryExecution qeSelect = QueryExecutionFactory.create(qSelect, model);

          final ResultSet rs = qeSelect.execSelect();
          controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

          qe = QueryExecutionFactory.create(r.getQuery(), model);
          qe.execConstruct().write(w);
        } catch (OWLOntologyStorageException e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }

      final IRI outputIRI = getOntologyIRIForRuleName(r.getName());

      // load the generated ontology
      final OWLOntology generatedOntology =
          controller
              .getOWLOntologyManager()
              .loadOntologyFromOntologyDocument(new ByteArrayInputStream(w.toByteArray()));
      controller.updateOntology(
          generatedOntology,
          outputIRI,
          inputIRI,
          controller.getRuleSpec().getResultFile(r).toURI());
      controller.setStatus("Rule " + r.getName() + " successfully executed");
      return outputIRI;
    } catch (OWLOntologyCreationException e1) {
      controller.setStatus(e1.getMessage());
      return null;
    }
  }