/**
   * Initializes the semantic container components once the ontology URIs have been dispatched by
   * OwlDistributionInitializer.
   */
  public void semInit() throws ExceptionInInitializerError {
    if (isInitialized()) {
      /* Initialization of the local ontology. This ontology is never modified afterwards. */
      // Manager for the local ontology; only used here.
      OWLOntologyManager localOntoManager = OWLManager.createOWLOntologyManager();
      OWLOntologyIRIMapper localOntoMapper =
          new SimpleIRIMapper(getOntologyIri(), getPhysicalIri());
      localOntoManager.addIRIMapper(localOntoMapper);
      // Load the local ontology.
      try {
        this.localOntology = localOntoManager.loadOntology(getPhysicalIri());
      } catch (OWLOntologyCreationException e) {
        // Fail fast: the container is unusable without its local ontology.
        throw new ExceptionInInitializerError(e);
      }

      /*
       * Initialization of the reasoner's ontology, which will contain the local ontology plus any
       * additional axioms. We create the instance and attach it to a manager and a reasoner that
       * are reachable through getters. The instance stays the same, but its content changes.
       */
      // Manager for the reasoner's ontology.
      this.manager = OWLManager.createOWLOntologyManager();
      // Create a second ontology, identified by this container's namespace.
      try {
        IRI ontoName = IRI.create(this.nameSpace);
        OWLOntologyID ontoId = new OWLOntologyID(ontoName);
        this.reasoningOntology = this.manager.createOntology(ontoId);
      } catch (OWLOntologyCreationException e) {
        throw new ExceptionInInitializerError(e);
      }
      // Reasoner initialization: enable Pellet's incremental reasoning support.
      PelletReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();

      PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
      PelletOptions.USE_COMPLETION_QUEUE = true;

      // A non-buffering reasoner sees ontology changes immediately.
      PelletReasoner reasoner = reasonerFactory.createNonBufferingReasoner(reasoningOntology);

      // add the reasoner as an ontology change listener
      this.manager.addOntologyChangeListener(reasoner);

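      // Classify eagerly so later queries hit a precomputed class hierarchy.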
      reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
      // Other inference types (CLASS_ASSERTIONS, DATA_PROPERTY_HIERARCHY, DISJOINT_CLASSES,
      // OBJECT_PROPERTY_HIERARCHY, SAME_INDIVIDUAL) could be precomputed here as well.
      this.reasoner = reasoner;
    } else {
      // Reflection.getCallerClass identifies the caller for the error message.
      throw new ExceptionInInitializerError(
          "Ontology parameters not initialized; ontoParamsInit = "
              + Reflection.getCallerClass(2));
    }
  }
 public OWLOntology getMergedOntology() {
   final IRI mergedOntologyIRI =
       IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged");
   final OWLOntologyManager mm = controller.getOWLOntologyManager();
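   // Reuse the merged ontology if this manager has already created it.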
   if (mm.contains(mergedOntologyIRI)) {
     return mm.getOntology(mergedOntologyIRI);
   } else {
     try {
       final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI);
       mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat());
       final String iriString = mergedOntologyIRI.toURI().toString();
       final String mergedOntologyFileName =
           iriString.substring(iriString.lastIndexOf('/') + 1) + ".owl";
       mm.setOntologyDocumentIRI(
           mergedOntology,
           IRI.create(
               controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName));
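       // Bring in the query ontology via an imports declaration rather than copying its axioms.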
       mm.applyChange(
           new AddImport(
               mergedOntology,
               mm.getOWLDataFactory()
                   .getOWLImportsDeclaration(
                       queryOntology.getOntologyID().getDefaultDocumentIRI())));
       return mergedOntology;
     } catch (OWLOntologyCreationException e) {
       // Callers must be prepared for a null return when creation fails.
       e.printStackTrace();
       return null;
     }
   }
 }
 @Override
 protected boolean exec() {
   Timer t = new Timer();
   if (ontology == null) {
     lSigExtractor = null;
     return true;
   }
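    // Optionally normalize first so that property ranges are folded in before signature
    // extraction (the flags on OWLNormalization4MORe are assumed to control this).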
   if (integrateRangesFirst) {
     OWLNormalization4MORe normalization =
         new OWLNormalization4MORe(ontology, true, false, false);
     Set<OWLAxiom> axioms = normalization.getNormalizedOntology();
     try {
       OWLOntologyManager manager = ontology.getOWLOntologyManager();
       ontology = manager.createOntology();
       manager.addAxioms(ontology, axioms);
     } catch (OWLOntologyCreationException e) {
       e.printStackTrace();
       lSigExtractor = null;
       return true;
     }
   }
   lSigExtractor.findLsignature(ontology, fragment);
   if (!integrateRangesFirst) stats.updateNelkAxioms(lSigExtractor.nAxiomsInFragment());
   Logger_MORe.logDebug(
       t.duration() + "s to find Lsignature with integrateRangesFirst=" + integrateRangesFirst);
   return true;
 }
 private TaxonStore getStore(String targetStr, String prefixStr, String formatStr) {
   File targetFile = getSourceFile(targetStr);
   if (targetFile.exists()) targetFile.delete();
   if (OBOFORMATSTR.equals(formatStr)) {
     return new OBOStore(
         targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
   }
   if (OWLFORMATSTR.equals(formatStr)) {
     try {
       return new OWLStore(
           targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
      } catch (OWLOntologyCreationException e) {
        // Store creation failed; fall through to the unsupported-format error below.
        e.printStackTrace();
      }
   }
   // these source formats aren't storage formats (there's no ontology library for them) so the
   // store is implementation dependent (currently OBO)
    if (XREFFORMATSTR.equals(formatStr)
        || COLUMNFORMATSTR.equals(formatStr)
        || SYNONYMFORMATSTR.equals(formatStr)
        || ALLCOLUMNSFORMATSTR.equals(formatStr)) {
     return new OBOStore(
         targetFile.getAbsolutePath(), prefixStr, prefixStr.toLowerCase() + NAMESPACESUFFIX);
   }
   logger.error("Format " + formatStr + " not supported for merging");
   return null;
 }
  public static void main(String[] args) {
    try {
      SimpleRenderer renderer = new SimpleRenderer();
      renderer.setShortFormProvider(
          new DefaultPrefixManager("http://www.mindswap.org/ontologies/tambis-full.owl#"));
      ToStringRenderer.getInstance().setRenderer(renderer);
      OWLOntologyManager man = OWLManager.createOWLOntologyManager();
      OWLOntology ont =
          man.loadOntology(
              IRI.create(
                  "http://owl.cs.manchester.ac.uk/repository/download?ontology=http://www.cs.manchester.ac.uk/owl/ontologies/tambis-patched.owl"));

      System.out.println("Loaded!");
      OWLReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();
      OWLReasoner reasoner = reasonerFactory.createNonBufferingReasoner(ont);
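      // Trigger classification; the root finder below uses the detected unsatisfiable classes.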
      reasoner.getUnsatisfiableClasses();
      ExplanationBasedRootClassFinder rdr =
          new ExplanationBasedRootClassFinder(man, reasoner, reasonerFactory);
      for (OWLClass cls : rdr.getRootUnsatisfiableClasses()) System.out.println("ROOT! " + cls);
    } catch (TimeOutException | ReasonerInterruptedException | OWLOntologyCreationException e) {
      e.printStackTrace();
    }
  }
  @Test
  public void test03() {
    File file = new File("TestData/EliminateTransTest03.owl");
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ontology;
    try {
      ontology = man.loadOntologyFromOntologyDocument(file);

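      // Chain the normalizers: Horn-SHIQ normalization, then transitivity elimination,
      // then Horn-ALCHIQ normalization.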
      HornSHIQNormalizer normalizer = new HornSHIQNormalizer();
      OWLOntology normalizedOnt = normalizer.normalize(ontology);

      HornALCHIQTransNormalizer normalizer1 = new HornALCHIQTransNormalizer();
      OWLOntology normalizedOnt1 = normalizer1.normalize(normalizedOnt);

      HornALCHIQNormalizer normalizer2 = new HornALCHIQNormalizer();
      OWLOntology normalizedOnt2 = normalizer2.normalize(normalizedOnt1);
      man.saveOntology(
          normalizedOnt2, IRI.create(new File("TestData/EliminateTransTest03Norm.owl")));
    } catch (OWLOntologyCreationException | OWLOntologyStorageException e) {
      e.printStackTrace();
    }
  }
  private void runWithSeparateFiles() {
    if (owlFile == null) {
      throw new NullPointerException("You have to specify an ontology file!");
    }

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance();
    try {
      ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));

      if (disableReasoning) {
        /*
         * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
         */
        ontology = extractDeclarations(manager, ontology);
      }

      Collection<Predicate> predicates = new ArrayList<>();

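      // Collect one predicate for every class, data property, and object property in the
      // ontology signature; these drive the per-predicate materialization below.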
      for (OWLClass owlClass : ontology.getClassesInSignature()) {
        Predicate predicate = obdaDataFactory.getClassPredicate(owlClass.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString());
        predicates.add(predicate);
      }
      for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) {
        Predicate predicate =
            obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString());
        predicates.add(predicate);
      }

      OBDAModel obdaModel = loadMappingFile(mappingFile);

      Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology);

      obdaModel.declareAll(inputOntology.getVocabulary());

      int numPredicates = predicates.size();

      int i = 1;
      for (Predicate predicate : predicates) {
        System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates));
        serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format);
        i++;
      }

    } catch (Exception e) {
      // Covers OWLOntologyCreationException as well as mapping/serialization failures.
      e.printStackTrace();
    }
  }
 public PartOfOntology(String directory, String file) {
   OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
   try {
     ontology = manager.loadOntologyFromOntologyDocument(new File(directory, file));
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
     log(LogLevel.DEBUG, "Failed to load file " + file + " from directory " + directory);
     return;
   }
   success = true;
 }
 public LsignatureExtractorViaInverseRewritingLauncher(
     OWLOntology ontology, LogicFragment fragment) {
   this.ontology = null;
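    // Work on a private copy of the ontology; a null copy signals failure to exec().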
   try {
     manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
   this.fragment = fragment;
 }
  protected OWLOntology loadBigOntology(String filename) {

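    // The "bigontosdir" environment variable must point at the directory holding the .owl files.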
    File file = new File(System.getenv("bigontosdir") + filename + ".owl");
    OWLOntology ontology = null;
    try {
      ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(file);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    return ontology;
  }
 public LsignatureExtractorLauncher(
     OWLOntology ontology, LogicFragment fragment, boolean integrateRangesFirst) {
   this.ontology = null;
   try {
     OWLOntologyManager manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
   this.fragment = fragment;
   this.integrateRangesFirst = integrateRangesFirst;
 }
  @Override
  public boolean test() {
    // Load an example ontology.
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    try {
      ontology = manager.loadOntologyFromOntologyDocument(new File("model/mcs_ontology.owl"));
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      return false; // the reasoner below cannot be built without an ontology
    }
    // We need a reasoner to do our query answering

    // These two lines are the only relevant difference between this code
    // and the original example
    // This example uses HermiT: http://hermit-reasoner.com/
    OWLReasoner reasoner = new Reasoner.ReasonerFactory().createReasoner(ontology);

    ShortFormProvider shortFormProvider = new SimpleShortFormProvider();
    // Create the DLQueryPrinter helper class. This will manage the
    // parsing of input and printing of results
    DLQueryPrinter dlQueryPrinter =
        new DLQueryPrinter(new DLQueryEngine(reasoner, shortFormProvider), shortFormProvider);
    // Enter the query loop. A user is expected to enter class
    // expression on the command line.
    BufferedReader br;
    try {
      br = new BufferedReader(new InputStreamReader(System.in, "UTF-8"));
    } catch (UnsupportedEncodingException e) {
      e.printStackTrace();
      return false; // UTF-8 is always supported, but fail cleanly anyway
    }
    while (true) {
      System.out.println(
          "Type a class expression in Manchester Syntax and press Enter (or press x to exit):");
      String classExpression = null;
      try {
        classExpression = br.readLine();
      } catch (IOException e) {
        e.printStackTrace(); // classExpression stays null and the loop exits below
      }
      // Check for exit condition
      if (classExpression == null || classExpression.equalsIgnoreCase("x")) {
        break;
      }
      dlQueryPrinter.askQuery(classExpression.trim());
      System.out.println();
    }
    return true;
  }
 @Override
 public boolean prepare() {
   // Create the manager
   manager = OWLManager.createOWLOntologyManager();
   // File with an existing ontology - make sure it's there!
   fClass = new File("model/mcs_ontology.owl");
   // Load the ontology from the file
   try {
     ontology = manager.loadOntologyFromOntologyDocument(fClass);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      return false; // the factory below needs a loaded ontology
    }
   factory = new MyFactory(ontology);
   return true;
 }
  @Override
  public boolean test() {
    // Check if the ontology contains any axioms
    System.out.println("Number of axioms: " + ontology.getAxiomCount());
    // Every ontology has a unique ID.
    System.out.println("Current Ontology ID: " + ontology.getOntologyID());
    // test of CRUD
    // test of Create
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    System.out.println("Create a new child ");
    factory.createChild("Nicola");
    System.out.println("Number of children: " + factory.getAllChildInstances().size());
    // test of Read
    Child c = factory.getChild("Nicola");
    System.out.println(c.getOwlIndividual());
    // TODO: test of Update

    // test of Delete
    c.delete();
    System.out.println("Number of children: " + factory.getAllChildInstances().size());

    // save ABox, TBox, RBox to separate files.
    try {
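      // The boolean argument asks for the imports closure to be included.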
      ontlgAbox = manager.createOntology(ontology.getABoxAxioms(true));
      ontlgTbox = manager.createOntology(ontology.getTBoxAxioms(true));
      ontlgRbox = manager.createOntology(ontology.getRBoxAxioms(true));
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      return false; // nothing to save if the split ontologies could not be created
    }
    try {
      manager.saveOntology(ontlgAbox, IRI.create(new File("individual/Abox.owl")));
      manager.saveOntology(ontlgTbox, IRI.create(new File("individual/Tbox.owl")));
      manager.saveOntology(ontlgRbox, IRI.create(new File("individual/Rbox.owl")));
    } catch (OWLOntologyStorageException e) {
      e.printStackTrace();
    }
    return true;
  }
  public Set<OWLClass> getRootUnsatisfiableClasses() {
    // First approximate the roots structurally, then verify them with explanations.
    StructuralRootDerivedReasoner srd =
        new StructuralRootDerivedReasoner(this.manager, this.baseReasoner, this.reasonerFactory);
    Set<OWLClass> estimatedRoots = srd.getRootUnsatisfiableClasses();
    this.cls2JustificationMap = new HashMap<OWLClass, Set<Explanation>>();
    Set<OWLAxiom> allAxioms = new HashSet<OWLAxiom>();

    for (OWLOntology ont : this.baseReasoner.getRootOntology().getImportsClosure()) {
      allAxioms.addAll(ont.getLogicalAxioms());
    }

    for (OWLClass cls : estimatedRoots) {
      this.cls2JustificationMap.put(cls, new HashSet<Explanation>());
      System.out.println("POTENTIAL ROOT: " + cls);
    }
    System.out.println("Finding real roots from " + estimatedRoots.size() + " estimated roots");

    int done = 0;
    this.roots.addAll(estimatedRoots);
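    // For each estimated root, collect the justifications (explanations) of its unsatisfiability.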
    for (OWLClass estimatedRoot : estimatedRoots) {
      try {
        PelletExplanationGenerator gen =
            new PelletExplanationGenerator(manager.createOntology(allAxioms));
        OWLDataFactory df = this.manager.getOWLDataFactory();
        Set<Explanation> expls =
            gen.getExplanations(df.getOWLSubClassOfAxiom(estimatedRoot, df.getOWLNothing()));
        cls2JustificationMap.get(estimatedRoot).addAll(expls);
        ++done;
        System.out.println("Done " + done);
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
    }
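    // Compare justifications pairwise: if one class's unsatisfiability is rooted in another's
    // (isRootFor), the dependent class is removed from the root set.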
    for (OWLClass clsA : estimatedRoots) {
      for (OWLClass clsB : estimatedRoots)
        if (!clsA.equals(clsB)) {
          Set<Explanation> clsAExpls = cls2JustificationMap.get(clsA);
          Set<Explanation> clsBExpls = cls2JustificationMap.get(clsB);
          boolean clsARootForClsB = false;
          boolean clsBRootForClsA = false;

          for (Explanation clsAExpl : clsAExpls) {
            for (Explanation clsBExpl : clsBExpls)
              if (isRootFor(clsAExpl, clsBExpl)) {
                clsARootForClsB = true;
              } else if (isRootFor(clsBExpl, clsAExpl)) {
                clsBRootForClsA = true;
              }
          }

          if (clsARootForClsB && !clsBRootForClsA) {
            this.roots.remove(clsB);
          } else if (clsBRootForClsA && !clsARootForClsB) {
            this.roots.remove(clsA);
          }
        }
    }

    return this.roots;
  }
  public void testRenderAndParse() {
    List<File> renderedFiles = new ArrayList<File>();
    String dblocation = System.getProperty("java.io.tmpdir") + File.separator + "hgdbtest";
    HGDBOntologyManager manager = HGOntologyManagerFactory.getOntologyManager(dblocation);
    HGDBOntologyRepository repo = new HGDBOntologyRepository(dblocation);
    VersionManager versionManager = manager.getVersionManager();
    HyperGraph graph = manager.getOntologyRepository().getHyperGraph();

    //
    // IMPORT AND RENDER
    //
    try {
      // repo.dropHypergraph();
      repo.deleteAllOntologies();
      // NOTE: the garbage collector cannot run here (null-handle problem):
      // repo.runGarbageCollector();
      URL ontologyUrl = this.getClass().getResource("/sampleOntology.owl");
      IRI targetIRI = ImportOntologies.importOntology(ontologyUrl, manager);
      // IRI targetIRI = ImportOntologies.importOntology(f2, manager);
      HGDBOntology o = (HGDBOntology) manager.loadOntologyFromOntologyDocument(targetIRI);
      VersionedOntology vo = versionManager.versioned(o.getAtomHandle());
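      // Put the loaded ontology under version control so axiom removals can be committed as
      // revisions.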
      // Manipulate: remove axioms and commit the changes.
      Object[] axioms = o.getAxioms().toArray();
      // Remove roughly 10% of the axioms, committing a revision after each batch of ~1%.
      for (int i = 0; i < axioms.length / 10; i++) {
        int j = i;
        for (; j < i + axioms.length / 100; j++) {
          if (j < axioms.length) {
            manager.applyChange(new RemoveAxiom(o, (OWLAxiom) axioms[j]));
          }
        }
        i = j;
        vo.commit("SameUser", " commit no " + i);
      }

      // RENDER VERSIONED ONTOLOGY (includes data)
      VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
      VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      // Render to the same file name the PARSE step below expects (revision 10).
      File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
      renderedFiles.add(fx);
      Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
      r.render(vo, null, fwriter, c);
      //			for (int i = 0; i < vo.getArity(); i++)
      //			{
      //				VOWLXMLRenderConfiguration c = new VOWLXMLRenderConfiguration();
      //				//c.setLastRevisionIndex(i);
      //				VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      //				File fx = new File(TESTFILE.getAbsolutePath() + " Revision-" + i + ".xml");
      //				// File fx = new
      //				// File("C:\\_CiRM\\testontos\\CountyVersioned-Rev-"+ i +
      //				// ".vowlxml");
      //				renderedFiles.add(fx);
      //				// File fx = new File("C:\\_CiRM\\testontos\\1 csr-Rev-"+ i +
      //				// ".vowlxml");
      //				Writer fwriter = new OutputStreamWriter(new FileOutputStream(fx),
      // Charset.forName("UTF-8"));
      //				// Full export
      //				r.render(vo, fwriter, c);
      //			}
      System.out.println("DELETE ALL ONTOLOGIES");
      repo.deleteAllOntologies();
      GarbageCollector gc = new GarbageCollector(repo);
      gc.runGarbageCollection(GarbageCollector.MODE_DELETED_ONTOLOGIES);
    } catch (OWLOntologyCreationException | IOException | OWLRendererException e) {
      e.printStackTrace();
    }

    //
    // PARSE
    //
    File f = new File(TESTFILE.getAbsolutePath() + " Revision-" + 10 + ".xml");
    System.out.println("PARSING: " + f + " length: " + (f.length() / 1024) + " kB");
    OWLOntologyDocumentSource source = new FileDocumentSource(f);
    VOWLXMLParser parser = new VOWLXMLParser();
    OWLOntologyEx onto = new OWLTempOntologyImpl(manager, new OWLOntologyID());
    // must have onto for manager in super class
    VOWLXMLDocument versionedOntologyRoot = new VOWLXMLDocument(onto);
    //
    // Create VersionedOntology Revision 10
    try {
      parser.parse(graph, source, versionedOntologyRoot, new OWLOntologyLoaderConfiguration());
      System.out.println("PARSING FINISHED.");
    } catch (OWLOntologyChangeException
        | UnloadableImportException
        | OWLParserException
        | IOException e) {
      e.printStackTrace();
    }
    if (versionedOntologyRoot.getRenderConfig().heads().isEmpty()
        && versionedOntologyRoot
            .getRenderConfig()
            .roots()
            .contains(versionedOntologyRoot.getRenderConfig().firstRevision())) {
      OWLOntologyID ontologyID = versionedOntologyRoot.getRevisionData().getOntologyID();
      IRI documentIRI =
          IRI.create("hgdb://" + ontologyID.getDefaultDocumentIRI().toString().substring(7));
      HGPersistentHandle ontologyUUID =
          repo.getHyperGraph().getHandleFactory().makeHandle(versionedOntologyRoot.getOntologyID());
      try {
        System.out.println("Storing ontology data for : " + ontologyUUID);
        HGDBOntology o =
            manager
                .getOntologyRepository()
                .createOWLOntology(ontologyID, documentIRI, ontologyUUID);
        storeFromTo(versionedOntologyRoot.getRevisionData(), o);
      } catch (HGDBOntologyAlreadyExistsByDocumentIRIException
          | HGDBOntologyAlreadyExistsByOntologyIDException
          | HGDBOntologyAlreadyExistsByOntologyUUIDException e) {
        e.printStackTrace();
      }
      // Add version control with full matching history.
      System.out.println("Adding version control history to : " + ontologyUUID);
      //			VersionedOntology voParsed = new VersionedOntology(versionedOntologyRoot.getRevisions(),
      //					versionedOntologyRoot.getChangesets(), graph);
      //			VHGDBOntologyRepository vrepo = (VHGDBOntologyRepository)
      // manager.getOntologyRepository();
      //			System.out.println("Versioned Repository Contents: ");
      //			for (VersionedOntology vox : vrepo.getVersionControlledOntologies())
      //			{
      //				System.out.println("Versioned Ontology: " + vox.getWorkingSetData());
      //				System.out.println("Versioned Ontology Revs: " + vox.getNrOfRevisions());
      //			}
      //			//
      //			// Rendering FULL Versioned Ontology
      //			//
      //			System.out.println("Rendering full versioned ontology after parse and store: " +
      // ontologyUUID);
      //			VOWLXMLVersionedOntologyRenderer r = new VOWLXMLVersionedOntologyRenderer(manager);
      //			File fx = new File(TESTFILE.getAbsolutePath() + "FULL-afterParse.xml");
      //			Writer fwriter;
      //			try
      //			{
      //				fwriter = new OutputStreamWriter(new FileOutputStream(fx), Charset.forName("UTF-8"));
      //				r.render(voParsed, fwriter);
      //			}
      //			catch (IOException e)
      //			{
      //				e.printStackTrace();
      //			}
      //			catch (OWLRendererException e)
      //			{
      //				e.printStackTrace();
      //			}
    } else {
      System.out.println("ERROR: EXPECTING COMPLETE VERSIONED ONTOLOGY");
    }
  }
  public OWLOntology findLsignature(
      OWLOntology ontology, LogicFragment fragment, Statistics stats) {
    Timer t = new Timer();
    this.stats = stats;
    Logger_MORe.logInfo("extracting " + fragment.toString() + "-signature");
    OWLOntology ret = null;
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    try {
      ret = manager.createOntology();
      manager.addAxioms(ret, ontology.getAxioms());
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    lSignatureClasses = new HashSet<OWLClass>();
    lSignatureOther = new HashSet<OWLEntity>();
    compSignatureClasses = new HashSet<OWLClass>();
    compSignatureOther = new HashSet<OWLEntity>();

    LsignatureExtractorLauncher elkSignatureExtractorLauncher = null;
    LsignatureExtractorLauncher elkSignatureExtractorIntegratingRangesLauncher = null;
    LsignatureExtractorViaInverseRewritingLauncher elkSignatureExtractorRewritingInversesLauncher =
        null;

    ForkJoinPool executor = new ForkJoinPool();
    elkSignatureExtractorLauncher =
        new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, false);
    executor.execute(elkSignatureExtractorLauncher);
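    // The plain ELK extraction always runs; the rewriting/range variants below are optional
    // refinements submitted to the same ForkJoinPool.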

    if (ret != null) {
      // Without a copy of the ontology there is nowhere to add the axioms from the normalized
      // ontologies that are needed to actually classify the extra classes in the lSignature.
      if (rewriteInverses) {
        elkSignatureExtractorRewritingInversesLauncher =
            new LsignatureExtractorViaInverseRewritingLauncher(ontology, LogicFragment.ELK);
        executor.execute(elkSignatureExtractorRewritingInversesLauncher);
      }
      if (integrateRanges) {
        elkSignatureExtractorIntegratingRangesLauncher =
            new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, true);
        executor.execute(elkSignatureExtractorIntegratingRangesLauncher);
      }

      // Check the output of the plain ELK extraction and cancel the other tasks if the
      // lSignature already covers the whole signature.
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());

      if (compSignatureClasses.isEmpty())
        cancelTasks(
            elkSignatureExtractorIntegratingRangesLauncher,
            elkSignatureExtractorRewritingInversesLauncher);
      else {
        if (elkSignatureExtractorRewritingInversesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorRewritingInversesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorViaInverseRewritingLauncher)
                      elkSignatureExtractorRewritingInversesLauncher)
                  .getOntology()
                  .getAxioms());
        }
        if (compSignatureClasses.isEmpty())
          cancelTasks(elkSignatureExtractorRewritingInversesLauncher);
        else if (elkSignatureExtractorIntegratingRangesLauncher != null
            && extendLsignature(
                    (LsignatureExtractor) elkSignatureExtractorIntegratingRangesLauncher.join())
                > 0) {
          manager.addAxioms(
              ret,
              ((LsignatureExtractorLauncher) elkSignatureExtractorIntegratingRangesLauncher)
                  .getOntology()
                  .getAxioms());
        }
      }
      stats.updateLsignatureSize(lSignatureClasses.size(), true);
    } else {
      ret = ontology;
      initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
    }

    Logger_MORe.logInfo(lSignatureClasses.size() + " classes in lSignature");
    Logger_MORe.logDebug(lSignatureClasses.toString());
    Logger_MORe.logInfo(compSignatureClasses.size() + " classes in compSignature");

    // It might be worth trying to isolate the extra axioms introduced by the
    // normalisation/rewriting - is this possible/worth the effort?
    // Also check the order in which we try to extend the lSignature with each of the rewritten
    // ontologies and consider whether one order may be better than the other.
    Logger_MORe.logDebug(t.duration() + "s to find Lsignature");

    return ret;
  }
    @Override
    protected boolean exec() {
      Timer t = new Timer();
      if (ontology == null) {
        extractor = null;
        return true;
      }
      IRI iri =
          IRI.create("http://www.cs.ox.ac.uk/isg/tools/MORe/ontologies/inverseRewritingModule.owl");
      extractor.findLsignature(ontology, LogicFragment.SHOIQ);
      if (containsNonInternalClasses(extractor.getCompSignature())) {
        // The ontology goes beyond SHOIQ, so we need to work with a SHOIQ module rather than
        // the whole ontology.
        Set<OWLEntity> aux = getNonInternalClasses(extractor.getLsignature());
        if (aux.isEmpty()) {
          extractor = null;
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - empty SHOIQ-signature)");
          return true;
        }
        SyntacticLocalityModuleExtractor moduleExtractor =
            new SyntacticLocalityModuleExtractor(manager, ontology, ModuleType.BOT);
        try {
          ontology = moduleExtractor.extractAsOntology(aux, iri);
        } catch (OWLOntologyCreationException e1) {
          extractor = null;
          e1.printStackTrace();
          Logger_MORe.logDebug(
              t.duration()
                  + "s to find Lsignature with inverseRewriting (failed - exception creating a SHOIQ module)");
          return true;
        }
      }

      // if we get this far then we have a nonempty ontology (maybe module) that we need to
      // normalize and then rewrite
      OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, true, true);
      Rewriter rewriter =
          new Rewriter(normalization.getNormalizedOntology(), normalization.getSortedGCIs());
      if (manager.contains(iri)) manager.removeOntology(ontology);
      Set<OWLAxiom> rewrittenAxioms = rewriter.getRewrittenOntology();
      if (!rewriter.anyRewrittenRoles()) {
        extractor = null;
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - could not rewrite any roles)");
        return true;
      }
      try {
        ontology = manager.createOntology();
        manager.addAxioms(ontology, rewrittenAxioms);
        extractor = new LsignatureExtractor_reducedGreedyness();
        extractor.findLsignature(ontology, fragment);
      } catch (OWLOntologyCreationException e1) {
        extractor = null;
        e1.printStackTrace();
        Logger_MORe.logDebug(
            t.duration()
                + "s to find Lsignature with inverseRewriting (failed - exception creating ontology for rewritten axioms)");
        return true;
      }
      Logger_MORe.logDebug(t.duration() + "s to find Lsignature with inverseRewriting");
      return true;
    }
  @SuppressWarnings("unchecked")
  public String execute() {

    String sawsdlExt = ".sawsdl";
    String wsdlExt = ".wsdl";
    String sawadlExt = ".sawadl";
    String wadlExt = ".wadl";
    StringBuffer buf = new StringBuffer();

    SAXBuilder sbuilder = new SAXBuilder();
    Document doc = null;

    @SuppressWarnings("rawtypes")
    Map session = ActionContext.getContext().getSession();
    String importURL = "";
    errormsg = "";
    String filename = "";

    System.out.println("wsloc = " + wsloc);
    if (WSFile != null) System.out.println("WSFile size = " + WSFile.length());

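    // Two input paths: a remote URL ("http:" in wsloc) or an uploaded file; each installs the
    // matching parser (SAWSDL or WADL) in the session.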
    try {
      XMLParser wsParser = null;
      session.remove("wsname");
      session.remove("wsdlparser");
      session.remove("wadlparser");
      if (wsloc.indexOf("http:") != -1) {
        importURL = wsloc;
        if (wsloc.equalsIgnoreCase(wsdlExt) || wsloc.equalsIgnoreCase(sawsdlExt)) {
          doc = sbuilder.build(importURL);
          wsParser = new SAWSDLParser(doc);
          session.put("wsdlparser", wsParser);
          session.remove("wadlparser");
          int start = importURL.lastIndexOf("/");
          filename = importURL.substring(start, importURL.length());
          session.put("wsname", filename);
        } else if (wsloc.endsWith(wadlExt) || wsloc.endsWith(sawadlExt)) {
          doc = sbuilder.build(importURL);
          wsParser = new WADLParser(doc);
          session.put("wadlparser", wsParser);
          session.remove("wsdlparser");
          int start = importURL.lastIndexOf("/");
          filename = importURL.substring(start, importURL.length());
          session.put("wsname", filename);
        }
      } else {
        if (WSFile != null) {
          if (wsloc.endsWith(wsdlExt) || wsloc.endsWith(sawsdlExt)) {
            doc = sbuilder.build(WSFile);
            wsParser = new SAWSDLParser(doc);
            session.put("wsdlparser", wsParser);
            filename = wsloc;
            session.put("wsname", filename);
          } else if (wsloc.endsWith(wadlExt) || wsloc.endsWith(sawadlExt)) {
            doc = sbuilder.build(WSFile);
            wsParser = new WADLParser(doc);
            session.put("wadlparser", wsParser);
            filename = wsloc;
            session.put("wsname", filename);
          } else {
            errormsg = "File is not wsdl or wadl file.";
          }
        } else {
          errormsg = "WSDL file lost.";
        }
      }

      if (wsParser == null) {
        errormsg = "The WSDL/WADL description is invalid.";
        return ERROR;
      }

      if (isWSDL(doc)) {

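        // Make sure the document declares the SAWSDL namespace before annotating/rendering it.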
        boolean hasSAWSDLNS = false;
        @SuppressWarnings("rawtypes")
        List nameSpaces = doc.getRootElement().getAdditionalNamespaces();
        for (int i = 0; i < nameSpaces.size(); i++) {
          Namespace ns = (Namespace) nameSpaces.get(i);
          if (ns.getURI().equalsIgnoreCase(SAWSDLParser.sawsdlNS.getURI())) {
            hasSAWSDLNS = true;
            break;
          }
        }
        if (!hasSAWSDLNS) {
          doc.getRootElement().addNamespaceDeclaration(SAWSDLParser.sawsdlNS);
        }

        boolean wsdlV1 = ((SAWSDLParser) wsParser).isWsdlV1();
        if (wsdlV1) {
          LoadWSDLTree.loadWSDL((SAWSDLParser) wsParser, buf, filename);
        } else {
          // WSDL 2.0 documents are not handled yet.
        }
        innerTreeHtml = buf.toString();
        type = "wsdl";
      } else if (isWADL(doc)) {
        LoadWADLTree.loadWADL((WADLParser) wsParser, buf, wsloc);
        innerTreeHtml = buf.toString();
        type = "wadl";
      }

    } catch (Exception e) {
      // IOException, URISyntaxException, OWLOntologyCreationException, JDOMException, etc.
      e.printStackTrace();
      errormsg = e.toString();
    }

    System.out.println("errormsg = " + errormsg);

    System.out.println("draw finish");

    return SUCCESS;
  }