Example #1
  /**
   * Initializes the components of the semantic container once the ontology URIs have been
   * dispatched by OwlDistributionInitializer.
   */
  public void semInit() throws ExceptionInInitializerError {
    if (isInitialized()) {
      /** Initialization of the local ontology. This ontology is never modified afterwards. */
      // Initialization of the local ontology manager. Only used here.
      OWLOntologyManager localOntoManager = OWLManager.createOWLOntologyManager();
      OWLOntologyIRIMapper localOntoMapper =
          new SimpleIRIMapper(getOntologyIri(), getPhysicalIri());
      localOntoManager.addIRIMapper(localOntoMapper);
      // Initialize the ontology.
      try {
        // Create the local ontology.
        this.localOntology = localOntoManager.loadOntology(getPhysicalIri());
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
      // System.out.println("localOntology : " + localOntology + " ; physicalIri : " +
      // getPhysicalIri().toString());

      /**
       * Initialization of the reasoner's ontology, which will contain the local ontology plus any
       * additional axioms. We create the instance and attach it to a manager and a reasoner that
       * are accessible through getters. The instance stays the same, but its contents change.
       */
      // Initialization of the reasoner's ontology manager.
      this.manager = OWLManager.createOWLOntologyManager();
      // Create a second ontology.
      try {
        IRI ontoName = IRI.create(this.nameSpace);
        OWLOntologyID ontoId = new OWLOntologyID(ontoName);
        this.reasoningOntology = this.manager.createOntology(ontoId);
      } catch (OWLOntologyCreationException e) {
        e.printStackTrace();
      }
      // Initialize the reasoner.
      PelletReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();

      PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
      PelletOptions.USE_COMPLETION_QUEUE = true;

      // PelletReasoner reasoner = reasonerFactory.createReasoner(reasoningOntology);
      PelletReasoner reasoner = reasonerFactory.createNonBufferingReasoner(reasoningOntology);

      // add the reasoner as an ontology change listener
      this.manager.addOntologyChangeListener(reasoner);

      reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
      // reasoner.precomputeInferences(InferenceType.CLASS_ASSERTIONS);
      // reasoner.precomputeInferences(InferenceType.DATA_PROPERTY_HIERARCHY);
      // reasoner.precomputeInferences(InferenceType.DISJOINT_CLASSES);
      // reasoner.precomputeInferences(InferenceType.OBJECT_PROPERTY_HIERARCHY);
      // reasoner.precomputeInferences(InferenceType.SAME_INDIVIDUAL);
      this.reasoner = reasoner;
    } else
      throw new ExceptionInInitializerError(
          "Ontology parameters not initialized; ontoParamsInit = "
              + Reflection.getCallerClass(2));

    // initOntoReasoner();
  }
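
For reference, the essential incremental-reasoner setup used above can be reduced to a short sketch. The method name and IRI parameter are illustrative, not part of the original code, and the same OWL API and Pellet imports as in the example are assumed:

  public static PelletReasoner createIncrementalReasoner(String ontologyIri)
      throws OWLOntologyCreationException {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    // An empty ontology that will receive axioms later, like reasoningOntology above.
    OWLOntology ontology = manager.createOntology(new OWLOntologyID(IRI.create(ontologyIri)));
    // Pellet options enabling incremental consistency checking, as set in the example.
    PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
    PelletOptions.USE_COMPLETION_QUEUE = true;
    PelletReasoner reasoner = PelletReasonerFactory.getInstance().createNonBufferingReasoner(ontology);
    // Register the reasoner as a change listener so later axiom additions are picked up automatically.
    manager.addOntologyChangeListener(reasoner);
    reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
    return reasoner;
  }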
Example #2
  public static void main(String[] args) {
    try {
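      // Render entities as short forms, using the TAMBIS namespace as the default prefix.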
      SimpleRenderer renderer = new SimpleRenderer();
      renderer.setShortFormProvider(
          new DefaultPrefixManager("http://www.mindswap.org/ontologies/tambis-full.owl#"));
      ToStringRenderer.getInstance().setRenderer(renderer);
      OWLOntologyManager man = OWLManager.createOWLOntologyManager();
      OWLOntology ont =
          man.loadOntology(
              IRI.create(
                  "http://owl.cs.manchester.ac.uk/repository/download?ontology=http://www.cs.manchester.ac.uk/owl/ontologies/tambis-patched.owl"));

      System.out.println("Loaded!");
      OWLReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();
      OWLReasoner reasoner = reasonerFactory.createNonBufferingReasoner(ont);
      reasoner.getUnsatisfiableClasses();
      ExplanationBasedRootClassFinder rdr =
          new ExplanationBasedRootClassFinder(man, reasoner, reasonerFactory);
      for (OWLClass cls : rdr.getRootUnsatisfiableClasses()) System.out.println("ROOT! " + cls);
    } catch (TimeOutException e) {
      e.printStackTrace();
    } catch (ReasonerInterruptedException e) {
      e.printStackTrace();
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
  }
Example #3
  public static void main(String[] args) throws Exception {
    String url =
        "https://raw.github.com/structureddynamics/Bibliographic-Ontology-BIBO/master/bibo.xml.owl";
    NamingIssueDetection namingIssueDetection = new NamingIssueDetection("/opt/wordnet");

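    // Read the ontology into an in-memory Jena OntModel (RDFS profile, no extra inference).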
    OntModel model = ModelFactory.createOntologyModel(OntModelSpec.RDFS_MEM);
    model.read(url, null);

    Set<NamingIssue> nonMatchingChildren =
        namingIssueDetection.detectNonExactMatchingDirectChildIssues(model);
    System.out.println(nonMatchingChildren);

    nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(model);
    System.out.println(nonMatchingChildren);

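    // Load the same ontology through the OWL API for the OWLOntology-based overloads.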
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = man.loadOntology(IRI.create(url));

    nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(ontology);
    System.out.println(nonMatchingChildren);

    nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(ontology);
    System.out.println(nonMatchingChildren);

    nonMatchingChildren = namingIssueDetection.detectNonMatchingChildIssues(ontology, false);
    System.out.println(nonMatchingChildren);

    nonMatchingChildren = namingIssueDetection.detectNonMatchingChildIssues(ontology, true);
    System.out.println(nonMatchingChildren);
  }
Example #4
 public static void callLogMap() throws OWLOntologyCreationException {
   String onto1_iri = "http://oaei.ontologymatching.org/tests/101/onto.rdf";
   String onto2_iri = "http://oaei.ontologymatching.org/tests/304/onto.rdf";
   OWLOntologyManager onto_manager = OWLManager.createOWLOntologyManager();
   OWLOntology onto1 = onto_manager.loadOntology(IRI.create(onto1_iri));
   OWLOntology onto2 = onto_manager.loadOntology(IRI.create(onto2_iri));
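   // Match the two ontologies with LogMap 2 and print each mapping that it finds.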
   LogMap2_Matcher logmap2 = new LogMap2_Matcher(onto1, onto2);
   Set<MappingObjectStr> logmap2_mappings = logmap2.getLogmap2_Mappings();
   for (MappingObjectStr mappingObject : logmap2_mappings) {
     System.out.println(" test des mots : " + mappingObject.toString());
   }
   System.out.println("Num.mappings: " + logmap2_mappings.size());
 }
Example #5
  /** This method gets an OWL ontology used as an input for the supplied rule. */
  private OWLOntology getInputOntologyForRule(IRI iri) throws OWLOntologyCreationException {
    if (iri.getScheme().equals(PREFIX)) {
      iri = getOntologyIRIForRuleName(iri.toString().substring(PREFIX.length()));
    }

    final OWLOntologyManager m = controller.getOWLOntologyManager();
    OWLOntology o;
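    // Reuse the ontology if the manager already contains it; otherwise load it from its IRI.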
    if (m.contains(iri)) {
      o = m.getOntology(iri);
    } else {
      o = m.loadOntology(iri);
    }
    return o;
  }
Example #6
  public void getELKIncrementalRuntime(String baseOnt, String ontDir) throws Exception {
    GregorianCalendar start = new GregorianCalendar();
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    IRI documentIRI = IRI.create(new File(baseOnt));
    OWLOntology baseOntology = manager.loadOntology(documentIRI);
    System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
    //		findDataHasValueAxiom(baseOntology.getAxioms(AxiomType.SUBCLASS_OF));

    OWLReasonerFactory reasonerFactory = new ElkReasonerFactory();
    OWLReasoner reasoner = reasonerFactory.createReasoner(baseOntology);
    reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
    File[] files = new File(ontDir).listFiles();
    int count = 0;
    OWLOntologyManager manager2 = OWLManager.createOWLOntologyManager();
    addTripsBaseOntologies(manager2);
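    // Add each file's axioms to the base ontology and let ELK update the classification incrementally.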
    for (File file : files) {
      System.out.println("File name: " + file.getName());
      documentIRI = IRI.create(file);
      OWLOntology ontology = manager2.loadOntology(documentIRI);
      Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
      //	    	findDataHasValueAxiom(ontology.getAxioms(AxiomType.SUBCLASS_OF));
      manager.addAxioms(baseOntology, axioms);
      reasoner.flush();
      System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());

      // From the ELK wiki, it seems ABox reasoning will trigger TBox reasoning
      reasoner.precomputeInferences(InferenceType.CLASS_ASSERTIONS);

      manager2.removeOntology(ontology);
      count++;
      System.out.println("Done with " + count);
      //	    	if(count == 5)
      //	    		break;
    }
    reasoner.dispose();
    System.out.println("Time taken (millis): " + Util.getElapsedTime(start));
  }
Example #7
 private OntologyManager() {
   _manager = OWLManager.createOWLOntologyManager();
   try {
     _ontology = _manager.loadOntology(IRI.create(getClass().getResource(ONTOLOGY_RESOURCE_PATH)));
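     // Look for the ontology annotation that says which taxonomy the domain ontology is defined by.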
     for (OWLAnnotation anon : _ontology.getAnnotations()) {
       if (anon.getProperty().getIRI().toString().equals(DomainOntology.taxonomyDefinedBy)) {
         _taxonomyType = anon.getValue().toString().replace("\"", "");
         break;
       }
     }
     _reasoner = new StructuralReasonerFactory().createReasoner(_ontology);
   } catch (OWLOntologyCreationException ex) {
     Log.e(TAG, "Error while loading the domain ontology");
     _taxonomyType = null;
   }
 }
Example #8
  public void loadOntology() {
    try {
      /*
       * Steps required to create a mapped ontology:
       * 1. Create a toBeMappedIRI whose purpose is to be mapped to the local file.
       * 2. Create a File pointing to the ontology's location on the local storage medium.
       * 3. Create a SimpleIRIMapper and register it with the manager; the SimpleIRIMapper
       *    links the toBeMappedIRI given above to the physical location of the ontology on
       *    the local storage medium.
       * 4. Create an ontologyIRI with exactly the same value as the toBeMappedIRI; this
       *    will be the IRI of our ontology.
       * 5. Create our ontology using manager.loadOntology(ontologyIRI).
       */
      String sep = File.separator;

      manager = OWLManager.createOWLOntologyManager();
      toBeMappedIRI =
          IRI.create(
              "http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl");
      // ontFile = new File("../src/ontologyresources/ptyxiaki_v0.8.owl");
      ontFile = new File("src/ontologyresources/ptyxiaki_v0.8.owl");
      // Fall back to an alternative location at load time when the application is packaged as a jar.
      if (!ontFile.canRead()) {
        ontFile = new File("ontologyresources/ptyxiaki_v0.8.owl");
      }
      manager.addIRIMapper(new SimpleIRIMapper(toBeMappedIRI, IRI.create(ontFile)));
      ontologyIRI =
          IRI.create(
              "http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl");
      topIxOnt = manager.loadOntology(ontologyIRI);
      OWLFactory = manager.getOWLDataFactory();
      topIxFormat = manager.getOntologyFormat(topIxOnt);
      topIxPrefixManager =
          new DefaultPrefixManager(topIxOnt.getOntologyID().getOntologyIRI().toString() + '#');

      System.out.println("loaded ontology: " + this.topIxOnt);
      System.out.println("from: " + this.manager.getOntologyDocumentIRI(this.topIxOnt));

    } catch (OWLException oex) {
      logger.info(oex.getMessage());
    }
  }
Example #9
  private String parseWithOWLAPI(
      URL ontologyURL,
      boolean useOWLAPI,
      boolean considerImportedOntologies,
      boolean considerImportedClosure,
      boolean useReasoner)
      throws OWLOntologyCreationException, OWLOntologyStorageException, URISyntaxException {
    String result = "";

    if (useOWLAPI) {
      OWLOntologyManager manager = OWLManager.createOWLOntologyManager();

      OWLOntology ontology = manager.loadOntology(IRI.create(ontologyURL.toString()));

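      // Merge axioms from the imported ontologies (direct imports or the full imports closure) into the main ontology.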
      if (considerImportedClosure || considerImportedOntologies) {
        Set<OWLOntology> setOfImportedOntologies = new HashSet<OWLOntology>();
        if (considerImportedOntologies) {
          setOfImportedOntologies.addAll(ontology.getDirectImports());
        } else {
          setOfImportedOntologies.addAll(ontology.getImportsClosure());
        }
        for (OWLOntology importedOntology : setOfImportedOntologies) {
          manager.addAxioms(ontology, importedOntology.getAxioms());
        }
      }

      if (useReasoner) {
        ontology = parseWithReasoner(manager, ontology);
      }

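      // Serialize the (possibly augmented) ontology back to RDF/XML and return it as a string.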
      StringDocumentTarget parsedOntology = new StringDocumentTarget();

      manager.saveOntology(ontology, new RDFXMLOntologyFormat(), parsedOntology);
      result = parsedOntology.toString();
    }

    return result;
  }