/**
 * Collects every individual asserted to be of class {@code :Site} in the ontology, keyed by the
 * value of its {@code :individualName} annotation.
 *
 * <p>Fix: removed the unused dead local {@code returnList} and hoisted the repeated
 * {@code getIndividual()} calls into a local.
 *
 * @return map from site name (the annotation literal's lexical value) to its
 *     {@link OWLIndividual}; empty if no {@code :Site} assertions exist
 */
public Map<String, OWLIndividual> returnSitesInOntology() {
    // map that contains the correspondance between the names of the sites
    // and their OWLIndividual representations.
    Map<String, OWLIndividual> returnMap = new HashMap<>();
    OWLAnnotationProperty hasNameAnnotationProperty =
        OWLFactory.getOWLAnnotationProperty(":individualName", topIxPrefixManager);

    OWLClassExpression siteClassExpression =
        OWLFactory.getOWLClass(":Site", topIxPrefixManager);
    for (OWLClassAssertionAxiom classAssertion :
        topIxOnt.getClassAssertionAxioms(siteClassExpression)) {
      OWLIndividual individual = classAssertion.getIndividual();

      Set<OWLAnnotationAssertionAxiom> siteAnnotations =
          topIxOnt.getAnnotationAssertionAxioms(
              individual.asOWLNamedIndividual().getIRI());
      for (OWLAnnotationAssertionAxiom annotationAssertion : siteAnnotations) {
        if (annotationAssertion.getProperty().equals(hasNameAnnotationProperty)) {
          // The literal renders as e.g. "name"^^xsd:string — keep the text between the
          // opening quote and the closing quote that precedes '^'.
          String name = annotationAssertion.getValue().toString();
          logger.info(name);
          name = name.substring(name.indexOf('"') + 1, name.indexOf('^') - 1);
          logger.info(name);
          logger.info(individual.toString());
          returnMap.put(name, individual);
        }
      }
    }
    return returnMap;
  }
 public Set<OWLNamedIndividual> getEntities() {
   // Union of the named individuals appearing in the signature of every managed ontology.
   Set<OWLNamedIndividual> result = new HashSet<OWLNamedIndividual>();
   for (OWLOntology ontology : ontologies) {
     result.addAll(ontology.getIndividualsInSignature());
   }
   return result;
 }
 /**
  * Returns (creating on first use) the merged ontology, whose IRI is the query ontology's
  * default document IRI with a {@code -merged} suffix. The merged ontology imports the query
  * ontology rather than copying its axioms.
  *
  * <p>Fix: the IRI-to-string conversion was computed twice; it is now hoisted into a local.
  *
  * @return the cached or newly created merged ontology, or {@code null} if creation fails
  *     (preserving the historical contract for existing callers)
  */
 public OWLOntology getMergedOntology() {
   final IRI mergedOntologyIRI =
       IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged");
   final OWLOntologyManager mm = controller.getOWLOntologyManager();
   if (mm.contains(mergedOntologyIRI)) {
     // Already built by an earlier call — reuse it.
     return mm.getOntology(mergedOntologyIRI);
   }
   try {
     final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI);
     mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat());
     // Document file name: the IRI's last path segment plus an ".owl" extension.
     final String iriString = mergedOntologyIRI.toURI().toString();
     final String mergedOntologyFileName =
         iriString.substring(iriString.lastIndexOf('/') + 1) + ".owl";
     mm.setOntologyDocumentIRI(
         mergedOntology,
         IRI.create(
             controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName));
     // Link the merged ontology to the query ontology via an imports declaration.
     mm.applyChange(
         new AddImport(
             mergedOntology,
             mm.getOWLDataFactory()
                 .getOWLImportsDeclaration(
                     queryOntology.getOntologyID().getDefaultDocumentIRI())));
     return mergedOntology;
   } catch (OWLOntologyCreationException e) {
     // Unexpected here; keep returning null so existing callers continue to work.
     e.printStackTrace();
     return null;
   }
 }
 protected int calculateValue(OWLNamedIndividual entity) {
   // Usage score: how many axioms reference the entity, summed across every ontology.
   int total = 0;
   for (OWLOntology ontology : ontologies) {
     total += ontology.getReferencingAxioms(entity).size();
   }
   return total;
 }
  /**
   * Rebuilds the list model for the given ontology: a "direct imports" header followed by the
   * ontology's own imports declarations, then an "indirect imports" header followed by the
   * declarations of every other ontology in the imports closure, without duplicates.
   *
   * <p>Fix: the original duplicate check was {@code data.contains(dec)}, which was always false
   * because {@code data} holds {@code OntologyImportItem} wrappers, never the declarations
   * themselves — so indirect imports could appear multiple times. Declarations already listed
   * are now tracked explicitly.
   *
   * @param ont the ontology whose imports are displayed
   */
  public void setOntology(OWLOntology ont) {
    this.ont = ont;
    List<Object> data = new ArrayList<>();
    // Declarations already added to the view, so indirect imports are not repeated.
    List<OWLImportsDeclaration> listed = new ArrayList<>();
    data.add(directImportsHeader);

    // @@TODO ordering
    for (OWLImportsDeclaration decl : ont.getImportsDeclarations()) {
      listed.add(decl);
      data.add(new OntologyImportItem(ont, decl, editorKit));
    }
    data.add(indirectImportsHeader);
    // @@TODO ordering
    try {
      for (OWLOntology ontRef :
          editorKit.getOWLModelManager().getOWLOntologyManager().getImportsClosure(ont)) {
        if (!ontRef.equals(ont)) {
          for (OWLImportsDeclaration dec : ontRef.getImportsDeclarations()) {
            if (!listed.contains(dec)) {
              listed.add(dec);
              data.add(new OntologyImportItem(ontRef, dec, editorKit));
            }
          }
        }
      }
    } catch (UnknownOWLOntologyException e) {
      throw new OWLRuntimeException(e);
    }
    setListData(data.toArray());
  }
 /**
  * Normalizes and clausifies the root ontology (with its imports closure) into a
  * {@code DLOntology_withMaps}.
  *
  * <p>NOTE(review): the steps below are order-sensitive — normalization must precede property
  * axiomatization, which must precede the inclusion-manager rewrites; do not reorder.
  *
  * @param rootOntology ontology whose imports closure is processed
  * @return the clausified DL ontology
  */
 public DLOntology_withMaps preprocessAndClausify(OWLOntology rootOntology) {
   OWLDataFactory factory = rootOntology.getOWLOntologyManager().getOWLDataFactory();
   // Anonymous ontologies get a fixed fallback IRI.
   String ontologyIRI =
       rootOntology.getOntologyID().getDefaultDocumentIRI() == null
           ? "urn:hermit:kb"
           : rootOntology.getOntologyID().getDefaultDocumentIRI().toString();
   Collection<OWLOntology> importClosure = rootOntology.getImportsClosure();
   // Normalize every ontology in the closure into the shared axiom store.
   OWLAxioms_withMaps axioms = new OWLAxioms_withMaps();
   OWLNormalization_withMaps normalization =
       new OWLNormalization_withMaps(factory, axioms, 0, m_datatypeManager);
   for (OWLOntology ontology : importClosure) normalization.processOntology(ontology);
   BuiltInPropertyManager_withMaps builtInPropertyManager =
       new BuiltInPropertyManager_withMaps(factory);
   builtInPropertyManager.axiomatizeBuiltInPropertiesAsNeeded(axioms);
   ObjectPropertyInclusionManager_withMaps objectPropertyInclusionManager =
       new ObjectPropertyInclusionManager_withMaps(axioms);
   // now object property inclusion manager added all non-simple properties to
   // axioms.m_complexObjectPropertyExpressions
   // now that we know which roles are non-simple, we can decide which negative object property
   // assertions have to be
   // expressed as concept assertions so that transitivity rewriting applies properly.
   objectPropertyInclusionManager.rewriteNegativeObjectPropertyAssertions(
       factory, axioms, normalization.getDefinitions().size());
   objectPropertyInclusionManager.rewriteAxioms(factory, axioms, 0);
   // Expressivity analysis feeds the clausifier (e.g. which constructs are present).
   OWLAxiomsExpressivity_withMaps axiomsExpressivity = new OWLAxiomsExpressivity_withMaps(axioms);
   DLOntology_withMaps dlOntology =
       clausify_withMaps(factory, ontologyIRI, axioms, axiomsExpressivity);
   return dlOntology;
 }
Example #7
0
  /**
   * Reports whether the given object is marked deprecated in any active ontology, i.e. carries a
   * deprecation annotation whose value is the boolean literal {@code true}.
   */
  public static boolean isDeprecated(OWLModelManager p4Manager, OWLObject o) {
    // Only entities can carry annotation assertions.
    if (!(o instanceof OWLEntity)) {
      return false;
    }
    for (OWLOntology ontology : p4Manager.getActiveOntologies()) {
      for (OWLAnnotationAssertionAxiom assertion :
          ontology.getAnnotationAssertionAxioms(((OWLEntity) o).getIRI())) {
        if (assertion.getProperty().isDeprecated()
            && assertion.getValue() instanceof OWLLiteral) {
          OWLLiteral literal = (OWLLiteral) assertion.getValue();
          if (literal.isBoolean() && literal.parseBoolean()) {
            return true;
          }
        }
      }
    }
    return false;
  }
Example #8
0
  /**
   * Benchmarks Pellet's incremental classifier: classifies the base ontology, then incrementally
   * adds the logical axioms of each ontology file in {@code ontDir}, reclassifying after each and
   * printing timings to stdout.
   *
   * <p>Fix: {@code File.listFiles()} returns {@code null} when the path is not a readable
   * directory, which previously caused an NPE; this is now reported with a clear exception.
   *
   * @param baseOnt path of the base ontology file
   * @param ontDir directory containing the ontology files to add incrementally
   * @throws Exception on ontology loading/classification failure
   */
  private void getPelletIncrementalClassifierRunTime(String baseOnt, String ontDir)
      throws Exception {
    System.out.println("Using Pellet Incremental Classifier...");
    GregorianCalendar start = new GregorianCalendar();
    File ontFile = new File(baseOnt);
    IRI documentIRI = IRI.create(ontFile);
    OWLOntology baseOntology = OWL.manager.loadOntology(documentIRI);
    IncrementalClassifier classifier = new IncrementalClassifier(baseOntology);
    classifier.classify();
    System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
    System.out.println("Time taken for base ontology (millis): " + Util.getElapsedTime(start));
    File ontDirPath = new File(ontDir);
    File[] allFiles = ontDirPath.listFiles();
    if (allFiles == null) {
      // listFiles() returns null for a non-existent or unreadable directory.
      throw new IllegalArgumentException("Not a readable directory: " + ontDir);
    }
    // A separate manager loads each incremental file; axioms are applied to the base
    // ontology through OWL.manager, which owns baseOntology.
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    addTripsBaseOntologies(manager);
    int count = 1;
    for (File owlFile : allFiles) {
      IRI owlDocumentIRI = IRI.create(owlFile);
      OWLOntology ontology = manager.loadOntologyFromOntologyDocument(owlDocumentIRI);
      Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
      for (OWLLogicalAxiom axiom : axioms)
        OWL.manager.applyChange(new AddAxiom(baseOntology, axiom));

      System.out.println("\nLogical axioms: " + baseOntology.getLogicalAxiomCount());
      System.out.println(count + "  file: " + owlFile.getName());
      classifier.classify();
      // Unload the increment so its axioms are not re-added on the next iteration.
      manager.removeOntology(ontology);
      count++;
    }
    System.out.println("\nTotal time taken (millis): " + Util.getElapsedTime(start));
  }
 /**
  * Round-trip test: an ontology containing the float literal {@code "-INF"} must serialize with
  * that literal intact, and reloading the serialization must yield the same logical axioms.
  *
  * <p>Fix: corrected the assertion-message typo "Obtologies" → "Ontologies".
  */
 @Test
 public void testMinusInf() throws Exception {
   String input =
       "Prefix(xsd:=<http://www.w3.org/2001/XMLSchema#>)\n"
           + "Prefix(owl:=<http://www.w3.org/2002/07/owl#>)\n"
           + "Prefix(:=<http://test.org/test#>)\n"
           + "Ontology(\nDeclaration(NamedIndividual(:a))\n"
           + "Declaration(DataProperty(:dp))\n"
           + "Declaration(Class(:A))\n"
           + "SubClassOf(:A DataAllValuesFrom(:dp owl:real))"
           + "\nSubClassOf(:A \n"
           + "DataSomeValuesFrom(:dp DataOneOf(\"-INF\"^^xsd:float \"-0\"^^xsd:integer))"
           + "\n)"
           + "\n"
           + "ClassAssertion(:A :a)"
           + "\n)";
   StringDocumentSource in = new StringDocumentSource(input);
   OWLOntologyManager m = Factory.getManager();
   OWLOntology o = m.loadOntologyFromOntologyDocument(in);
   StringDocumentTarget t = new StringDocumentTarget();
   m.saveOntology(o, t);
   assertTrue(t.toString() + " should contain -INF", t.toString().contains("-INF"));
   // Reload what was just written and compare logical content.
   OWLOntology o1 = m.loadOntologyFromOntologyDocument(new StringDocumentSource(t.toString()));
   assertEquals(
       "Ontologies were supposed to be the same", o.getLogicalAxioms(), o1.getLogicalAxioms());
 }
 /**
  * Finds all individuals where more than one type is asserted, which is disallowed by Cirm team
  * convention. Except: Protected is allowed as second class, but will still be returned from this
  * method.
  *
  * @return pairs of (individual, asserted types) for every individual with more than one type
  */
 public List<Pair<OWLNamedIndividual, Set<OWLClassExpression>>>
     getIndividualsWithMoreThanOneTypeAsserted() {
   List<Pair<OWLNamedIndividual, Set<OWLClassExpression>>> violations =
       new LinkedList<Pair<OWLNamedIndividual, Set<OWLClassExpression>>>();
   if (DBG)
     System.out.println("Start CirmOntologyValidator::getIndividualsWithMoreThanOneTypeAsserted");
   Set<OWLOntology> allOntologies = OWL.ontologies();
   for (OWLOntology ontology : allOntologies) {
     if (DBG) System.out.println("Getting individuals in signature of Ontology " + ontology);
     Set<OWLNamedIndividual> individuals = ontology.getIndividualsInSignature(false);
     if (DBG) System.out.println("...done.");
     for (OWLNamedIndividual individual : individuals) {
       // Types are gathered across ALL ontologies, not just the one being scanned.
       Set<OWLClassExpression> assertedTypes = individual.getTypes(allOntologies);
       if (assertedTypes.size() <= 1) {
         continue;
       }
       violations.add(
           new Pair<OWLNamedIndividual, Set<OWLClassExpression>>(individual, assertedTypes));
       if (DBG) {
         System.out.println("Found " + assertedTypes.size() + " types for " + individual);
         for (OWLClassExpression type : assertedTypes) {
           System.out.println(type);
         }
       }
     }
   }
   if (DBG) System.out.println("Total Individuals not passing validation: " + violations.size());
   return violations;
 }
Example #11
0
  /**
   * Loads a Kuaba repository from the given location. A plain path (no scheme separator) is
   * treated as a local file; anything else is used as an IRI directly.
   */
  @Override
  public KuabaRepository load(String url) throws RepositoryLoadException {
    File file = new File(url);
    IRI iri = url.contains(":/") ? IRI.create(url) : IRI.create(file);

    try {
      OWLOntology inst = manager.loadOntologyFromOntologyDocument(iri);
      if (inst == null) throw new RepositoryLoadException(url, "Invalid Location.");
      if (inst.getOntologyID().getOntologyIRI() == null) {
        // An anonymous Ontology was loaded, and they are not supported by the Kuaba Subsystem
        manager.removeOntology(inst);
        throw new RepositoryLoadException(url, "It is probably an incompatible or malformed file.");
      }

      KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
      repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
      fileMap.put(repo, file);
      return repo;
    } catch (OWLOntologyAlreadyExistsException ex) {
      // The ontology was loaded before; return the repository already registered for it.
      return repoMap.get(ex.getOntologyID().getOntologyIRI());
    } catch (OWLOntologyCreationException ex) {
      throw new RepositoryLoadException(url, "Invalid Location.");
    }
  }
Example #12
0
  /**
   * Creates a fresh Kuaba repository. When {@code url} is null the OWL API mints a document IRI;
   * when {@code destination} is non-null the repository is saved there immediately.
   *
   * @return the new repository, or {@code null} if ontology creation fails
   */
  public KuabaRepository createNewRepository(String url, File destination) {
    IRI iri = (url == null) ? IRI.generateDocumentIRI() : IRI.create(url);

    try {
      OWLOntology inst = manager.createOntology(iri);
      // Every Kuaba repository imports the base Kuaba ontology.
      OWLImportsDeclaration imp =
          manager.getOWLDataFactory().getOWLImportsDeclaration(IRI.create(ONTOLOGY_URL));
      manager.applyChange(new AddImport(inst, imp));
      KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
      repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
      fileMap.put(repo, destination);

      TemplateGenerator.generateRootQuestion(repo);

      if (destination != null) {
        this.save(repo);
      }
      return repo;
    } catch (OWLOntologyCreationException ex) {
      Logger.getLogger(OwlApiFileGateway.class.getName()).log(Level.SEVERE, null, ex);
      return null;
    }
  }
Example #13
0
  /**
   * Evaluates the query record against its relevant ontology fragment using the summarised
   * HermiT-backed checker, then marks the record as processed. Does nothing further when no
   * relevant fragment is found.
   */
  @Override
  public void evaluate(QueryRecord queryRecord) {
    OWLOntology knowledgebase = relevantPart(queryRecord);
    if (knowledgebase == null) {
      Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
      return;
    }

    // Log the ABox/TBox split of the fragment.
    int aboxCount = knowledgebase.getABoxAxioms(true).size();
    Utility.logDebug(
        "ABox axioms: " + aboxCount + " TBox axioms: "
            + (knowledgebase.getAxiomCount() - aboxCount));

    Timer timer = new Timer();
    Checker summarisedChecker = new HermitSummaryFilter(queryRecord, properties.getToCallHermiT());
    summarisedChecker.check(queryRecord.getGapAnswers());
    summarisedChecker.dispose();
    Utility.logDebug("Total time for full reasoner: " + timer.duration());
    queryRecord.markAsProcessed();
    Utility.logDebug("Difficulty of this query: " + queryRecord.getDifficulty());
  }
  /**
   * Retrieves the rooms to be inserted in the two room lists and fills {@code roomToIRI}, mapping
   * each room's display name (the value of its {@code classID} annotation) to the room's entity
   * IRI. The map is later used to pass entity IRIs into the Jess working memory when the
   * corresponding room name is selected in the rooms combo box.
   *
   * <p>Fix: the {@code <IRI>} substring and the annotation value string were each computed twice
   * per iteration; both are now extracted once into locals.
   */
  public void retrieveRoomsMap() {
    for (OWLSubClassOfAxiom scoAx :
        topIxOnt.getSubClassAxiomsForSuperClass(
            OWLFactory.getOWLClass(
                IRI.create(
                    "http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#Room")))) {
      // The subclass renders as "<IRI>" — extract the IRI between the angle brackets once.
      String subClassText = scoAx.getSubClass().toString();
      String subClassIri =
          subClassText.substring(subClassText.indexOf('<') + 1, subClassText.indexOf('>'));

      Set<OWLAnnotationAssertionAxiom> annotations =
          topIxOnt.getAnnotationAssertionAxioms(IRI.create(subClassIri));
      for (OWLAnnotationAssertionAxiom aaAx : annotations) {
        if (aaAx.getProperty()
            .toString()
            .equals(
                "<http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#classID>")) {
          // The value renders as "name"^^type — keep the text between the opening quote
          // and the quote preceding '^'.
          String value = aaAx.getValue().toString();
          roomToIRI.put(value.substring(1, value.indexOf('^') - 1), subClassIri);
        }
      }
    }
  }
Example #15
0
  /**
   * Loads the EDAM ontology from the given file and converts every EDAM class into a
   * {@link Concept}, keyed by its {@link EdamUri}.
   *
   * @param edamPath path to the EDAM OWL file
   * @return insertion-ordered map from EDAM URI to concept
   * @throws OWLOntologyCreationException if the ontology cannot be loaded
   * @throws IllegalStateException if a concept has an empty label or a duplicate key occurs
   */
  public static Map<EdamUri, Concept> load(String edamPath) throws OWLOntologyCreationException {

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(edamPath));

    // The ontology IRI is the prefix used to recognise EDAM concept IRIs.
    String prefix = ontology.getOntologyID().getOntologyIRI().get().toString();

    return ontology
        .classesInSignature()
        .filter(c -> EdamUri.isEdamUri(c.getIRI().toString(), prefix))
        .collect(
            Collectors.toMap(
                c -> new EdamUri(c.getIRI().toString(), prefix),
                c -> {
                  // Build the concept from the class's annotations: label, deprecation
                  // flag, oboInOwl synonyms (exact/narrow/broad), definition, comment.
                  Concept concept = new Concept();
                  EntitySearcher.getAnnotations(c, ontology)
                      .forEachOrdered(
                          a -> {
                            if (a.getProperty().isLabel())
                              concept.setLabel(a.getValue().asLiteral().get().getLiteral());
                            else if (a.getProperty().isDeprecated()) concept.setObsolete(true);
                            else if (a.getProperty()
                                    .toStringID()
                                    .equals(
                                        "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym")
                                && a.getValue().asLiteral().isPresent())
                              concept.addExactSynonym(a.getValue().asLiteral().get().getLiteral());
                            else if (a.getProperty()
                                    .toStringID()
                                    .equals(
                                        "http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym")
                                && a.getValue().asLiteral().isPresent())
                              concept.addNarrowSynonym(a.getValue().asLiteral().get().getLiteral());
                            else if (a.getProperty()
                                    .toStringID()
                                    .equals(
                                        "http://www.geneontology.org/formats/oboInOwl#hasBroadSynonym")
                                && a.getValue().asLiteral().isPresent())
                              concept.addBroadSynonym(a.getValue().asLiteral().get().getLiteral());
                            else if (a.getProperty()
                                    .toStringID()
                                    .equals(
                                        "http://www.geneontology.org/formats/oboInOwl#hasDefinition")
                                && a.getValue().asLiteral().isPresent())
                              concept.setDefinition(a.getValue().asLiteral().get().getLiteral());
                            else if (a.getProperty().isComment()
                                && a.getValue().asLiteral().isPresent())
                              concept.setComment(a.getValue().asLiteral().get().getLiteral());
                          });
                  // Every EDAM concept must carry a label.
                  if (concept.getLabel().isEmpty())
                    throw new IllegalStateException(
                        String.format("Label of concept %s is empty", c.getIRI()));
                  return concept;
                },
                (u, v) -> {
                  // Two classes mapping to the same EdamUri indicates a corrupt ontology.
                  throw new IllegalStateException(String.format("Duplicate key %s", u));
                },
                LinkedHashMap::new));
  }
 /**
  * Returns the first loaded ontology whose ontology IRI equals the given document IRI, or
  * {@code null} when none matches.
  */
 private OWLOntology findOntology(List<OWLOntology> loadedOntologies, IRI documentIRI) {
   for (OWLOntology candidate : loadedOntologies) {
     if (documentIRI.equals(candidate.getOntologyID().getOntologyIRI())) {
       return candidate;
     }
   }
   return null;
 }
 /** Rebuilds {@code changes}: one RemoveAxiom per disjoint-classes axiom in every ontology. */
 private void generateChanges() {
   changes = new ArrayList<OWLOntologyChange>();
   for (OWLOntology ontology : ontologies) {
     for (OWLClassAxiom axiom : ontology.getAxioms(AxiomType.DISJOINT_CLASSES)) {
       changes.add(new RemoveAxiom(ontology, axiom));
     }
   }
 }
  /**
   * Creates an ontology and tries each registered parser in turn on the document source,
   * returning on the first success. Failed parse attempts are recorded; if no parser succeeds,
   * an {@link UnparsableOntologyException} carrying all recorded failures is thrown.
   *
   * @param documentSource source of the ontology document
   * @param mediator receives the ontology format on success
   * @return the parsed ontology
   * @throws OWLOntologyCreationException on I/O failure, unloadable import, or when no parser
   *     can handle the document
   */
  public OWLOntology loadOWLOntology(
      OWLOntologyDocumentSource documentSource, final OWLOntologyCreationHandler mediator)
      throws OWLOntologyCreationException {
    // Attempt to parse the ontology by looping through the parsers.  If the
    // ontology is parsed successfully then we break out and return the ontology.
    // I think that this is more reliable than selecting a parser based on a file extension
    // for example (perhaps the parser list could be ordered based on most likely parser, which
    // could be determined by file extension).
    Map<OWLParser, OWLParserException> exceptions =
        new LinkedHashMap<OWLParser, OWLParserException>();
    // Call the super method to create the ontology - this is needed, because
    // we throw an exception if someone tries to create an ontology directly

    OWLOntology existingOntology = null;
    IRI iri = documentSource.getDocumentIRI();
    if (getOWLOntologyManager().contains(iri)) {
      existingOntology = getOWLOntologyManager().getOntology(iri);
    }
    OWLOntologyID ontologyID = new OWLOntologyID();
    OWLOntology ont =
        super.createOWLOntology(ontologyID, documentSource.getDocumentIRI(), mediator);
    // Now parse the input into the empty ontology that we created
    for (final OWLParser parser : getParsers()) {
      try {
        if (existingOntology == null && !ont.isEmpty()) {
          // Junk from a previous parse.  We should clear the ont
          getOWLOntologyManager().removeOntology(ont);
          ont = super.createOWLOntology(ontologyID, documentSource.getDocumentIRI(), mediator);
        }
        OWLOntologyFormat format = parser.parse(documentSource, ont);
        mediator.setOntologyFormat(ont, format);
        return ont;
      } catch (IOException e) {
        // No hope of any parsers working?
        // First clean up
        getOWLOntologyManager().removeOntology(ont);
        throw new OWLOntologyCreationIOException(e);
      } catch (UnloadableImportException e) {
        // First clean up
        getOWLOntologyManager().removeOntology(ont);
        throw e;
      } catch (OWLParserException e) {
        // Record this attempts and continue trying to parse.
        exceptions.put(parser, e);
      } catch (RuntimeException e) {
        // Clean up and rethrow
        getOWLOntologyManager().removeOntology(ont);
        throw e;
      }
    }
    // All parsers failed: discard the scratch ontology unless one pre-existed under this IRI.
    if (existingOntology == null) {
      getOWLOntologyManager().removeOntology(ont);
    }
    // We haven't found a parser that could parse the ontology properly.  Throw an
    // exception whose message contains the stack traces from all of the parsers
    // that we have tried.
    throw new UnparsableOntologyException(documentSource.getDocumentIRI(), exceptions);
  }
Example #19
0
 /** Builds the test axioms: a single declaration of the class {@code <ontology-iri>/Office}. */
 @Override
 protected Set<? extends OWLAxiom> createAxioms() {
   OWLOntology ont = getOWLOntology("Ont");
   IRI officeIri = IRI.create(ont.getOntologyID().getOntologyIRI() + "/Office");
   OWLClass office = getFactory().getOWLClass(officeIri);
   Set<OWLAxiom> result = new HashSet<OWLAxiom>();
   result.add(getFactory().getOWLDeclarationAxiom(office));
   return result;
 }
Example #20
0
  /**
   * Materializes each class, data-property and object-property predicate of the ontology into
   * the configured output, one predicate at a time.
   *
   * <p>Fix: the {@code catch (OWLOntologyCreationException)} block was redundant — it performed
   * exactly the same handling as the following {@code catch (Exception)}; the two are merged.
   *
   * @throws NullPointerException if no ontology file was configured
   */
  private void runWithSeparateFiles() {
    if (owlFile == null) {
      throw new NullPointerException("You have to specify an ontology file!");
    }

    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = null;
    OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance();
    try {
      ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));

      if (disableReasoning) {
        /*
         * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
         */
        ontology = extractDeclarations(manager, ontology);
      }

      // Collect one predicate per class, data property and object property in the signature.
      Collection<Predicate> predicates = new ArrayList<>();
      for (OWLClass owlClass : ontology.getClassesInSignature()) {
        predicates.add(obdaDataFactory.getClassPredicate(owlClass.getIRI().toString()));
      }
      for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) {
        predicates.add(
            obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString()));
      }
      for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) {
        predicates.add(
            obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString()));
      }

      OBDAModel obdaModel = loadMappingFile(mappingFile);

      Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology);
      obdaModel.declareAll(inputOntology.getVocabulary());

      // Materialize predicate by predicate, reporting progress on stderr.
      int numPredicates = predicates.size();
      int i = 1;
      for (Predicate predicate : predicates) {
        System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates));
        serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format);
        i++;
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
 /**
  * Creates a parser for DL queries over the given root ontology and its imports closure.
  * The supplied short form provider determines how entity names are rendered and matched.
  */
 public DLQueryParser(OWLOntology rootOntology, ShortFormProvider shortFormProvider) {
   this.rootOntology = rootOntology;
   // The bidirectional provider maps both ways between entities and the short forms
   // generated by the input provider, across the whole imports closure.
   bidiShortFormProvider =
       new BidirectionalShortFormProviderAdapter(
           rootOntology.getOWLOntologyManager(),
           rootOntology.getImportsClosure(),
           shortFormProvider);
 }
 /**
  * Visits an ontology: checks the ontology-level annotations, then the annotations of every
  * axiom of every type, dispatching each axiom back into this visitor.
  */
 public void visit(OWLOntology ontology) {
   checkOccurrence(ontology.getAnnotations());
   for (AxiomType<?> type : AxiomType.AXIOM_TYPES) {
     for (OWLAxiom axiom : ontology.getAxioms(type)) {
       checkOccurrence(axiom.getAnnotations());
       axiom.accept(this);
     }
   }
   // Reset per-ontology bookkeeping.
   singleAppearance.clear();
 }
 /**
  * Writes the rule section for every ontology that asserts the given SWRL rule.
  *
  * @return the set of axioms written (the rule itself, when at least one ontology contains it)
  */
 public Set<OWLAxiom> write(SWRLRule rule) {
   Set<OWLAxiom> written = new HashSet<OWLAxiom>(1);
   for (OWLOntology ontology : getOntologies()) {
     if (!ontology.containsAxiom(rule)) {
       continue;
     }
     writeSection(RULE, Collections.singleton(rule), "", true, ontology);
     written.add(rule);
   }
   return written;
 }
 /** An anonymous ontology must have an ID but no ontology/version/default-document IRI. */
 @Test
 public void testCreateAnonymousOntology() throws Exception {
   OWLOntologyManager mngr = Factory.getManager();
   OWLOntology anonOntology = mngr.createOntology();
   assertNotNull("ontology should not be null", anonOntology);
   assertNotNull("ontology id should not be null", anonOntology.getOntologyID());
   assertNull(anonOntology.getOntologyID().getDefaultDocumentIRI());
   assertNull(anonOntology.getOntologyID().getOntologyIRI());
   assertNull(anonOntology.getOntologyID().getVersionIRI());
   // Even anonymous ontologies get a document IRI from the manager.
   assertNotNull("iri should not be null", mngr.getOntologyDocumentIRI(anonOntology));
 }
  /**
   * Performs a dual retrieval: for each sub-property of {@code :DeclarativeProperty} (a
   * "category"), reads its name annotation, then walks that category's own sub-properties
   * ("entries") and records, per entry, its category name
   * ({@code propEntryNameToPropCatName}) and its IRI ({@code propEntryNametoPropEntryIRI}).
   *
   * <p>NOTE(review): the parsing below relies on the OWL API's {@code toString()} rendering —
   * properties as {@code <IRI>} and annotation values as {@code "name"^^type}. Verify against
   * the OWL API version in use before changing any of the index arithmetic.
   */
  public void retrieveSubObjectPropertyAxioms() {
    // this method is to perform a dual operation; it shall retrieve all the declarative object
    // properties categories' annotations,
    // thus filling the objPropCategories Set, while parallelly will, for each category entry
    // retrieve the object properties themselves
    // and adding their entries in the objPropEntries Set.
    Set<OWLSubObjectPropertyOfAxiom> tmpSet;
    OWLObjectPropertyExpression tempDeclarativePropertyClass =
        OWLFactory.getOWLObjectProperty(":DeclarativeProperty", topIxPrefixManager);
    tmpSet =
        topIxOnt.getObjectSubPropertyAxiomsForSuperProperty(
            tempDeclarativePropertyClass); // OWLFactory.getOWLObjectProperty(IRI.create("http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#DeclarativeProperty")));
    Set<OWLSubObjectPropertyOfAxiom> tmpSet2;

    Set<OWLAnnotationAssertionAxiom> tmpAnnSet1;
    Set<OWLAnnotationAssertionAxiom> tmpAnnSet2;
    // to become class variables.

    for (OWLSubObjectPropertyOfAxiom sopAx : tmpSet) {
      // Sub-property renders as "<IRI>"; strip the angle brackets to get the IRI text.
      String tmpS = sopAx.getSubProperty().toString();
      // categories...
      tmpAnnSet1 =
          topIxOnt.getAnnotationAssertionAxioms(
              IRI.create(
                  tmpS.substring(
                      1, tmpS.indexOf('>')))); // this set only contains one annotation per entry
      for (OWLAnnotationAssertionAxiom aaAx : tmpAnnSet1) {
        // Annotation value renders as "name"^^type; keep the text inside the quotes.
        String currentObjPropCatName =
            aaAx.getValue().toString().substring(1, aaAx.getValue().toString().indexOf('^') - 1);

        // Second level: the entries (sub-properties) of this category.
        tmpSet2 =
            topIxOnt.getObjectSubPropertyAxiomsForSuperProperty(
                OWLFactory.getOWLObjectProperty(IRI.create(tmpS.substring(1, tmpS.length() - 1))));
        for (OWLSubObjectPropertyOfAxiom sopAx2 : tmpSet2) {
          String tmpS2 = sopAx2.getSubProperty().toString();
          tmpAnnSet2 =
              topIxOnt.getAnnotationAssertionAxioms(
                  IRI.create(tmpS2.substring(1, tmpS2.length() - 1)));

          for (OWLAnnotationAssertionAxiom aaAx2 : tmpAnnSet2) {
            String currentObjPropEntryName =
                aaAx2
                    .getValue()
                    .toString()
                    .substring(1, aaAx2.getValue().toString().indexOf('^') - 1);
            // Record entry -> category and entry -> IRI.
            propEntryNameToPropCatName.put(currentObjPropEntryName, currentObjPropCatName);
            propEntryNametoPropEntryIRI.put(
                currentObjPropEntryName, tmpS2.substring(1, tmpS2.length() - 1));
          }
        }
      }
    }
  }
 /** An ontology created with an explicit IRI must report that IRI everywhere it is exposed. */
 @Test
 public void testCreateOntologyWithIRI() throws Exception {
   OWLOntologyManager mngr = Factory.getManager();
   IRI expectedIri = IRI("http://www.semanticweb.org/ontologies/ontology");
   OWLOntology created = mngr.createOntology(expectedIri);
   assertNotNull("ontology should not be null", created);
   assertNotNull("ontology id should not be null", created.getOntologyID());
   assertEquals(expectedIri, created.getOntologyID().getDefaultDocumentIRI());
   assertEquals(expectedIri, created.getOntologyID().getOntologyIRI());
   assertNull(created.getOntologyID().getVersionIRI());
   assertEquals(expectedIri, mngr.getOntologyDocumentIRI(created));
 }
 /**
  * A class expression is atomic when it is a named class for which no ontology in scope
  * contains any axioms (under the EXCLUDED imports setting).
  */
 private boolean isAtomic(OWLClassExpression classExpression) {
   if (classExpression.isAnonymous()) {
     return false;
   }
   OWLClass namedClass = (OWLClass) classExpression;
   for (OWLOntology ontology : ontologies) {
     if (!ontology.getAxioms(namedClass, EXCLUDED).isEmpty()) {
       return false;
     }
   }
   return true;
 }
Example #28
0
 /**
  * Creates the launcher over a private copy of the given ontology, so later processing never
  * mutates the caller's ontology. On copy failure the internal ontology stays {@code null}.
  */
 public LsignatureExtractorViaInverseRewritingLauncher(
     OWLOntology ontology, LogicFragment fragment) {
   this.fragment = fragment;
   this.ontology = null;
   try {
     // Copy all axioms into a fresh ontology owned by the same manager.
     manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
 }
 /**
  * Collects the DL constructs used by the logical axioms of all ontologies (always including
  * AL), prunes them, and returns them sorted by the construct comparator.
  */
 private List<Construct> getOrderedConstructs() {
   // Fresh accumulation; visiting each axiom records the constructs it uses.
   constructs.clear();
   constructs.add(AL);
   for (OWLOntology ontology : ontologies) {
     for (OWLAxiom axiom : ontology.getLogicalAxioms()) {
       axiom.accept(this);
     }
   }
   pruneConstructs();
   List<Construct> ordered = new ArrayList<Construct>(constructs);
   Collections.sort(ordered, new ConstructComparator());
   return ordered;
 }
Example #30
0
 /**
  * Creates the launcher over a private copy of the given ontology, so later processing never
  * mutates the caller's ontology. On copy failure the internal ontology stays {@code null}.
  */
 public LsignatureExtractorLauncher(
     OWLOntology ontology, LogicFragment fragment, boolean integrateRangesFirst) {
   this.fragment = fragment;
   this.integrateRangesFirst = integrateRangesFirst;
   this.ontology = null;
   try {
     // Copy all axioms into a fresh ontology under the same manager.
     OWLOntologyManager manager = ontology.getOWLOntologyManager();
     this.ontology = manager.createOntology();
     manager.addAxioms(this.ontology, ontology.getAxioms());
   } catch (OWLOntologyCreationException e) {
     e.printStackTrace();
   }
 }