@Override
public void removeSubDataProperty(String subDataPropertyName, String dataPropertyName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLDataProperty subDataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + subDataPropertyName));
  OWLAxiom axiom = factory.getOWLSubDataPropertyOfAxiom(subDataProperty, dataProperty);
  axiom = changeManager.getAnnotatedAxiom(axiom);
  RemoveAxiom removeAxiom = new RemoveAxiom(localContext, axiom);
  try {
    synchronized (this) {
      changeManager.validateRemovalChange(axiom);
      manager.applyChange(removeAxiom);
    }
  } catch (RemovalException ex) {
    logger.severe(ex.getMessage()
        + " Change ( removeSubDataProperty " + subDataPropertyName
        + " of " + dataPropertyName + ") will not be applied.");
  }
}
@SuppressWarnings("javadoc")
public class BadDataPropertyRT extends TestCase {

  private static final OWLDataFactory factory = Factory.getFactory();
  public static final String NS = "http://test.org/DataPropertyRestriction.owl";
  public static final OWLDataProperty P = factory.getOWLDataProperty(IRI.create(NS + "#p"));
  public static final OWLClass A = factory.getOWLClass(IRI.create(NS + "#A"));

  @Test
  public void testBadDataproperty() throws Exception {
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    OWLOntology ontology = manager.createOntology(IRI.create(NS));
    OWLClassExpression restriction =
        factory.getOWLDataSomeValuesFrom(
            P, factory.getOWLDatatype(XSDVocabulary.DURATION.getIRI()));
    OWLAxiom axiom = factory.getOWLSubClassOfAxiom(A, restriction);
    manager.addAxiom(ontology, axiom);
    assertTrue(ontology.containsDataPropertyInSignature(P.getIRI()));
    StringDocumentTarget t = new StringDocumentTarget();
    manager.saveOntology(ontology, new RDFXMLOntologyFormat(), t);
    manager.removeOntology(ontology);
    ontology = manager.loadOntologyFromOntologyDocument(new StringDocumentSource(t.toString()));
    assertTrue(ontology.containsDataPropertyInSignature(P.getIRI()));
  }
}
/**
 * Gets the IconBox for the specified image IRI.
 *
 * @param iri The IRI pointing to the image.
 * @return The icon box containing the image.
 * @throws MalformedURLException if the IRI cannot be converted to a URL.
 */
private IconBox getImageBox(IRI iri) throws MalformedURLException {
  ImageIcon imageIcon = new ImageIcon(iri.toURI().toURL());
  imageIcon.getImageLoadStatus();
  IconBox iconBox = new IconBox(imageIcon, new HTTPLink(iri.toURI()));
  iconBox.setMaxHeight(50);
  return iconBox;
}
private void translateTaxon(Taxon taxon, OWLNamedIndividual otu) {
  this.taxonOTUToOWLMap.put(taxon, otu);
  this.addClass(otu, this.factory.getOWLClass(IRI.create(CDAO.OTU)));
  if (StringUtils.isNotBlank(taxon.getPublicationName())) {
    final OWLLiteral label = this.factory.getOWLLiteral(taxon.getPublicationName());
    this.addAnnotation(OWLRDFVocabulary.RDFS_LABEL.getIRI(), otu.getIRI(), label);
  }
  if (taxon.getValidName() != null) {
    final IRI taxonIRI = this.convertOBOIRI(taxon.getValidName().getID());
    this.addClass(
        this.factory.getOWLNamedIndividual(taxonIRI),
        this.factory.getOWLClass(IRI.create(PHENOSCAPE.TAXON)));
    this.addPropertyAssertion(
        IRI.create(CDAO.HAS_EXTERNAL_REFERENCE),
        otu,
        this.factory.getOWLNamedIndividual(taxonIRI));
  }
  if (StringUtils.isNotBlank(taxon.getComment())) {
    final OWLLiteral comment = factory.getOWLLiteral(taxon.getComment());
    this.addAnnotation(OWLRDFVocabulary.RDFS_COMMENT.getIRI(), otu.getIRI(), comment);
  }
  for (Specimen specimen : taxon.getSpecimens()) {
    final OWLAnonymousIndividual owlSpecimen = this.factory.getOWLAnonymousIndividual();
    this.addPropertyAssertion(IRI.create(DWC.HAS_SPECIMEN), otu, owlSpecimen);
    this.translateSpecimen(specimen, owlSpecimen);
  }
}
@Test
public void fragmentGetLocalNameTest() {
  assertEquals("path", OwlUtils.getLocalName(IRI.create("http://host/path#")));
  assertEquals("f", OwlUtils.getLocalName(IRI.create("http://host/path#f")));
  assertEquals("fragment", OwlUtils.getLocalName(IRI.create("http://host/path#fragment")));
  assertEquals("fragment#", OwlUtils.getLocalName(IRI.create("http://host/path#fragment#")));
}
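// A minimal sketch, not the actual OwlUtils implementation, of a getLocalName that is consistent
// with the expectations asserted in the test above: prefer the IRI fragment, and fall back to the
// last path segment when the fragment is empty. The method shown here is illustrative only.
public static String getLocalName(IRI iri) {
  String s = iri.toString();
  int hash = s.indexOf('#');
  if (hash >= 0 && hash < s.length() - 1) {
    // Non-empty fragment: "http://host/path#f" -> "f", "http://host/path#fragment#" -> "fragment#"
    return s.substring(hash + 1);
  }
  // Empty fragment: strip the trailing '#' and return the last path segment,
  // e.g. "http://host/path#" -> "path".
  String path = hash >= 0 ? s.substring(0, hash) : s;
  return path.substring(path.lastIndexOf('/') + 1);
}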
public void testIgnoreAnnotations() throws Exception {
  OWLOntologyManager man = getManager(); // OWLManager.createOWLOntologyManager();
  OWLOntology ont = man.createOntology();
  OWLDataFactory df = man.getOWLDataFactory();
  OWLClass clsA = df.getOWLClass(IRI.create("http://ont.com#A"));
  OWLClass clsB = df.getOWLClass(IRI.create("http://ont.com#B"));
  OWLSubClassOfAxiom sca = df.getOWLSubClassOfAxiom(clsA, clsB);
  man.addAxiom(ont, sca);
  OWLAnnotationProperty rdfsComment = df.getRDFSComment();
  OWLLiteral lit = df.getOWLLiteral("Hello world");
  OWLAnnotationAssertionAxiom annoAx1 =
      df.getOWLAnnotationAssertionAxiom(rdfsComment, clsA.getIRI(), lit);
  man.addAxiom(ont, annoAx1);
  OWLAnnotationPropertyDomainAxiom annoAx2 =
      df.getOWLAnnotationPropertyDomainAxiom(rdfsComment, clsA.getIRI());
  man.addAxiom(ont, annoAx2);
  OWLAnnotationPropertyRangeAxiom annoAx3 =
      df.getOWLAnnotationPropertyRangeAxiom(rdfsComment, clsB.getIRI());
  man.addAxiom(ont, annoAx3);
  OWLAnnotationProperty myComment =
      df.getOWLAnnotationProperty(IRI.create("http://ont.com#myComment"));
  OWLSubAnnotationPropertyOfAxiom annoAx4 =
      df.getOWLSubAnnotationPropertyOfAxiom(myComment, rdfsComment);
  man.addAxiom(ont, annoAx4);
  reload(ont, new RDFXMLOntologyFormat());
  reload(ont, new OWLXMLOntologyFormat());
  reload(ont, new TurtleOntologyFormat());
  reload(ont, new OWLFunctionalSyntaxOntologyFormat());
}
@Override
public KuabaRepository load(String url) throws RepositoryLoadException {
  IRI iri;
  File f = new File(url);
  if (!url.contains(":/")) {
    iri = IRI.create(f);
  } else {
    iri = IRI.create(url);
  }
  try {
    OWLOntology inst = manager.loadOntologyFromOntologyDocument(iri);
    if (inst == null) {
      throw new RepositoryLoadException(url, "Invalid Location.");
    }
    if (inst.getOntologyID().getOntologyIRI() == null) {
      // An anonymous ontology was loaded; anonymous ontologies are not supported by the Kuaba
      // subsystem.
      manager.removeOntology(inst);
      throw new RepositoryLoadException(url, "It is probably an incompatible or malformed file.");
    }
    KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
    repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
    fileMap.put(repo, f);
    return repo;
  } catch (OWLOntologyAlreadyExistsException ex) {
    // The ontology is already loaded; return the repository that wraps it.
    OWLOntologyID id = ex.getOntologyID();
    return repoMap.get(id.getOntologyIRI());
  } catch (OWLOntologyCreationException ex) {
    throw new RepositoryLoadException(url, "Invalid Location.");
  }
}
private void getPelletIncrementalClassifierRunTime(String baseOnt, String ontDir)
    throws Exception {
  System.out.println("Using Pellet Incremental Classifier...");
  GregorianCalendar start = new GregorianCalendar();
  File ontFile = new File(baseOnt);
  IRI documentIRI = IRI.create(ontFile);
  OWLOntology baseOntology = OWL.manager.loadOntology(documentIRI);
  IncrementalClassifier classifier = new IncrementalClassifier(baseOntology);
  classifier.classify();
  System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
  System.out.println("Time taken for base ontology (millis): " + Util.getElapsedTime(start));
  File ontDirPath = new File(ontDir);
  File[] allFiles = ontDirPath.listFiles();
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  addTripsBaseOntologies(manager);
  int count = 1;
  for (File owlFile : allFiles) {
    IRI owlDocumentIRI = IRI.create(owlFile);
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(owlDocumentIRI);
    Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
    for (OWLLogicalAxiom axiom : axioms) {
      OWL.manager.applyChange(new AddAxiom(baseOntology, axiom));
    }
    System.out.println("\nLogical axioms: " + baseOntology.getLogicalAxiomCount());
    System.out.println(count + " file: " + owlFile.getName());
    // GregorianCalendar start2 = new GregorianCalendar();
    classifier.classify();
    // System.out.println("Time taken (millis): " + Util.getElapsedTime(start2));
    manager.removeOntology(ontology);
    count++;
  }
  System.out.println("\nTotal time taken (millis): " + Util.getElapsedTime(start));
}
public KuabaRepository createNewRepository(String url, File destination) {
  IRI iri;
  if (url == null) {
    iri = IRI.generateDocumentIRI();
  } else {
    iri = IRI.create(url);
  }
  try {
    OWLOntology inst = manager.createOntology(iri);
    OWLImportsDeclaration imp =
        manager.getOWLDataFactory().getOWLImportsDeclaration(IRI.create(ONTOLOGY_URL));
    AddImport addi = new AddImport(inst, imp);
    manager.applyChange(addi);
    KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
    repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
    fileMap.put(repo, destination);
    TemplateGenerator.generateRootQuestion(repo);
    if (destination != null) {
      this.save(repo);
    }
    return repo;
  } catch (OWLOntologyCreationException ex) {
    Logger.getLogger(OwlApiFileGateway.class.getName()).log(Level.SEVERE, null, ex);
  }
  return null;
}
/**
 * Initialization consists of:
 *
 * <ul>
 *   <li>Setting aboxOntology, if not set - defaults to a new ontology using tbox.IRI as base.
 *       Adds an import of the tbox to the abox.
 *   <li>Setting queryOntology, if not set - adds an import of the abox to the query ontology.
 * </ul>
 *
 * @throws OWLOntologyCreationException if the abox or query ontology cannot be created
 */
private void init() throws OWLOntologyCreationException {
  // Import chain: reasoner -> query -> abox -> tbox
  if (aboxOntology == null) {
    LOG.debug("Creating abox ontology. mgr = " + getOWLOntologyManager());
    IRI ontologyIRI = IRI.create(tboxOntology.getOntologyID().getOntologyIRI() + "__abox");
    aboxOntology = getOWLOntologyManager().getOntology(ontologyIRI);
    if (aboxOntology != null) {
      LOG.warn("Clearing existing abox ontology");
      getOWLOntologyManager().removeOntology(aboxOntology);
    }
    aboxOntology = getOWLOntologyManager().createOntology(ontologyIRI);
    AddImport ai =
        new AddImport(
            aboxOntology,
            getOWLDataFactory()
                .getOWLImportsDeclaration(tboxOntology.getOntologyID().getOntologyIRI()));
    getOWLOntologyManager().applyChange(ai);
  }

  // Add a listener to the abox so the modified flag is set on any change to it.
  OWLOntologyChangeBroadcastStrategy strategy =
      new SpecificOntologyChangeBroadcastStrategy(aboxOntology);
  OWLOntologyChangeListener listener =
      new OWLOntologyChangeListener() {
        @Override
        public void ontologiesChanged(List<? extends OWLOntologyChange> changes)
            throws OWLException {
          for (OWLOntologyChange owlOntologyChange : changes) {
            if (aboxOntology.equals(owlOntologyChange.getOntology())) {
              setAboxModified(true);
            }
          }
        }
      };
  aboxOntology.getOWLOntologyManager().addOntologyChangeListener(listener, strategy);

  if (queryOntology == null) {
    // Imports: {q imports a, a imports t}
    LOG.debug("Creating query ontology");
    IRI ontologyIRI = IRI.create(tboxOntology.getOntologyID().getOntologyIRI() + "__query");
    queryOntology = getOWLOntologyManager().getOntology(ontologyIRI);
    if (queryOntology == null) {
      queryOntology = getOWLOntologyManager().createOntology(ontologyIRI);
    }
    AddImport ai =
        new AddImport(
            queryOntology,
            getOWLDataFactory()
                .getOWLImportsDeclaration(aboxOntology.getOntologyID().getOntologyIRI()));
    getOWLOntologyManager().applyChange(ai);
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug(modelId + " manager(T) = " + tboxOntology.getOWLOntologyManager());
    LOG.debug(modelId + " manager(A) = " + aboxOntology.getOWLOntologyManager());
    LOG.debug(modelId + " manager(Q) = " + queryOntology.getOWLOntologyManager());
    LOG.debug(modelId + " id(T) = " + tboxOntology.getOntologyID().getOntologyIRI());
    LOG.debug(modelId + " id(A) = " + aboxOntology.getOntologyID().getOntologyIRI());
    LOG.debug(modelId + " id(Q) = " + queryOntology.getOntologyID().getOntologyIRI());
  }
}
public OWLOntology getMergedOntology() {
  final IRI mergedOntologyIRI =
      IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged");
  final OWLOntologyManager mm = controller.getOWLOntologyManager();
  if (mm.contains(mergedOntologyIRI)) {
    return mm.getOntology(mergedOntologyIRI);
  } else {
    try {
      final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI);
      mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat());
      final String mergedOntologyFileName =
          mergedOntologyIRI
                  .toURI()
                  .toString()
                  .substring(mergedOntologyIRI.toURI().toString().lastIndexOf("/") + 1)
              + ".owl";
      mm.setOntologyDocumentIRI(
          mergedOntology,
          IRI.create(
              controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName));
      mm.applyChange(
          new AddImport(
              mergedOntology,
              mm.getOWLDataFactory()
                  .getOWLImportsDeclaration(
                      queryOntology.getOntologyID().getDefaultDocumentIRI())));
      return mergedOntology;
    } catch (OWLOntologyCreationException e) {
      // TODO Auto-generated catch block
      e.printStackTrace();
      return null;
    }
  }
}
private void translatePhenotype(Phenotype phenotype, OWLClass owlPhenotype) {
  this.phenotypeToOWLMap.put(phenotype, owlPhenotype);
  if (phenotype.getEntity() == null || phenotype.getQuality() == null) {
    return;
  }
  final OWLObjectProperty inheresIn =
      this.factory.getOWLObjectProperty(IRI.create(OBO_REL.INHERES_IN));
  final OWLClassExpression entity = this.convertOBOClass(phenotype.getEntity());
  final OWLClassExpression qualityTerm = this.convertOBOClass(phenotype.getQuality());
  final OWLClassExpression quality;
  if (phenotype.getRelatedEntity() != null) {
    final OWLClassExpression relatedEntity = this.convertOBOClass(phenotype.getRelatedEntity());
    final OWLObjectProperty towards =
        this.factory.getOWLObjectProperty(IRI.create(OBO_REL.TOWARDS));
    quality =
        this.factory.getOWLObjectIntersectionOf(
            qualityTerm, this.factory.getOWLObjectSomeValuesFrom(towards, relatedEntity));
  } else {
    quality = qualityTerm;
  }
  final OWLClassExpression eq =
      this.factory.getOWLObjectIntersectionOf(
          quality, this.factory.getOWLObjectSomeValuesFrom(inheresIn, entity));
  // TODO measurements, counts, etc.
  this.ontologyManager.addAxiom(
      this.ontology, this.factory.getOWLEquivalentClassesAxiom(owlPhenotype, eq));
}
private void translateState(State state, OWLNamedIndividual owlState) {
  this.stateToOWLMap.put(state, owlState);
  this.addClass(owlState, this.factory.getOWLClass(IRI.create(CDAO.STANDARD_STATE)));
  if (StringUtils.isNotBlank(state.getLabel())) {
    final OWLLiteral label = this.factory.getOWLLiteral(state.getLabel());
    this.addAnnotation(OWLRDFVocabulary.RDFS_LABEL.getIRI(), owlState.getIRI(), label);
  }
  if (StringUtils.isNotBlank(state.getComment())) {
    final OWLLiteral comment = factory.getOWLLiteral(state.getComment());
    this.addAnnotation(OWLRDFVocabulary.RDFS_COMMENT.getIRI(), owlState.getIRI(), comment);
  }
  int phenotypeIndex = 0;
  final OWLObjectProperty denotes = this.factory.getOWLObjectProperty(IRI.create(IAO.DENOTES));
  for (Phenotype phenotype : state.getPhenotypes()) {
    phenotypeIndex++;
    final IRI phenotypeIRI =
        IRI.create(owlState.getIRI().toURI().toString() + "/phenotype/" + phenotypeIndex);
    final OWLClass owlPhenotype = this.factory.getOWLClass(phenotypeIRI);
    final OWLObjectAllValuesFrom denotesOnlyPhenotype =
        this.factory.getOWLObjectAllValuesFrom(denotes, owlPhenotype);
    this.ontologyManager.addAxiom(
        ontology, this.factory.getOWLClassAssertionAxiom(denotesOnlyPhenotype, owlState));
    this.translatePhenotype(phenotype, owlPhenotype);
  }
}
public void retrieveRoomsMap() {
  // Retrieves the rooms to be inserted in the two room lists and deposits them in a map whose
  // key is the room name (String) and whose value is the corresponding entity IRI. This map is
  // used to pass entity IRIs into the Jess working memory when constructing facts, whenever the
  // respective room name is selected in the rooms list combo box.
  for (OWLSubClassOfAxiom scoAx :
      topIxOnt.getSubClassAxiomsForSuperClass(
          OWLFactory.getOWLClass(
              IRI.create(
                  "http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#Room")))) {
    String tmpS = scoAx.getSubClass().toString();
    Set<OWLAnnotationAssertionAxiom> tmpAnnSet =
        topIxOnt.getAnnotationAssertionAxioms(
            IRI.create(tmpS.substring(tmpS.indexOf('<') + 1, tmpS.indexOf('>'))));
    for (OWLAnnotationAssertionAxiom aaAx : tmpAnnSet) {
      if (aaAx.getProperty()
          .toString()
          .equals(
              "<http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#classID>")) {
        roomToIRI.put(
            aaAx.getValue().toString().substring(1, aaAx.getValue().toString().indexOf('^') - 1),
            tmpS.substring(tmpS.indexOf('<') + 1, tmpS.indexOf('>')));
      }
    }
  }
}
@Override
public boolean hasDataPropertyStringRange(String dataPropertyName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLDatatype stringDatatype = factory.getOWLDatatype(IRI.create(prefix + "string"));
  OWLAxiom axiom = factory.getOWLDataPropertyRangeAxiom(dataProperty, stringDatatype);
  return localContext.containsAxiomIgnoreAnnotations(axiom);
}
/**
 * Renders an IRI as a full IRI rather than as an IRI that represents an entity in the signature
 * of the imports closure of the active ontology.
 *
 * @param page The page that the IRI should be rendered into.
 * @param iri The IRI to be rendered.
 * @return A list of paragraphs that represent the rendering of the annotation value.
 */
private List<Paragraph> renderExternalIRI(Page page, IRI iri) {
  Paragraph paragraph;
  if (isLinkableAddress(iri)) {
    paragraph = page.addParagraph(iri.toString(), new HTTPLink(iri.toURI()));
  } else {
    paragraph = page.addParagraph(iri.toString());
  }
  return Arrays.asList(paragraph);
}
protected OWLClass getFreshQueryClass(String base) {
  long cnt = 0;
  String iriString = base;
  while (m_ontologyOperator.getOntology().containsClassInSignature(IRI.create(iriString))) {
    // Append an increasing counter until the IRI is no longer in the signature; with Long.MAX_VALUE
    // candidate suffixes this is not formally guaranteed to terminate, but it will in practice.
    iriString = base + (cnt++);
  }
  return OWLManager.getOWLDataFactory().getOWLClass(IRI.create(iriString));
}
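// Hedged usage sketch (not part of the original class): a fresh query class is typically paired
// with a temporary EquivalentClasses axiom so a reasoner can position an anonymous class
// expression in the hierarchy. The base IRI and the "expression" parameter below are illustrative.
protected OWLAxiom buildFreshQueryAxiom(OWLClassExpression expression) {
  OWLClass fresh = getFreshQueryClass("http://example.org/query#Q");
  return OWLManager.getOWLDataFactory().getOWLEquivalentClassesAxiom(fresh, expression);
}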
@Override
public boolean hasDataPropertyDomain(String dataPropertyName, String domainName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLClass cls = factory.getOWLClass(IRI.create(prefix + domainName));
  OWLAxiom axiom = factory.getOWLDataPropertyDomainAxiom(dataProperty, cls);
  return localContext.containsAxiomIgnoreAnnotations(axiom);
}
@Override
public boolean hasSubDataProperty(String subDataPropertyName, String dataPropertyName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLDataProperty subDataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + subDataPropertyName));
  OWLAxiom axiom = factory.getOWLSubDataPropertyOfAxiom(subDataProperty, dataProperty);
  return localContext.containsAxiomIgnoreAnnotations(axiom);
}
@Test
public void testNonCycle3() throws Exception {
  ParserWrapper parser = new ParserWrapper();
  IRI iri = IRI.create(getResource("verification/self_references.obo").getAbsoluteFile());
  OWLGraphWrapper graph = parser.parseToOWLGraph(iri.toString());
  OntologyCheck check = new CycleCheck();
  Collection<CheckWarning> warnings = check.check(graph, graph.getAllOWLObjects());
  assertTrue(warnings.isEmpty());
}
@Override
public void addDataPropertyDomain(String dataPropertyName, String domainName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLClass domain = factory.getOWLClass(IRI.create(prefix + domainName));
  OWLAxiom axiom = factory.getOWLDataPropertyDomainAxiom(dataProperty, domain);
  axiom = changeManager.getAnnotatedAxiom(axiom);
  AddAxiom addAxiom = new AddAxiom(localContext, axiom);
  manager.applyChange(addAxiom);
}
@Override
public void addDataPropertyStringRange(String dataPropertyName) {
  OWLDataFactory factory = manager.getOWLDataFactory();
  OWLDataProperty dataProperty =
      factory.getOWLDataProperty(IRI.create(prefix + dataPropertyName));
  OWLDatatype stringDatatype = factory.getOWLDatatype(IRI.create(prefix + "string"));
  OWLAxiom axiom = factory.getOWLDataPropertyRangeAxiom(dataProperty, stringDatatype);
  axiom = changeManager.getAnnotatedAxiom(axiom);
  AddAxiom addAxiom = new AddAxiom(localContext, axiom);
  manager.applyChange(addAxiom);
}
public static void insertSystemTriples(
    ISparulExecutor executor, String dataGraphName, String metaGraphName) throws Exception {
  logger.info("Inserting Annotation Properties.");
  List<RDFTriple> triples = new ArrayList<RDFTriple>();
  MyVocabulary[] vocabs = {
    MyVocabulary.DC_MODIFIED,
    MyVocabulary.DBM_EDIT_LINK,
    MyVocabulary.DBM_PAGE_ID,
    MyVocabulary.DBM_REVISION,
    MyVocabulary.DBM_OAIIDENTIFIER
  };
  for (MyVocabulary item : vocabs) {
    triples.add(
        new RDFTriple(
            new RDFResourceNode(item.getIRI()),
            new RDFResourceNode(OWLRDFVocabulary.RDF_TYPE.getIRI()),
            new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATION_PROPERTY.getIRI())));
  }
  List<RDFTriple> metaTriples = new ArrayList<RDFTriple>();
  for (RDFTriple item : triples) {
    URI uri = RDFUtil.generateMD5HashUri("http://dbpedia.org/sysvocab/", item);
    RDFResourceNode reifier = new RDFResourceNode(IRI.create(uri));
    metaTriples.add(
        new RDFTriple(
            reifier,
            new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_SOURCE.getIRI()),
            item.getSubject()));
    metaTriples.add(
        new RDFTriple(
            reifier,
            new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_PROPERTY.getIRI()),
            item.getProperty()));
    metaTriples.add(
        new RDFTriple(
            reifier,
            new RDFResourceNode(OWLRDFVocabulary.OWL_ANNOTATED_TARGET.getIRI()),
            item.getObject()));
    metaTriples.add(
        new RDFTriple(
            reifier,
            new RDFResourceNode(MyVocabulary.DBM_EXTRACTED_BY.getIRI()),
            new RDFResourceNode(IRI.create(TBoxExtractor.extractorUri))));
  }
  executor.insert(metaTriples, metaGraphName);
  executor.insert(triples, dataGraphName);
}
public KuabaRepository getSourceRepository(IRI iri) {
  IRI repoIRI = IRI.create(iri.getStart().replaceAll("#", ""));
  KuabaRepository repo = repoMap.get(repoIRI);
  try {
    if (repo == null) {
      repo = this.load(repoIRI.toString());
    }
  } catch (Exception e) {
    System.err.println("Error when trying to get the source repository of " + iri.toString());
    System.err.println("Source repository not found.");
  }
  return repo;
}
public void retrieveSubObjectPropertyAxioms() {
  // This method performs a dual operation: it retrieves the annotations of all declarative
  // object property categories, filling the objPropCategories set, and, for each category entry,
  // it also retrieves the object properties themselves and adds their entries to the
  // objPropEntries set.
  Set<OWLSubObjectPropertyOfAxiom> tmpSet;
  OWLObjectPropertyExpression tempDeclarativePropertyClass =
      OWLFactory.getOWLObjectProperty(":DeclarativeProperty", topIxPrefixManager);
  // tempDeclarativePropertyClass expands to
  // <http://www.semanticweb.org/ontologies/ptyxiaki_v0.6/2011/5/Ontology1308067064597.owl#DeclarativeProperty>
  tmpSet = topIxOnt.getObjectSubPropertyAxiomsForSuperProperty(tempDeclarativePropertyClass);
  Set<OWLSubObjectPropertyOfAxiom> tmpSet2;
  Set<OWLAnnotationAssertionAxiom> tmpAnnSet1;
  Set<OWLAnnotationAssertionAxiom> tmpAnnSet2; // candidates to become class variables
  for (OWLSubObjectPropertyOfAxiom sopAx : tmpSet) {
    String tmpS = sopAx.getSubProperty().toString();
    // Categories: this set only contains one annotation per entry.
    tmpAnnSet1 =
        topIxOnt.getAnnotationAssertionAxioms(IRI.create(tmpS.substring(1, tmpS.indexOf('>'))));
    for (OWLAnnotationAssertionAxiom aaAx : tmpAnnSet1) {
      String currentObjPropCatName =
          aaAx.getValue().toString().substring(1, aaAx.getValue().toString().indexOf('^') - 1);
      tmpSet2 =
          topIxOnt.getObjectSubPropertyAxiomsForSuperProperty(
              OWLFactory.getOWLObjectProperty(IRI.create(tmpS.substring(1, tmpS.length() - 1))));
      for (OWLSubObjectPropertyOfAxiom sopAx2 : tmpSet2) {
        String tmpS2 = sopAx2.getSubProperty().toString();
        tmpAnnSet2 =
            topIxOnt.getAnnotationAssertionAxioms(
                IRI.create(tmpS2.substring(1, tmpS2.length() - 1)));
        for (OWLAnnotationAssertionAxiom aaAx2 : tmpAnnSet2) {
          String currentObjPropEntryName =
              aaAx2
                  .getValue()
                  .toString()
                  .substring(1, aaAx2.getValue().toString().indexOf('^') - 1);
          propEntryNameToPropCatName.put(currentObjPropEntryName, currentObjPropCatName);
          propEntryNametoPropEntryIRI.put(
              currentObjPropEntryName, tmpS2.substring(1, tmpS2.length() - 1));
        }
      }
    }
  }
}
@Override
public boolean equals(Object obj) {
  if (super.equals(obj)) {
    if (!(obj instanceof OWLDataProperty)) {
      return false;
    }
    IRI otherIRI = ((OWLDataProperty) obj).getIRI();
    return otherIRI.equals(iri);
  }
  return false;
}
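// Hedged companion sketch (not in the original source): when equals is keyed on the IRI as above,
// hashCode should agree with it. A minimal version consistent with that contract, assuming the
// same "iri" field, might look like this; the real class may compute its hash differently.
@Override
public int hashCode() {
  return iri.hashCode();
}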
public void applyChanges() {
  EntityCreationPreferences.setUseDefaultBaseIRI(iriBaseSpecifiedIri.isSelected());
  try {
    IRI defaultBase = IRI.create(new URI(iriDefaultBaseField.getText()));
    EntityCreationPreferences.setDefaultBaseIRI(defaultBase);
  } catch (URISyntaxException e) {
    logger.error("Ignoring invalid base IRI ({})", iriDefaultBaseField.getText(), e);
  }
  if (hashButton.isSelected()) {
    EntityCreationPreferences.setDefaultSeparator(SEP_HASH);
  } else if (slashButton.isSelected()) {
    EntityCreationPreferences.setDefaultSeparator(SEP_SLASH);
  } else if (colonButton.isSelected()) {
    EntityCreationPreferences.setDefaultSeparator(SEP_COLON);
  }
  EntityCreationPreferences.setFragmentAutoGenerated(autoIDIriFragment.isSelected());
  EntityCreationPreferences.setGenerateNameLabel(autoIDIriFragment.isSelected());
  EntityCreationPreferences.setGenerateIDLabel(false);
  if (sameAsRendererLabelButton.isSelected()) {
    EntityCreationPreferences.setLabelDescriptorClass(MatchRendererLabelDescriptor.class);
  }
  if (customLabelButton.isSelected()) {
    EntityCreationPreferences.setLabelDescriptorClass(CustomLabelDescriptor.class);
  }
  EntityCreationPreferences.setNameLabelIRI(IRI.create(annotationIriLabel.getText()));
  Object lang = annotationLangSelector.getSelectedItem();
  if (lang != null && !lang.equals("")) {
    EntityCreationPreferences.setNameLabelLang((String) lang);
  } else {
    EntityCreationPreferences.setNameLabelLang(null);
  }
  if (iterativeButton.isSelected()) {
    EntityCreationPreferences.setAutoIDGeneratorClass(IterativeAutoIDGenerator.class);
  }
  if (uniqueIdButton.isSelected()) {
    EntityCreationPreferences.setAutoIDGeneratorClass(UniqueIdGenerator.class);
  }
  EntityCreationPreferences.setAutoIDStart((Integer) autoIDStart.getValue());
  EntityCreationPreferences.setAutoIDEnd((Integer) autoIDEnd.getValue());
  EntityCreationPreferences.setAutoIDDigitCount((Integer) autoIDDigitCount.getValue());
  EntityCreationPreferences.setPrefix(autoIDPrefix.getText());
  EntityCreationPreferences.setSuffix(autoIDSuffix.getText());
  EntityCreationPreferences.setSaveAutoIDStart(saveIterativeIds.isSelected());
}
public boolean assertDataPropertyInstance(String ind, String prop, int value) {
  OWLDataProperty dataProp =
      OWLFactory.getOWLDataProperty(
          IRI.create(topIxOnt.getOntologyID().getOntologyIRI().toString() + '#' + prop));
  OWLDataPropertyAssertionAxiom tmpAx =
      OWLFactory.getOWLDataPropertyAssertionAxiom(
          dataProp,
          OWLFactory.getOWLNamedIndividual(
              IRI.create(topIxOnt.getOntologyID().getOntologyIRI().toString() + '#' + ind)),
          OWLFactory.getOWLLiteral(value));
  manager.addAxiom(topIxOnt, tmpAx);
  return true;
}
public void mergeAndCompare(String dirPath) throws Exception {
  File dir = new File(dirPath);
  File[] files = dir.listFiles();
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntologyMerger merger = new OWLOntologyMerger(manager);
  for (File f : files) {
    manager.loadOntologyFromOntologyDocument(IRI.create(f));
  }
  String s = "norm-merged-base+300.owl";
  IRI iri = IRI.create(new File(s));
  OWLOntology mergedOntology = merger.createMergedOntology(manager, iri);
  manager.saveOntology(mergedOntology, iri);
  System.out.println("Done creating merged ontology");
  // precomputeAndCheckResults(mergedOntology);
}
public static void callLogMap() throws OWLOntologyCreationException {
  String onto1_iri = "http://oaei.ontologymatching.org/tests/101/onto.rdf";
  String onto2_iri = "http://oaei.ontologymatching.org/tests/304/onto.rdf";
  OWLOntologyManager onto_manager = OWLManager.createOWLOntologyManager();
  OWLOntology onto1 = onto_manager.loadOntology(IRI.create(onto1_iri));
  OWLOntology onto2 = onto_manager.loadOntology(IRI.create(onto2_iri));
  LogMap2_Matcher logmap2 = new LogMap2_Matcher(onto1, onto2);
  Set<MappingObjectStr> logmap2_mappings = logmap2.getLogmap2_Mappings();
  for (MappingObjectStr mappingObject : logmap2_mappings) {
    System.out.println("Mapping: " + mappingObject.toString());
  }
  System.out.println("Num.mappings: " + logmap2_mappings.size());
}