@Test(expected = OWLOntologyDocumentAlreadyExistsException.class)
public void testCreateDuplicatedDocumentIRI() throws Exception {
  // Two distinct ontology IRIs are deliberately mapped onto the same document IRI.
  OWLOntologyManager manager = Factory.getManager();
  IRI documentIRI = IRI("file:documentIRI");
  IRI firstOntologyIRI = IRI("http://www.semanticweb.org/ontologies/ontology");
  IRI secondOntologyIRI = IRI("http://www.semanticweb.org/ontologies/ontology2");
  manager.addIRIMapper(new SimpleIRIMapper(firstOntologyIRI, documentIRI));
  manager.addIRIMapper(new SimpleIRIMapper(secondOntologyIRI, documentIRI));
  manager.createOntology(new OWLOntologyID(firstOntologyIRI));
  // Second creation must fail: the shared document IRI is already in use.
  manager.createOntology(new OWLOntologyID(secondOntologyIRI));
}
/** * Serializes a batch of triples corresponding to a predicate into one file. Upper bound: * TRIPLE_LIMIT_PER_FILE. */ private static int serializeTripleBatch( OWLOntology ontology, QuestOWLIndividualAxiomIterator iterator, String filePrefix, String predicateName, int fileCount, String format) throws Exception { String fileName = filePrefix + fileCount + ".owl"; OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); // Main buffer OWLOntology aBox = manager.createOntology(IRI.create(predicateName)); // Add the signatures for (OWLDeclarationAxiom axiom : ontology.getAxioms(AxiomType.DECLARATION)) { manager.addAxiom(aBox, axiom); } int tripleCount = 0; while (iterator.hasNext() && (tripleCount < TRIPLE_LIMIT_PER_FILE)) { manager.addAxiom(aBox, iterator.next()); tripleCount++; } // BufferedOutputStream output = new BufferedOutputStream(new // FileOutputStream(outputPath.toFile())); // BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output, "UTF-8")); BufferedWriter writer = new BufferedWriter(new FileWriter(fileName)); manager.saveOntology(aBox, getOntologyFormat(format), new WriterDocumentTarget(writer)); return tripleCount; }
@Test(expected = OWLOntologyAlreadyExistsException.class)
public void testCreateDuplicateOntologyWithIRI() throws Exception {
  // Creating the same ontology IRI twice must raise OWLOntologyAlreadyExistsException.
  OWLOntologyManager manager = Factory.getManager();
  IRI duplicatedIRI = IRI("http://www.semanticweb.org/ontologies/ontology");
  manager.createOntology(duplicatedIRI);
  manager.createOntology(duplicatedIRI); // expected to throw
}
@Override
protected boolean exec() {
  // Finds the L-signature of the field `ontology`, optionally normalizing the
  // ontology (integrating ranges) first. Always returns true; failure is
  // signalled by leaving `lSigExtractor` null for the caller to inspect.
  Timer t = new Timer();
  if (ontology == null) {
    lSigExtractor = null;
    return true;
  }
  if (integrateRangesFirst) {
    // Normalize with range integration, then replace `ontology` with a fresh
    // ontology holding the normalized axioms.
    OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, false, false);
    Set<OWLAxiom> axioms = normalization.getNormalizedOntology();
    try {
      OWLOntologyManager manager = ontology.getOWLOntologyManager();
      ontology = manager.createOntology();
      manager.addAxioms(ontology, axioms);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      lSigExtractor = null; // signal failure
      return true;
    }
  }
  lSigExtractor.findLsignature(ontology, fragment);
  // Axiom-count statistics are only recorded when ranges were NOT integrated.
  if (!integrateRangesFirst) stats.updateNelkAxioms(lSigExtractor.nAxiomsInFragment());
  Logger_MORe.logDebug(
      t.duration() + "s to find Lsignature with integrateRangesFirst=" + integrateRangesFirst);
  return true;
}
public void testIgnoreAnnotations() throws Exception { OWLOntologyManager man = getManager(); // OWLManager.createOWLOntologyManager(); OWLOntology ont = man.createOntology(); OWLDataFactory df = man.getOWLDataFactory(); OWLClass clsA = df.getOWLClass(IRI.create("http://ont.com#A")); OWLClass clsB = df.getOWLClass(IRI.create("http://ont.com#B")); OWLSubClassOfAxiom sca = df.getOWLSubClassOfAxiom(clsA, clsB); man.addAxiom(ont, sca); OWLAnnotationProperty rdfsComment = df.getRDFSComment(); OWLLiteral lit = df.getOWLLiteral("Hello world"); OWLAnnotationAssertionAxiom annoAx1 = df.getOWLAnnotationAssertionAxiom(rdfsComment, clsA.getIRI(), lit); man.addAxiom(ont, annoAx1); OWLAnnotationPropertyDomainAxiom annoAx2 = df.getOWLAnnotationPropertyDomainAxiom(rdfsComment, clsA.getIRI()); man.addAxiom(ont, annoAx2); OWLAnnotationPropertyRangeAxiom annoAx3 = df.getOWLAnnotationPropertyRangeAxiom(rdfsComment, clsB.getIRI()); man.addAxiom(ont, annoAx3); OWLAnnotationProperty myComment = df.getOWLAnnotationProperty(IRI.create("http://ont.com#myComment")); OWLSubAnnotationPropertyOfAxiom annoAx4 = df.getOWLSubAnnotationPropertyOfAxiom(myComment, rdfsComment); man.addAxiom(ont, annoAx4); reload(ont, new RDFXMLOntologyFormat()); reload(ont, new OWLXMLOntologyFormat()); reload(ont, new TurtleOntologyFormat()); reload(ont, new OWLFunctionalSyntaxOntologyFormat()); }
public OWLOntology getMergedOntology() { final IRI mergedOntologyIRI = IRI.create(queryOntology.getOntologyID().getDefaultDocumentIRI() + "-merged"); final OWLOntologyManager mm = controller.getOWLOntologyManager(); if (mm.contains(mergedOntologyIRI)) { return mm.getOntology(mergedOntologyIRI); } else { try { final OWLOntology mergedOntology = mm.createOntology(mergedOntologyIRI); mm.setOntologyFormat(mergedOntology, new RDFXMLOntologyFormat()); final String mergedOntologyFileName = mergedOntologyIRI .toURI() .toString() .substring(mergedOntologyIRI.toURI().toString().lastIndexOf("/") + 1) + ".owl"; mm.setOntologyDocumentIRI( mergedOntology, IRI.create( controller.getRuleSpec().getOutputDir().toURI() + "/" + mergedOntologyFileName)); mm.applyChange( new AddImport( mergedOntology, mm.getOWLDataFactory() .getOWLImportsDeclaration( queryOntology.getOntologyID().getDefaultDocumentIRI()))); return mergedOntology; } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); return null; } } }
public KuabaRepository createNewRepository(String url, File destination) {
  // A null URL means the caller wants an auto-generated document IRI.
  IRI iri = (url == null) ? IRI.generateDocumentIRI() : IRI.create(url);
  try {
    OWLOntology inst = manager.createOntology(iri);
    // Every new repository imports the base Kuaba ontology.
    OWLImportsDeclaration imp =
        manager.getOWLDataFactory().getOWLImportsDeclaration(IRI.create(ONTOLOGY_URL));
    manager.applyChange(new AddImport(inst, imp));
    KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
    repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
    fileMap.put(repo, destination);
    TemplateGenerator.generateRootQuestion(repo);
    // Persist immediately only when a destination file was supplied.
    if (destination != null) this.save(repo);
    return repo;
  } catch (OWLOntologyCreationException ex) {
    Logger.getLogger(OwlApiFileGateway.class.getName()).log(Level.SEVERE, null, ex);
  }
  return null;
}
@Test
public void testCreateAnonymousOntology() throws Exception {
  OWLOntologyManager manager = Factory.getManager();
  OWLOntology anonymous = manager.createOntology();
  assertNotNull("ontology should not be null", anonymous);
  OWLOntologyID id = anonymous.getOntologyID();
  assertNotNull("ontology id should not be null", id);
  // An anonymous ontology carries no IRIs of any kind ...
  assertNull(id.getDefaultDocumentIRI());
  assertNull(id.getOntologyIRI());
  assertNull(id.getVersionIRI());
  // ... but the manager still assigns it a document IRI.
  assertNotNull("iri should not be null", manager.getOntologyDocumentIRI(anonymous));
}
@Test
public void testCreateOntologyWithIRI() throws Exception {
  OWLOntologyManager manager = Factory.getManager();
  IRI iri = IRI("http://www.semanticweb.org/ontologies/ontology");
  OWLOntology created = manager.createOntology(iri);
  assertNotNull("ontology should not be null", created);
  OWLOntologyID id = created.getOntologyID();
  assertNotNull("ontology id should not be null", id);
  // With no mapper installed, the ontology IRI doubles as the document IRI.
  assertEquals(iri, id.getDefaultDocumentIRI());
  assertEquals(iri, id.getOntologyIRI());
  assertNull(id.getVersionIRI());
  assertEquals(iri, manager.getOntologyDocumentIRI(created));
}
public LsignatureExtractorViaInverseRewritingLauncher( OWLOntology ontology, LogicFragment fragment) { this.ontology = null; try { manager = ontology.getOWLOntologyManager(); this.ontology = manager.createOntology(); manager.addAxioms(this.ontology, ontology.getAxioms()); } catch (OWLOntologyCreationException e) { e.printStackTrace(); } this.fragment = fragment; }
@Override
public boolean test() {
  // Smoke test: prints ontology metadata, exercises create/read/delete on Child
  // individuals, then splits the ontology into ABox/TBox/RBox files on disk.
  // Always returns true; errors are only printed.
  // Check if the ontology contains any axioms
  System.out.println("Number of axioms: " + ontology.getAxiomCount());
  // Every ontology has a unique ID.
  System.out.println("Current Ontology ID: " + ontology.getOntologyID());
  // test of CRUD
  // test of Create
  System.out.println("Number of children: " + factory.getAllChildInstances().size());
  System.out.println("Create a new child ");
  factory.createChild("Nicola");
  System.out.println("Number of children: " + factory.getAllChildInstances().size());
  // test of Read
  Child c = factory.getChild("Nicola");
  System.out.println(c.getOwlIndividual());
  // TODO: test of Update
  // test of Delete
  c.delete();
  System.out.println("Number of children: " + factory.getAllChildInstances().size());
  // save ABox, TBox, RBox to separate files.
  try {
    // The boolean argument is passed straight through to get*Axioms; presumably it
    // includes the imports closure — TODO confirm against the OWL API version in use.
    ontlgAbox = manager.createOntology(ontology.getABoxAxioms(true));
    ontlgTbox = manager.createOntology(ontology.getTBoxAxioms(true));
    ontlgRbox = manager.createOntology(ontology.getRBoxAxioms(true));
  } catch (OWLOntologyCreationException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
  try {
    manager.saveOntology(ontlgAbox, IRI.create(new File("individual/Abox.owl")));
    manager.saveOntology(ontlgTbox, IRI.create(new File("individual/Tbox.owl")));
    manager.saveOntology(ontlgRbox, IRI.create(new File("individual/Rbox.owl")));
  } catch (OWLOntologyStorageException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
  return true;
}
public LsignatureExtractorLauncher( OWLOntology ontology, LogicFragment fragment, boolean integrateRangesFirst) { this.ontology = null; try { OWLOntologyManager manager = ontology.getOWLOntologyManager(); this.ontology = manager.createOntology(); manager.addAxioms(this.ontology, ontology.getAxioms()); } catch (OWLOntologyCreationException e) { e.printStackTrace(); } this.fragment = fragment; this.integrateRangesFirst = integrateRangesFirst; }
@Test public void shouldTestNoResultsSubClassAxiom() throws Exception { OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = manager.createOntology(); OPPLFactory opplFactory = new OPPLFactory(manager, ontology, null); ConstraintSystem constraintSystem = opplFactory.createConstraintSystem(); Variable<OWLClassExpression> x = constraintSystem.createVariable("?x", VariableTypeFactory.getCLASSVariableType(), null); OWLDataFactory dataFactory = manager.getOWLDataFactory(); OWLClass a = dataFactory.getOWLClass(IRI.create("A")); OWLClass b = dataFactory.getOWLClass(IRI.create("B")); OWLClass c = dataFactory.getOWLClass(IRI.create("C")); OWLObjectProperty p = dataFactory.getOWLObjectProperty(IRI.create("p")); manager.addAxiom( ontology, dataFactory.getOWLSubClassOfAxiom( dataFactory.getOWLObjectSomeValuesFrom(p, a), dataFactory.getOWLObjectSomeValuesFrom(p, b))); OWLSubClassOfAxiom axiom = dataFactory.getOWLSubClassOfAxiom( dataFactory.getOWLObjectSomeValuesFrom(p, dataFactory.getOWLClass(x.getIRI())), dataFactory.getOWLObjectSomeValuesFrom(p, c)); AxiomSolvability axiomSolvability = new NoResultsAxiomSolvability(constraintSystem, new AssertedModelQuerySolver(manager)); SolvabilitySearchNode node = axiomSolvability.getSolvabilitySearchNode(axiom, BindingNode.createNewEmptyBindingNode()); node.accept( new SolvabilitySearchNodeVisitor() { @Override public void visitUnsolvableSearchNode(UnsolvableSearchNode unsolvableSearchNode) { fail("Wrong type of solvability node: unsolvable, when expecting no solutions"); } @Override public void visitSolvedSearchNode(SolvedSearchNode solvedSearchNode) { fail("Wrong type of solvability node: solved, when expecting no solutions"); } @Override public void visitNoSolutionSolvableSearchNode( NoSolutionSolvableSearchNode noSolutionSolvableSearchNode) { // That's fine } @Override public void visitSolvableSearchNode(SolvableSearchNode solvableSearchNode) { fail("Wrong type of solvability node: no solution, 
when expecting no solutions"); } }); }
@Test
public void testCreateOntologyWithIRIAndVersionIRIWithMapper() throws Exception {
  OWLOntologyManager manager = Factory.getManager();
  IRI ontologyIRI = IRI("http://www.semanticweb.org/ontologies/ontology");
  IRI versionIRI = IRI("http://www.semanticweb.org/ontologies/ontology/version");
  IRI documentIRI = IRI("file:documentIRI");
  // Map the *version* IRI to the document IRI.
  manager.addIRIMapper(new SimpleIRIMapper(versionIRI, documentIRI));
  OWLOntology ontology = manager.createOntology(new OWLOntologyID(ontologyIRI, versionIRI));
  assertNotNull("ontology should not be null", ontology);
  OWLOntologyID id = ontology.getOntologyID();
  assertNotNull("ontology id should not be null", id);
  // When a version IRI is present it becomes the default document IRI.
  assertEquals(versionIRI, id.getDefaultDocumentIRI());
  assertEquals(ontologyIRI, id.getOntologyIRI());
  assertEquals(versionIRI, id.getVersionIRI());
  assertEquals(documentIRI, manager.getOntologyDocumentIRI(ontology));
}
@Test
public void testNamedOntologyToString() throws Exception {
  OWLOntologyManager manager = Factory.getManager();
  OWLOntology ont = manager.createOntology(IRI("http://owlapi.sourceforge.net/ont"));
  // toString() must follow the fixed "Ontology(<id>) [Axioms: n Logical Axioms: m]" shape.
  String expected =
      "Ontology("
          + ont.getOntologyID().toString()
          + ") [Axioms: "
          + ont.getAxiomCount()
          + " Logical Axioms: "
          + ont.getLogicalAxiomCount()
          + "]";
  assertEquals(expected, ont.toString());
}
/**
 * Creates an empty notes ontology and immediately persists it in binary OWL format at
 * {@code notesOntologyDocument}, creating parent directories as needed.
 *
 * @throws RuntimeException wrapping any creation or storage failure
 */
private void createEmptyNotesOntology() {
  try {
    OWLOntologyManager notesOntologyManager = WebProtegeOWLManager.createOWLOntologyManager();
    notesOntology = notesOntologyManager.createOntology();
    IRI notesOntologyDocumentIRI = IRI.create(notesOntologyDocument);
    notesOntologyManager.setOntologyDocumentIRI(notesOntology, notesOntologyDocumentIRI);
    // Ensure the on-disk parent directory exists before saving.
    notesOntologyDocument.getParentFile().mkdirs();
    BinaryOWLOntologyDocumentFormat notesOntologyDocumentFormat =
        new BinaryOWLOntologyDocumentFormat();
    notesOntologyManager.saveOntology(
        notesOntology, notesOntologyDocumentFormat, notesOntologyDocumentIRI);
  } catch (OWLOntologyCreationException | OWLOntologyStorageException e) {
    // Both failure modes were wrapped identically; collapsed into one multi-catch.
    throw new RuntimeException(e);
  }
}
@Test
public void testBadDataproperty() throws Exception {
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = manager.createOntology(IRI.create(NS));
  // SubClassOf axiom whose filler datatype is xsd:duration.
  OWLClassExpression restriction =
      factory.getOWLDataSomeValuesFrom(
          P, factory.getOWLDatatype(XSDVocabulary.DURATION.getIRI()));
  manager.addAxiom(ontology, factory.getOWLSubClassOfAxiom(A, restriction));
  assertTrue(ontology.containsDataPropertyInSignature(P.getIRI()));
  // Round-trip through RDF/XML: P must still be recognized as a data property.
  StringDocumentTarget target = new StringDocumentTarget();
  manager.saveOntology(ontology, new RDFXMLOntologyFormat(), target);
  manager.removeOntology(ontology);
  ontology = manager.loadOntologyFromOntologyDocument(new StringDocumentSource(target.toString()));
  assertTrue(ontology.containsDataPropertyInSignature(P.getIRI()));
}
/**
 * Builds the hybrid KB and its parser over a fresh, anonymously-named ontology.
 *
 * <p>Requires the {@code XSB_BIN_DIRECTORY} environment variable to point at the XSB Prolog
 * binaries.
 *
 * @throws RuntimeException wrapping any of the checked setup failures
 */
private void setup() {
  try {
    ontology = ontologyManager.createOntology(IRI.generateDocumentIRI());
    hybridKB = new NoHRHybridKB(new File(System.getenv("XSB_BIN_DIRECTORY")), ontology, profile);
    parser = new NoHRRecursiveDescentParser(hybridKB.getVocabulary());
  } catch (final IPException
      | OWLOntologyCreationException
      | UnsupportedAxiomsException
      | PrologEngineCreationException e) {
    // All four checked failure modes were wrapped identically; collapsed to multi-catch.
    throw new RuntimeException(e);
  }
}
public KuabaRepository copy(KuabaRepository kr, String url, File destination) {
  // A null URL means "generate a fresh document IRI" for the copy.
  IRI iri = (url == null) ? IRI.generateDocumentIRI() : IRI.create(url);
  try {
    OWLOntology model = (OWLOntology) kr.getModel();
    OWLOntology inst = manager.createOntology(iri);
    // Migrate all content from the source model into the new ontology.
    OntologyMigrator migrator = new OntologyMigrator(manager, model, inst);
    migrator.performMigration();
    // Rewrite every entity IRI from the source namespace to the new one.
    EntityFindAndReplaceURIRenamer renamer =
        new EntityFindAndReplaceURIRenamer(
            manager,
            inst.getSignature(false),
            Collections.singleton(inst),
            model.getOntologyID().getOntologyIRI().toString(),
            iri.toString());
    if (renamer.hasErrors()) System.err.println("ERRO durante a cópia - rename");
    manager.applyChanges(renamer.getChanges());
    KuabaRepository repo = new OwlApiKuabaRepository(inst, manager.getOWLDataFactory());
    repoMap.put(inst.getOntologyID().getOntologyIRI(), repo);
    fileMap.put(repo, destination);
    return repo;
  } catch (OWLOntologyCreationException ex) {
    System.err.println("ERRO em copy");
    Logger.getLogger(OwlApiFileGateway.class.getName()).log(Level.SEVERE, null, ex);
  }
  return null;
}
public Set<OWLClass> getRootUnsatisfiableClasses() { // StructureBasedRootClassFinder srd = new StructureBasedRootClassFinder(this.baseReasoner); StructuralRootDerivedReasoner srd = new StructuralRootDerivedReasoner(this.manager, this.baseReasoner, this.reasonerFactory); Set<OWLClass> estimatedRoots = srd.getRootUnsatisfiableClasses(); this.cls2JustificationMap = new HashMap<OWLClass, Set<Explanation>>(); Set<OWLAxiom> allAxioms = new HashSet<OWLAxiom>(); for (OWLOntology ont : this.baseReasoner.getRootOntology().getImportsClosure()) { allAxioms.addAll(ont.getLogicalAxioms()); } for (OWLClass cls : estimatedRoots) { this.cls2JustificationMap.put(cls, new HashSet<Explanation>()); System.out.println("POTENTIAL ROOT: " + cls); } System.out.println("Finding real roots from " + estimatedRoots.size() + " estimated roots"); int done = 0; this.roots.addAll(estimatedRoots); for (OWLClass estimatedRoot : estimatedRoots) { try { PelletExplanationGenerator gen = new PelletExplanationGenerator(manager.createOntology(allAxioms)); OWLDataFactory df = this.manager.getOWLDataFactory(); Set<Explanation> expls = gen.getExplanations(df.getOWLSubClassOfAxiom(estimatedRoot, df.getOWLNothing())); cls2JustificationMap.get(estimatedRoot).addAll(expls); ++done; System.out.println("Done " + done); } catch (OWLOntologyCreationException e) { e.printStackTrace(); } } for (OWLClass clsA : estimatedRoots) { for (OWLClass clsB : estimatedRoots) if (!clsA.equals(clsB)) { Set<Explanation> clsAExpls = cls2JustificationMap.get(clsA); Set<Explanation> clsBExpls = cls2JustificationMap.get(clsB); boolean clsARootForClsB = false; boolean clsBRootForClsA = false; for (Explanation clsAExpl : clsAExpls) { for (Explanation clsBExpl : clsBExpls) if (isRootFor(clsAExpl, clsBExpl)) { clsARootForClsB = true; } else if (isRootFor(clsBExpl, clsAExpl)) { clsBRootForClsA = true; } } Explanation clsAExpl; if ((!clsARootForClsB) || (!clsBRootForClsA)) if (clsARootForClsB) { this.roots.remove(clsB); } else if 
(clsBRootForClsA) this.roots.remove(clsA); } } OWLClass clsA; return this.roots; }
/**
 * Extracts the L-signature of {@code ontology} for the given fragment, running up to three
 * extractor tasks in parallel (plain ELK, ELK after inverse rewriting, ELK after range
 * integration) and merging their results.
 *
 * @param ontology the ontology to analyse
 * @param fragment the logic fragment whose signature is sought (only used for logging here;
 *     the launchers are hard-wired to LogicFragment.ELK)
 * @param stats statistics accumulator, updated with the resulting signature size
 * @return a copy of the ontology possibly extended with axioms from the normalised/rewritten
 *     ontologies, or the original ontology if the copy could not be created
 */
public OWLOntology findLsignature(
    OWLOntology ontology, LogicFragment fragment, Statistics stats) {
  Timer t = new Timer();
  this.stats = stats;
  Logger_MORe.logInfo("extracting " + fragment.toString() + "-signature");
  OWLOntology ret = null;
  OWLOntologyManager manager = ontology.getOWLOntologyManager();
  try {
    // Detached copy: extra axioms from normalisation/rewriting are added to it below.
    ret = manager.createOntology();
    manager.addAxioms(ret, ontology.getAxioms());
  } catch (OWLOntologyCreationException e) {
    e.printStackTrace();
  }
  // Reset the signature partitions computed by this run.
  lSignatureClasses = new HashSet<OWLClass>();
  lSignatureOther = new HashSet<OWLEntity>();
  compSignatureClasses = new HashSet<OWLClass>();
  compSignatureOther = new HashSet<OWLEntity>();
  LsignatureExtractorLauncher elkSignatureExtractorLauncher = null;
  LsignatureExtractorLauncher elkSignatureExtractorIntegratingRangesLauncher = null;
  LsignatureExtractorViaInverseRewritingLauncher elkSignatureExtractorRewritingInversesLauncher =
      null;
  ForkJoinPool executor = new ForkJoinPool();
  // The plain ELK extraction always runs.
  elkSignatureExtractorLauncher =
      new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, false);
  executor.execute(elkSignatureExtractorLauncher);
  if (ret != null) {
    // otherwise we have nowhere to return the axioms in the normalised ontologies necessary to
    // really classify all the extra classses in the lSignature
    if (rewriteInverses) {
      elkSignatureExtractorRewritingInversesLauncher =
          new LsignatureExtractorViaInverseRewritingLauncher(ontology, LogicFragment.ELK);
      executor.execute(elkSignatureExtractorRewritingInversesLauncher);
    }
    if (integrateRanges) {
      elkSignatureExtractorIntegratingRangesLauncher =
          new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, true);
      executor.execute(elkSignatureExtractorIntegratingRangesLauncher);
    }
    // check the output of the normal ELKsignature and cancel the other threads if the lSig is the
    // whole signature
    initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
    if (compSignatureClasses.isEmpty())
      cancelTasks(
          elkSignatureExtractorIntegratingRangesLauncher,
          elkSignatureExtractorRewritingInversesLauncher);
    else {
      // Try the inverse-rewriting result first; if it grows the lSignature, pull in
      // the rewritten ontology's axioms so the extra classes can be classified.
      if (elkSignatureExtractorRewritingInversesLauncher != null
          && extendLsignature(
                  (LsignatureExtractor) elkSignatureExtractorRewritingInversesLauncher.join())
              > 0) {
        manager.addAxioms(
            ret,
            ((LsignatureExtractorViaInverseRewritingLauncher)
                    elkSignatureExtractorRewritingInversesLauncher)
                .getOntology()
                .getAxioms());
      }
      if (compSignatureClasses.isEmpty())
        cancelTasks(elkSignatureExtractorRewritingInversesLauncher);
      else if (elkSignatureExtractorIntegratingRangesLauncher != null
          && extendLsignature(
                  (LsignatureExtractor) elkSignatureExtractorIntegratingRangesLauncher.join())
              > 0) {
        manager.addAxioms(
            ret,
            ((LsignatureExtractorLauncher) elkSignatureExtractorIntegratingRangesLauncher)
                .getOntology()
                .getAxioms());
      }
    }
    stats.updateLsignatureSize(lSignatureClasses.size(), true);
  } else {
    // Could not create the working copy: fall back to the original ontology and
    // only use the plain ELK extraction result.
    ret = ontology;
    initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
  }
  Logger_MORe.logInfo(lSignatureClasses.size() + "classes in lSignature");
  Logger_MORe.logDebug(lSignatureClasses.toString());
  Logger_MORe.logInfo(compSignatureClasses.size() + "classes in compSignature");
  // might be a good idea to try to isolate extra axioms in the normalisation/rewriting - is this
  // possible/worth the effort?
  // check the order in which we try to extend the lSignature with each of the rewritten
  // ontologies and consider if one may be better that the other
  Logger_MORe.logDebug(t.duration() + "s to find Lsignature");
  return ret;
}
@Override
protected boolean exec() {
  // Rewrites inverse roles out of `ontology` (first restricting to a SHOIQ module
  // when the ontology goes beyond SHOIQ) and then extracts the L-signature from
  // the rewritten axioms. Always returns true; failure at any step is signalled
  // by setting `extractor` to null.
  Timer t = new Timer();
  if (ontology == null) {
    extractor = null;
    return true;
  }
  IRI iri =
      IRI.create("http://www.cs.ox.ac.uk/isg/tools/MORe/ontologies/inverseRewritingModule.owl");
  extractor.findLsignature(ontology, LogicFragment.SHOIQ);
  if (containsNonInternalClasses(
      extractor.getCompSignature())) { // then the ontology goes beyond SHOIQ and we need to work
    // with a SHOIQ module rather than the whole ontology
    Set<OWLEntity> aux = getNonInternalClasses(extractor.getLsignature());
    if (aux.isEmpty()) {
      extractor = null;
      Logger_MORe.logDebug(
          t.duration()
              + "s to find Lsignature with inverseRewriting (failed - empty SHOIQ-signature)");
      return true;
    }
    SyntacticLocalityModuleExtractor moduleExtractor =
        new SyntacticLocalityModuleExtractor(manager, ontology, ModuleType.BOT);
    try {
      // ontology = manager.createOntology(iri);
      // manager.addAxioms(ontology, moduleExtractor.extract(aux));
      ontology = moduleExtractor.extractAsOntology(aux, iri);
    } catch (OWLOntologyCreationException e1) {
      extractor = null;
      e1.printStackTrace();
      Logger_MORe.logDebug(
          t.duration()
              + "s to find Lsignature with inverseRewriting (failed - exception creating a SHOIQ module)");
      return true;
    }
  }
  // if we get this far then we have a nonempty ontology (maybe module) that we need to
  // normalize and then rewrite
  OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, true, true);
  Rewriter rewriter =
      new Rewriter(normalization.getNormalizedOntology(), normalization.getSortedGCIs());
  // Drop the temporary module ontology (if one was registered under `iri`).
  if (manager.contains(iri)) manager.removeOntology(ontology);
  Set<OWLAxiom> rewrittenAxioms = rewriter.getRewrittenOntology();
  if (!rewriter.anyRewrittenRoles()) {
    extractor = null;
    Logger_MORe.logDebug(
        t.duration()
            + "s to find Lsignature with inverseRewriting (failed - could not rewrite any roles)");
    return true;
  }
  try {
    // Fresh ontology over the rewritten axioms feeds the final extraction.
    ontology = manager.createOntology();
    manager.addAxioms(ontology, rewrittenAxioms);
    extractor = new LsignatureExtractor_reducedGreedyness();
    extractor.findLsignature(ontology, fragment);
  } catch (OWLOntologyCreationException e1) {
    extractor = null;
    e1.printStackTrace();
    Logger_MORe.logDebug(
        t.duration()
            + "s to find Lsignature with inverseRewriting (failed - exception creating ontology for rewritten axioms)");
    return true;
  }
  Logger_MORe.logDebug(t.duration() + "s to find Lsignature with inverseRewriting");
  return true;
}
public void runWithSingleFile() { BufferedOutputStream output = null; BufferedWriter writer = null; try { final long startTime = System.currentTimeMillis(); if (outputFile != null) { output = new BufferedOutputStream(new FileOutputStream(outputFile)); } else { output = new BufferedOutputStream(System.out); } writer = new BufferedWriter(new OutputStreamWriter(output, "UTF-8")); OBDAModel obdaModel = loadMappingFile(mappingFile); OWLOntology ontology = null; OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLAPI3Materializer materializer = null; if (owlFile != null) { // Loading the OWL ontology from the file as with normal OWLReasoners ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile))); if (disableReasoning) { /* * when reasoning is disabled, we extract only the declaration assertions for the vocabulary */ ontology = extractDeclarations(manager, ontology); } Ontology onto = OWLAPI3TranslatorUtility.translate(ontology); obdaModel.declareAll(onto.getVocabulary()); materializer = new OWLAPI3Materializer(obdaModel, onto, DO_STREAM_RESULTS); } else { ontology = manager.createOntology(); materializer = new OWLAPI3Materializer(obdaModel, DO_STREAM_RESULTS); } // OBDAModelSynchronizer.declarePredicates(ontology, obdaModel); QuestOWLIndividualAxiomIterator iterator = materializer.getIterator(); while (iterator.hasNext()) manager.addAxiom(ontology, iterator.next()); OWLOntologyFormat ontologyFormat = getOntologyFormat(format); manager.saveOntology(ontology, ontologyFormat, new WriterDocumentTarget(writer)); System.err.println("NR of TRIPLES: " + materializer.getTriplesCount()); System.err.println("VOCABULARY SIZE (NR of QUERIES): " + materializer.getVocabularySize()); materializer.disconnect(); if (outputFile != null) output.close(); final long endTime = System.currentTimeMillis(); final long time = endTime - startTime; System.out.println("Elapsed time to materialize: " + time + " {ms}"); } catch (Exception e) { 
System.out.println("Error materializing ontology:"); e.printStackTrace(); } }
/** * This method has no conversion calls, to it can be invoked by subclasses that wish to modify it * afterwards. * * <p>FIXME not merging yet FIXME not including imported ontologies unless they are merged * *before* storage. * * @param merge * @return */ protected OWLOntology exportToOWLOntology(boolean merge, IRI prefix) { long before = System.currentTimeMillis(); // Create a new ontology OWLOntology root; OWLOntologyManager ontologyManager = OWLManager.createOWLOntologyManager(); IRI iri = IRI.create(prefix + _id); try { root = ontologyManager.createOntology(iri); } catch (OWLOntologyAlreadyExistsException e) { // It should be impossible, but just in case. ontologyManager.removeOntology(ontologyManager.getOntology(iri)); try { root = ontologyManager.createOntology(iri); } catch (OWLOntologyAlreadyExistsException e1) { root = ontologyManager.getOntology(iri); } catch (OWLOntologyCreationException e1) { log.error("Failed to assemble root ontology for scope " + iri, e); root = null; } } catch (OWLOntologyCreationException e) { log.error("Failed to assemble root ontology for scope " + _id, e); root = null; } // Add the import declarations for directly managed ontologies. if (root != null) { if (merge) { final Set<OWLOntology> set = new HashSet<OWLOntology>(); log.debug("Merging {} with its imports.", root); set.add(root); for (OWLOntologyID ontologyId : managedOntologies) { log.debug("Merging {} with {}.", ontologyId, root); set.add(getOntology(ontologyId, OWLOntology.class, true)); } OWLOntologySetProvider provider = new OWLOntologySetProvider() { @Override public Set<OWLOntology> getOntologies() { return set; } }; OWLOntologyMerger merger = new OWLOntologyMerger(provider); try { root = merger.createMergedOntology(OWLManager.createOWLOntologyManager(), iri); } catch (OWLOntologyCreationException e) { log.error("Failed to merge imports for ontology " + iri, e); root = null; } } else { // Add the import declarations for directly managed ontologies. 
List<OWLOntologyChange> changes = new LinkedList<OWLOntologyChange>(); OWLDataFactory df = ontologyManager.getOWLDataFactory(); String base = prefix + getID(); for (int i = 0; i < backwardPathLength; i++) base = URIUtils.upOne(URI.create(base)).toString(); base += "/"; // The key set of managedOntologies contains the ontology IRIs, not their storage keys. for (OWLOntologyID ontologyId : managedOntologies) { // XXX some day the versionIRI will be the only physical reference for the ontology IRI physIRI = IRI.create(base + OntologyUtils.encode(ontologyId)); changes.add(new AddImport(root, df.getOWLImportsDeclaration(physIRI))); } ontologyManager.applyChanges(changes); } } log.debug("OWL export of {} completed in {} ms.", getID(), System.currentTimeMillis() - before); return root; }
/**
 * Runs the Pellet reasoner over {@code ontology}, materializes the inferred axioms into a new
 * ontology with the same ID, and carries over imports declarations, ontology annotations and
 * per-entity annotation assertions. On any failure the original ontology is returned unchanged.
 *
 * @param manager the manager that owns {@code ontology}; the original is removed from it and
 *     replaced by the inferred ontology on success
 * @param ontology the ontology to classify
 * @return the inferred ontology, or the original ontology if anything fails
 */
private OWLOntology parseWithReasoner(OWLOntologyManager manager, OWLOntology ontology) {
  try {
    PelletOptions.load(new URL("http://" + cssLocation + "pellet.properties"));
    PelletReasoner reasoner = PelletReasonerFactory.getInstance().createReasoner(ontology);
    reasoner.getKB().prepare();
    // One generator per kind of inferred axiom we want materialized.
    List<InferredAxiomGenerator<? extends OWLAxiom>> generators =
        new ArrayList<InferredAxiomGenerator<? extends OWLAxiom>>();
    generators.add(new InferredSubClassAxiomGenerator());
    generators.add(new InferredClassAssertionAxiomGenerator());
    generators.add(new InferredDisjointClassesAxiomGenerator());
    generators.add(new InferredEquivalentClassAxiomGenerator());
    generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
    generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
    generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
    generators.add(new InferredPropertyAssertionGenerator());
    generators.add(new InferredSubDataPropertyAxiomGenerator());
    generators.add(new InferredSubObjectPropertyAxiomGenerator());
    InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner, generators);
    // Snapshot everything that fillOntology() will not carry over by itself.
    OWLOntologyID id = ontology.getOntologyID();
    Set<OWLImportsDeclaration> declarations = ontology.getImportsDeclarations();
    Set<OWLAnnotation> annotations = ontology.getAnnotations();
    Map<OWLEntity, Set<OWLAnnotationAssertionAxiom>> entityAnnotations =
        new HashMap<OWLEntity, Set<OWLAnnotationAssertionAxiom>>();
    for (OWLClass aEntity : ontology.getClassesInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    for (OWLObjectProperty aEntity : ontology.getObjectPropertiesInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    for (OWLDataProperty aEntity : ontology.getDataPropertiesInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    for (OWLNamedIndividual aEntity : ontology.getIndividualsInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    for (OWLAnnotationProperty aEntity : ontology.getAnnotationPropertiesInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    for (OWLDatatype aEntity : ontology.getDatatypesInSignature()) {
      entityAnnotations.put(aEntity, aEntity.getAnnotationAssertionAxioms(ontology));
    }
    // Replace the asserted ontology with the inferred one under the same ID.
    manager.removeOntology(ontology);
    OWLOntology inferred = manager.createOntology(id);
    iog.fillOntology(manager, inferred);
    // Restore imports, ontology annotations and entity annotations on the inferred copy.
    for (OWLImportsDeclaration decl : declarations) {
      manager.applyChange(new AddImport(inferred, decl));
    }
    for (OWLAnnotation ann : annotations) {
      manager.applyChange(new AddOntologyAnnotation(inferred, ann));
    }
    for (OWLClass aEntity : inferred.getClassesInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    for (OWLObjectProperty aEntity : inferred.getObjectPropertiesInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    for (OWLDataProperty aEntity : inferred.getDataPropertiesInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    for (OWLNamedIndividual aEntity : inferred.getIndividualsInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    for (OWLAnnotationProperty aEntity : inferred.getAnnotationPropertiesInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    for (OWLDatatype aEntity : inferred.getDatatypesInSignature()) {
      applyAnnotations(aEntity, entityAnnotations, manager, inferred);
    }
    return inferred;
  } catch (IOException e1) {
    // FIX: FileNotFoundException and MalformedURLException are IOException subtypes
    // handled identically; the three redundant catch blocks collapse into this one.
    return ontology;
  } catch (OWLOntologyCreationException e) {
    return ontology;
  }
}
public static void main(String[] args) throws OWLOntologyCreationException, IOException, OWLOntologyStorageException { OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = manager.createOntology(ontologyIRI); OWLDataFactory factory = manager.getOWLDataFactory(); // create basic concepts with their IRIs currentPhase = factory.getOWLClass(IRI.create(ontologyIRI + "#CurrentPhase")); previousPhase = factory.getOWLObjectProperty(IRI.create(ontologyIRI + "#previousPhase")); detectedPhase = factory.getOWLClass(IRI.create(ontologyIRI + "#DetectedPhase")); // init the currentPhase-class with the "start"-individual OWLNamedIndividual start1 = factory.getOWLNamedIndividual(IRI.create(ontologyIRI + "start")); manager.addAxiom(ontology, factory.getOWLClassAssertionAxiom(currentPhase, start1)); // create rule1 SWRLRule rulePP = buildRule("port_placement", "port", "unknown", "place"); manager.applyChange(new AddAxiom(ontology, rulePP)); // create rule2 SWRLRule ruleMob1 = buildRule("mobilisation", "atraumatic_grasper", "gallbladder_fundus", "grasp"); manager.applyChange(new AddAxiom(ontology, ruleMob1)); SWRLRule ruleMob2 = buildRule("mobilisation", "atraumatic_grasper", "gastrocolic_ligament", "grasp"); manager.applyChange(new AddAxiom(ontology, ruleMob2)); SWRLRule ruleDis1 = buildRule("dissection", "atraumatic_grasper", "hepatoduodenal_ligament", "grasp"); manager.applyChange(new AddAxiom(ontology, ruleDis1)); SWRLRule ruleDis2 = buildRule("dissection", "atraumatic_grasper", "hepatoduodenal_ligament", "lift"); manager.applyChange(new AddAxiom(ontology, ruleDis2)); SWRLRule ruleDis3 = buildRule("dissection", "instrument", "calot_triangle", "instrumental_property"); manager.applyChange(new AddAxiom(ontology, ruleDis3)); SWRLRule ruleDis4 = buildRule("dissection", "sharp_instrument", "hepatoduodenal_ligament", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleDis4)); SWRLRule ruleResCA1 = buildRule("resection_cystic_artery", 
"sharp_instrument", "cystic_artery", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleResCA1)); SWRLRule ruleResCA2 = buildRule("resection_cystic_artery", "clip", "cystic_artery", "clipping"); manager.applyChange(new AddAxiom(ontology, ruleResCA2)); SWRLRule ruleResCD1 = buildRule("resection_cystic_duct", "sharp_instrument", "cystic_duct", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleResCD1)); SWRLRule ruleResCD2 = buildRule("resection_cystic_duct", "clip", "cystic_duct", "clipping"); manager.applyChange(new AddAxiom(ontology, ruleResCD2)); SWRLRule ruleResG1 = buildRule( "resection_gallbladder", "sharp_instrument", "gallbladder_serosa", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleResG1)); SWRLRule ruleResG2 = buildRule("resection_gallbladder", "sharp_instrument", "gallbladder_serosa", "dissect"); manager.applyChange(new AddAxiom(ontology, ruleResG2)); SWRLRule ruleResG3 = buildRule("resection_gallbladder", "sharp_instrument", "gallbladder", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleResG3)); SWRLRule ruleResG4 = buildRule( "resection_gallbladder", "sharp_instrument", "gallbladder_liverbed", "cutting_action"); manager.applyChange(new AddAxiom(ontology, ruleResG4)); SWRLRule ruleClose1 = buildRule("closure", "specimen_bag", "unknown", "instrumental_property"); manager.applyChange(new AddAxiom(ontology, ruleClose1)); SWRLRule ruleClose2 = buildRule("closure", "specimen_bag", "organ", "instrumental_property"); manager.applyChange(new AddAxiom(ontology, ruleClose2)); // Strange rules with the instrument "drain". It should be called "drainage" as by the next two // rules. 
// SWRLRule ruleDrain1 = buildRule("drain", "drain", "unknown", "putting_action"); // manager.applyChange(new AddAxiom(ontology, ruleDrain1)); // SWRLRule ruleDrain2 = buildRule("drain", "drain", "unknown", "instrumental_property"); // manager.applyChange(new AddAxiom(ontology, ruleDrain2)); SWRLRule ruleDrainage1 = buildRule("drain", "drainage", "unknown", "instrumental_property"); manager.applyChange(new AddAxiom(ontology, ruleDrainage1)); SWRLRule ruleDrainage2 = buildRule("drain", "drainage", "organ", "instrumental_property"); manager.applyChange(new AddAxiom(ontology, ruleDrainage2)); // save in RDF/XML format File output = new File("D:/DiplArbeit/OurWork/USECASE/RULES/CCE_rules.owl"); output.createNewFile(); manager.saveOntology(ontology, IRI.create(output.toURI())); }