/**
 * Initialization consists of:
 *
 * <ul>
 *   <li>Setting aboxOntology, if not set - defaults to a new ontology using tbox.IRI as base.
 *       Adds import to tbox.
 *   <li>Setting queryOntology, if not set. Adds abox imports queryOntology declaration
 * </ul>
 *
 * @throws OWLOntologyCreationException if a fresh abox or query ontology cannot be created
 */
private void init() throws OWLOntologyCreationException {
  // Intended import chain: reasoner -> query -> abox -> tbox
  if (aboxOntology == null) {
    LOG.debug("Creating abox ontology. mgr = " + getOWLOntologyManager());
    // Derive the abox IRI from the tbox IRI so the two are recognizably paired.
    IRI ontologyIRI = IRI.create(tboxOntology.getOntologyID().getOntologyIRI() + "__abox");
    aboxOntology = getOWLOntologyManager().getOntology(ontologyIRI);
    if (aboxOntology != null) {
      // A stale ontology under the same IRI would make createOntology below fail,
      // so drop it and start from a clean abox.
      LOG.warn("Clearing existing abox ontology");
      getOWLOntologyManager().removeOntology(aboxOntology);
    }
    aboxOntology = getOWLOntologyManager().createOntology(ontologyIRI);
    // abox imports tbox
    AddImport ai =
        new AddImport(
            aboxOntology,
            getOWLDataFactory()
                .getOWLImportsDeclaration(tboxOntology.getOntologyID().getOntologyIRI()));
    getOWLOntologyManager().applyChange(ai);
  }
  // add listener to abox to set modified flag
  OWLOntologyChangeBroadcastStrategy strategy =
      new SpecificOntologyChangeBroadcastStrategy(aboxOntology);
  OWLOntologyChangeListener listener =
      new OWLOntologyChangeListener() {
        @Override
        public void ontologiesChanged(List<? extends OWLOntologyChange> changes)
            throws OWLException {
          // Only changes targeting the abox itself mark it dirty; changes to
          // imported ontologies are ignored.
          for (OWLOntologyChange owlOntologyChange : changes) {
            if (aboxOntology.equals(owlOntologyChange.getOntology())) {
              setAboxModified(true);
            }
          }
        }
      };
  aboxOntology.getOWLOntologyManager().addOntologyChangeListener(listener, strategy);
  if (queryOntology == null) {
    // Imports: {q imports a, a imports t}
    LOG.debug("Creating query ontology");
    IRI ontologyIRI = IRI.create(tboxOntology.getOntologyID().getOntologyIRI() + "__query");
    queryOntology = getOWLOntologyManager().getOntology(ontologyIRI);
    if (queryOntology == null) {
      // NOTE(review): unlike the abox branch above, an existing query ontology is
      // reused rather than cleared - confirm this asymmetry is intentional.
      queryOntology = getOWLOntologyManager().createOntology(ontologyIRI);
    }
    // query imports abox
    AddImport ai =
        new AddImport(
            queryOntology,
            getOWLDataFactory()
                .getOWLImportsDeclaration(aboxOntology.getOntologyID().getOntologyIRI()));
    getOWLOntologyManager().applyChange(ai);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug(modelId + " manager(T) = " + tboxOntology.getOWLOntologyManager());
    LOG.debug(modelId + " manager(A) = " + aboxOntology.getOWLOntologyManager());
    LOG.debug(modelId + " manager(Q) = " + queryOntology.getOWLOntologyManager());
    LOG.debug(modelId + " id(T) = " + tboxOntology.getOntologyID().getOntologyIRI());
    LOG.debug(modelId + " id(A) = " + aboxOntology.getOntologyID().getOntologyIRI());
    LOG.debug(modelId + " id(Q) = " + queryOntology.getOntologyID().getOntologyIRI());
  }
}
/**
 * Runs the extraction step: optionally normalises the ontology so property ranges are
 * integrated first, then delegates to {@code lSigExtractor} to compute the L-signature
 * for {@code fragment}.
 *
 * @return always {@code true}; failure is signalled by leaving {@code lSigExtractor} null
 */
@Override
protected boolean exec() {
  Timer timer = new Timer();
  if (ontology == null) {
    lSigExtractor = null;
    return true;
  }
  if (integrateRangesFirst) {
    // Rebuild the ontology from its normalised axiom set before extraction.
    OWLNormalization4MORe normalization = new OWLNormalization4MORe(ontology, true, false, false);
    Set<OWLAxiom> normalisedAxioms = normalization.getNormalizedOntology();
    try {
      OWLOntologyManager manager = ontology.getOWLOntologyManager();
      ontology = manager.createOntology();
      manager.addAxioms(ontology, normalisedAxioms);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
      lSigExtractor = null;
      return true;
    }
  }
  lSigExtractor.findLsignature(ontology, fragment);
  if (!integrateRangesFirst) {
    stats.updateNelkAxioms(lSigExtractor.nAxiomsInFragment());
  }
  Logger_MORe.logDebug(
      timer.duration() + "s to find Lsignature with integrateRangesFirst=" + integrateRangesFirst);
  return true;
}
/**
 * Preprocesses the imports closure of {@code rootOntology} and clausifies it into a
 * {@code DLOntology_withMaps}: normalization, axiomatization of built-in properties,
 * object-property inclusion handling (transitivity rewriting), then clausification.
 * The stages mutate the shared {@code axioms} accumulator in a fixed order.
 */
public DLOntology_withMaps preprocessAndClausify(OWLOntology rootOntology) {
  OWLDataFactory factory = rootOntology.getOWLOntologyManager().getOWLDataFactory();
  // Fall back to a fixed URN when the ontology has no default document IRI.
  String ontologyIRI =
      rootOntology.getOntologyID().getDefaultDocumentIRI() == null
          ? "urn:hermit:kb"
          : rootOntology.getOntologyID().getDefaultDocumentIRI().toString();
  Collection<OWLOntology> importClosure = rootOntology.getImportsClosure();
  // Normalize every ontology in the imports closure into one shared axiom set.
  OWLAxioms_withMaps axioms = new OWLAxioms_withMaps();
  OWLNormalization_withMaps normalization =
      new OWLNormalization_withMaps(factory, axioms, 0, m_datatypeManager);
  for (OWLOntology ontology : importClosure) normalization.processOntology(ontology);
  BuiltInPropertyManager_withMaps builtInPropertyManager =
      new BuiltInPropertyManager_withMaps(factory);
  builtInPropertyManager.axiomatizeBuiltInPropertiesAsNeeded(axioms);
  ObjectPropertyInclusionManager_withMaps objectPropertyInclusionManager =
      new ObjectPropertyInclusionManager_withMaps(axioms);
  // now object property inclusion manager added all non-simple properties to
  // axioms.m_complexObjectPropertyExpressions
  // now that we know which roles are non-simple, we can decide which negative object property
  // assertions have to be
  // expressed as concept assertions so that transitivity rewriting applies properly.
  objectPropertyInclusionManager.rewriteNegativeObjectPropertyAssertions(
      factory, axioms, normalization.getDefinitions().size());
  objectPropertyInclusionManager.rewriteAxioms(factory, axioms, 0);
  OWLAxiomsExpressivity_withMaps axiomsExpressivity = new OWLAxiomsExpressivity_withMaps(axioms);
  DLOntology_withMaps dlOntology =
      clausify_withMaps(factory, ontologyIRI, axioms, axiomsExpressivity);
  return dlOntology;
}
private void handleNotesOntologyChanged(List<OWLOntologyChange> changes) { try { OWLOntologyManager notesOntologyManager = notesOntology.getOWLOntologyManager(); if (notesOntologyManager.getOntologyFormat(notesOntology) instanceof BinaryOWLOntologyDocumentFormat) { OWLAPIProjectDocumentStore documentStore = OWLAPIProjectDocumentStore.getProjectDocumentStore(project.getProjectId()); List<OWLOntologyChangeData> infoList = new ArrayList<OWLOntologyChangeData>(); for (OWLOntologyChange change : changes) { OWLOntologyChangeRecord rec = change.getChangeRecord(); OWLOntologyChangeData info = rec.getData(); infoList.add(info); } BinaryOWLOntologyDocumentSerializer serializer = new BinaryOWLOntologyDocumentSerializer(); serializer.appendOntologyChanges( notesOntologyDocument, new OntologyChangeDataList(infoList, System.currentTimeMillis())); } else { // Swap it over notesOntologyManager.saveOntology(notesOntology, new BinaryOWLOntologyDocumentFormat()); } } catch (OWLOntologyStorageException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } }
/**
 * Create a reasoner for the given ontology and configuration.
 *
 * @param ontology the ontology to reason over; this reasoner registers itself as an ontology
 *     change listener on the ontology's manager
 * @param config reasoner configuration supplying the progress monitor, timeout and policies
 * @param bufferingMode whether ontology changes are buffered or applied immediately
 * @throws IllegalConfigurationException if the configured fresh-entity policy conflicts with
 *     the global Pellet setting
 */
public PelletReasoner(
    OWLOntology ontology, OWLReasonerConfiguration config, BufferingMode bufferingMode)
    throws IllegalConfigurationException {

  individualNodeSetPolicy = config.getIndividualNodeSetPolicy();

  // Pellet's global fresh-entity handling must agree with the requested configuration.
  if (!getFreshEntityPolicy().equals(config.getFreshEntityPolicy())) {
    throw new IllegalConfigurationException(
        "PelletOptions.SILENT_UNDEFINED_ENTITY_HANDLING conflicts with reasoner configuration",
        config);
  }

  this.ontology = ontology;
  monitor = config.getProgressMonitor();

  kb = new KnowledgeBase();
  kb.setTaxonomyBuilderProgressMonitor(new ProgressAdapter(monitor));
  // A non-positive timeout means "no limit".
  if (config.getTimeOut() > 0) {
    kb.timers.mainTimer.setTimeout(config.getTimeOut());
  }

  this.manager = ontology.getOWLOntologyManager();
  this.factory = manager.getOWLDataFactory();
  this.visitor = new PelletVisitor(kb);

  this.bufferingMode = bufferingMode;

  // Track ontology changes so the knowledge base can be kept in sync.
  manager.addOntologyChangeListener(this);

  this.shouldRefresh = true;
  this.pendingChanges = new ArrayList<OWLOntologyChange>();

  // Load the initial ontology content into the knowledge base.
  refresh();
}
/** Regression test: a null ontology manager must not make getBottomClassNode() throw NPE. */
@Test
public void shouldHandleNullManager_For_GetBottomClassNode() {
  try {
    // Simulate an ontology whose manager has gone away.
    when(ontology.getOWLOntologyManager()).thenReturn(null);
    reasoner.getBottomClassNode();
  } catch (NullPointerException npe) {
    fail("NullPointerException");
  }
}
/**
 * Parses a Manchester-syntax class expression, resolving entity names through the
 * bidirectional short form provider.
 *
 * @param classExpressionString the Manchester-syntax expression to parse
 * @return the parsed class expression
 */
public OWLClassExpression parseClassExpression(String classExpressionString) {
  OWLDataFactory df = rootOntology.getOWLOntologyManager().getOWLDataFactory();
  ManchesterOWLSyntaxEditorParser parser =
      new ManchesterOWLSyntaxEditorParser(df, classExpressionString);
  parser.setDefaultOntology(rootOntology);
  // Names in the expression are resolved via the short forms registered for this ontology.
  parser.setOWLEntityChecker(new ShortFormEntityChecker(bidiShortFormProvider));
  return parser.parseClassExpression();
}
/**
 * Disposes the cached module reasoner and detaches the module change listener from the abox
 * ontology's manager. Only call within a {@link #moduleReasonerMutex} synchronized block!!
 */
private void _internalDisposeModuleReasonerAndListener() {
  if (moduleReasoner != null) {
    moduleReasoner.dispose();
    moduleReasoner = null;
  }
  if (moduleListener != null) {
    aboxOntology.getOWLOntologyManager().removeOntologyChangeListener(moduleListener);
    moduleListener = null;
  }
}
/**
 * Creates a parser for DL queries against the given ontology.
 *
 * @param rootOntology the ontology whose imports closure supplies the referencable entities
 * @param shortFormProvider generates the names under which entities can be referenced
 */
public DLQueryParser(OWLOntology rootOntology, ShortFormProvider shortFormProvider) {
  this.rootOntology = rootOntology;
  // Build a bidirectional short form provider over the whole imports closure; it does the
  // actual name <-> entity mapping, generating names with the supplied provider.
  bidiShortFormProvider =
      new BidirectionalShortFormProviderAdapter(
          rootOntology.getOWLOntologyManager(),
          rootOntology.getImportsClosure(),
          shortFormProvider);
}
/**
 * Creates a launcher that operates on a private copy of {@code ontology}, so extraction does
 * not mutate the caller's ontology.
 *
 * @param ontology the ontology whose axioms are copied into a fresh ontology
 * @param fragment the logic fragment to extract the L-signature for
 */
public LsignatureExtractorViaInverseRewritingLauncher(
    OWLOntology ontology, LogicFragment fragment) {
  this.ontology = null;
  try {
    manager = ontology.getOWLOntologyManager();
    this.ontology = manager.createOntology();
    manager.addAxioms(this.ontology, ontology.getAxioms());
  } catch (OWLOntologyCreationException e) {
    // NOTE(review): failure is swallowed, leaving this.ontology null; downstream code
    // appears to null-check the ontology - confirm this best-effort contract is intended.
    e.printStackTrace();
  }
  this.fragment = fragment;
}
/**
 * Wires the mock ontology -> manager -> data factory chain and stubs every built-in entity
 * the reasoner under test requests from the factory, then constructs the reasoner.
 */
@Before
public void setUp() {
  when(ontology.getOWLOntologyManager()).thenReturn(manager);
  when(manager.getOWLDataFactory()).thenReturn(dataFactory);
  // Built-in top/bottom for classes, object properties and data properties.
  when(dataFactory.getOWLThing()).thenReturn(owlThing);
  when(dataFactory.getOWLNothing()).thenReturn(owlNothing);
  when(dataFactory.getOWLTopObjectProperty()).thenReturn(owlTopObjectProperty);
  when(dataFactory.getOWLBottomObjectProperty()).thenReturn(owlBottomObjectProperty);
  when(dataFactory.getOWLTopDataProperty()).thenReturn(owlTopDataProperty);
  when(dataFactory.getOWLBottomDataProperty()).thenReturn(owlBottomDataProperty);
  reasoner = new NoOpReasoner(ontology, dataFactory);
}
/**
 * Creates a launcher that operates on a private copy of {@code ontology}, so extraction does
 * not mutate the caller's ontology.
 *
 * @param ontology the ontology whose axioms are copied into a fresh ontology
 * @param fragment the logic fragment to extract the L-signature for
 * @param integrateRangesFirst whether exec() should normalise ranges before extraction
 */
public LsignatureExtractorLauncher(
    OWLOntology ontology, LogicFragment fragment, boolean integrateRangesFirst) {
  this.ontology = null;
  try {
    OWLOntologyManager manager = ontology.getOWLOntologyManager();
    this.ontology = manager.createOntology();
    manager.addAxioms(this.ontology, ontology.getAxioms());
  } catch (OWLOntologyCreationException e) {
    // NOTE(review): failure is swallowed, leaving this.ontology null; exec() checks for a
    // null ontology - confirm this best-effort contract is intended.
    e.printStackTrace();
  }
  this.fragment = fragment;
  this.integrateRangesFirst = integrateRangesFirst;
}
/**
 * Checks the ontology for profile violations: first collects the OWL 2 DL violations, then
 * walks the imports closure with an EL-specific visitor and merges its violations in.
 *
 * @param ontology the ontology to check
 * @return a report combining DL and EL profile violations
 */
public OWLProfileReport checkOntology(OWLOntology ontology) {
  this.ont = ontology;
  OWL2DLProfile dlProfile = new OWL2DLProfile();
  OWLProfileReport dlReport = dlProfile.checkOntology(ontology);
  Set<OWLProfileViolation> allViolations = new HashSet<OWLProfileViolation>(dlReport.getViolations());
  // Walk the whole imports closure to pick up EL-specific violations.
  OWLOntologyWalker walker = new OWLOntologyWalker(ontology.getImportsClosure());
  OWL2ELProfileObjectVisitor elVisitor =
      new OWL2ELProfileObjectVisitor(walker, ontology.getOWLOntologyManager());
  walker.walkStructure(elVisitor);
  allViolations.addAll(elVisitor.getProfileViolations());
  return new OWLProfileReport(this, allViolations);
}
/**
 * Lazily creates and caches the module reasoner, attaching (once) a change listener that is
 * notified only for changes broadcast for the abox ontology.
 *
 * @return the cached module reasoner
 * @throws OWLOntologyCreationException if creating the module reasoner fails
 */
public OWLReasoner getModuleReasoner() throws OWLOntologyCreationException {
  synchronized (moduleReasonerMutex) {
    if (moduleReasoner == null) {
      moduleReasoner = createModuleReasoner();
    }
    if (moduleListener == null) {
      moduleListener = createModuleChangeListener();
      // Restrict the broadcast to changes on the abox ontology.
      aboxOntology
          .getOWLOntologyManager()
          .addOntologyChangeListener(
              moduleListener, new SpecificOntologyChangeBroadcastStrategy(aboxOntology));
    }
  }
  return moduleReasoner;
}
private OWLOntology reload( OWLOntology ontology, OWLOntologyFormat format, OWLOntologyLoaderConfiguration configuration) throws IOException, OWLOntologyStorageException, OWLOntologyCreationException { OWLOntologyManager man = ontology.getOWLOntologyManager(); File tempFile = File.createTempFile("Ontology", ".owl"); man.saveOntology(ontology, format, new FileDocumentTarget(tempFile)); OWLOntologyManager man2 = getManager(); // OWLManager.createOWLOntologyManager(); OWLOntology reloaded = man2.loadOntologyFromOntologyDocument(new FileDocumentSource(tempFile), configuration); man2.removeAxioms(reloaded, new HashSet<OWLAxiom>(reloaded.getAxioms(AxiomType.DECLARATION))); return reloaded; }
/**
 * Builds a reasoner over the BOT-locality module of the abox ontology, seeded with every
 * named individual in the abox signature.
 *
 * @return a fresh reasoner for the extracted module
 * @throws OWLOntologyCreationException if the module ontology cannot be created
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private OWLReasoner createModuleReasoner() throws OWLOntologyCreationException {
  LOG.info("Creating module reasoner for module: " + modelId);
  OWLOntologyManager moduleManager =
      OWLManager.createOWLOntologyManager(
          aboxOntology.getOWLOntologyManager().getOWLDataFactory());
  SyntacticLocalityModuleExtractor extractor =
      new SyntacticLocalityModuleExtractor(moduleManager, aboxOntology, ModuleType.BOT);
  // Seed the module with all named individuals of the abox (raw cast: Set<OWLNamedIndividual>
  // is not a Set<OWLEntity> under invariant generics).
  Set<OWLEntity> seeds = (Set) aboxOntology.getIndividualsInSignature();
  OWLOntology module = extractor.extractAsOntology(seeds, IRI.generateDocumentIRI());
  OWLReasoner moduleReasoner = reasonerFactory.createReasoner(module);
  LOG.info("Done creating module reasoner module: " + modelId);
  return moduleReasoner;
}
/**
 * Merges every ontology in {@code ontologies} (except the target itself) into
 * {@code targetOntology}: copies axioms and ontology annotations, migrates import
 * declarations where safe, and applies all accumulated changes in one batch.
 */
public void mergeOntologies() {
  List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
  for (OWLOntology ont : ontologies) {
    if (!ont.equals(targetOntology)) {
      // move the axioms
      for (OWLAxiom ax : ont.getAxioms()) {
        changes.add(new AddAxiom(targetOntology, ax));
      }
      // move ontology annotations
      for (OWLAnnotation annot : ont.getAnnotations()) {
        changes.add(new AddOntologyAnnotation(targetOntology, annot));
      }
      if (!targetOntology.getOntologyID().isAnonymous()) {
        // move ontology imports
        for (OWLImportsDeclaration decl : ont.getImportsDeclarations()) {
          // Imports that point at ontologies being merged are dropped: their axioms
          // are copied directly above.
          if (ontologies.contains(ont.getOWLOntologyManager().getImportedOntology(decl))) {
            continue;
          }
          Optional<IRI> defaultDocumentIRI =
              targetOntology.getOntologyID().getDefaultDocumentIRI();
          if (defaultDocumentIRI.isPresent() && !decl.getIRI().equals(defaultDocumentIRI.get())) {
            changes.add(new AddImport(targetOntology, decl));
          } else {
            logger.warn(
                "Merge: ignoring import declaration for ontology "
                    + targetOntology.getOntologyID()
                    + " (would result in target ontology importing itself).");
          }
        }
      }
    }
  }
  try {
    // Apply everything atomically through the manager.
    owlOntologyManager.applyChanges(changes);
  } catch (OWLOntologyChangeException e) {
    ErrorLogPanel.showErrorDialog(e);
  }
}
/** @return the ontology manager of the tbox ontology */
public OWLOntologyManager getOWLOntologyManager() {
  return tboxOntology.getOWLOntologyManager();
}
/**
 * Disposes a repository: unregisters its backing ontology from the internal maps and
 * removes the ontology from its manager.
 *
 * @param kr the repository to dispose
 */
public void dispose(KuabaRepository kr) {
  OWLOntology backingModel = (OWLOntology) kr.getModel();
  // Unregister by ontology IRI, drop the file mapping, then detach from the manager.
  repoMap.remove(backingModel.getOntologyID().getOntologyIRI());
  fileMap.remove(kr);
  backingModel.getOWLOntologyManager().removeOntology(backingModel);
}
public PhenexToOWLClassic(OWLOntology ontology) throws OWLOntologyCreationException { this.ontology = ontology; this.ontologyManager = ontology.getOWLOntologyManager(); this.factory = this.ontologyManager.getOWLDataFactory(); }
/** @return owl:Thing, the top class expression */
public OWLClassExpression top() {
  OWLDataFactory factory = ontology.getOWLOntologyManager().getOWLDataFactory();
  return factory.getOWLThing();
}
/**
 * Retrieves a stored ontology as an OWLOntology. When {@code merge} is set, the ontology is
 * merged with its whole imports closure into a single ontology; otherwise its import
 * declarations are rewritten to point at addresses under {@code universalPrefix}.
 *
 * @param ontologyId the ID of the stored ontology to fetch
 * @param merge whether to merge the imports closure instead of rewriting imports
 * @param universalPrefix base IRI used when rewriting import targets
 * @return the (merged or import-rewritten) ontology
 */
protected OWLOntology getOntologyAsOWLOntology(
    OWLOntologyID ontologyId, boolean merge, IRI universalPrefix) {
  // if (merge) throw new UnsupportedOperationException("Merge not implemented yet for
  // OWLOntology.");

  // Remove the check below. It might be an unmanaged dependency (TODO remove from collector and
  // reintroduce check?).
  // if (!hasOntology(ontologyIri)) return null;
  OWLOntology o;
  o = ontologyProvider.getStoredOntology(ontologyId, OWLOntology.class, merge);

  if (merge) {
    final Set<OWLOntology> set = new HashSet<OWLOntology>();
    log.debug("Merging {} with its imports, if any.", o);
    set.add(o);
    // Actually, if the provider already performed the merge, this won't happen
    for (OWLOntology impo : o.getImportsClosure()) {
      log.debug("Imported ontology {} will be merged with {}.", impo, o);
      set.add(impo);
    }
    // Feed the collected closure to the merger through a one-off set provider.
    OWLOntologySetProvider provider =
        new OWLOntologySetProvider() {
          @Override
          public Set<OWLOntology> getOntologies() {
            return set;
          }
        };
    OWLOntologyMerger merger = new OWLOntologyMerger(provider);
    try {
      o =
          merger.createMergedOntology(
              OWLManager.createOWLOntologyManager(), ontologyId.getOntologyIRI());
    } catch (OWLOntologyCreationException e) {
      log.error("Failed to merge imports for ontology " + ontologyId, e);
      // do not reassign the root ontology
    }
  } else {
    // Rewrite import statements
    List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange>();
    OWLDataFactory df = OWLManager.getOWLDataFactory();
    /*
     * TODO manage import rewrites better once the container ID is fully configurable (i.e. instead of
     * going upOne() add "session" or "ontology" if needed). But only do this if we keep considering
     * imported ontologies as *not* managed.
     */
    for (OWLImportsDeclaration oldImp : o.getImportsDeclarations()) {
      changes.add(new RemoveImport(o, oldImp));
      String s = oldImp.getIRI().toString();
      // FIXME Ugly way to check, but we'll get through with it
      if (s.contains("::")) s = s.substring(s.indexOf("::") + 2, s.length());
      boolean managed = managedOntologies.contains(oldImp.getIRI());
      // For space, always go up at least one
      String tid = getID();
      if (backwardPathLength > 0) tid = tid.split("/")[0];
      // Managed imports resolve under this container's ID; unmanaged ones one level up.
      IRI target =
          IRI.create(
              (managed ? universalPrefix + "/" + tid + "/" : URIUtils.upOne(universalPrefix) + "/")
                  + s);
      changes.add(new AddImport(o, df.getOWLImportsDeclaration(target)));
    }
    o.getOWLOntologyManager().applyChanges(changes);
  }
  return o;
}
/**
 * Rearranges stored classification results and compares, class by class, the superclass sets
 * computed by this system (held in Redis) against those computed by the given reasoner,
 * printing every mismatch. Individuals are counted but deliberately not compared (see the
 * commented-out block at the end).
 *
 * @param ontology the ontology whose classes are compared
 * @param reasoner reference reasoner supplying expected superclass sets
 * @param resultStore Redis connection holding the rearranged class results (DB-1)
 * @param resultStore2 Redis connection for individual results (unused while the check is off)
 * @param idReader Redis connection used to translate concepts to packed IDs
 * @throws Exception propagated from Redis access or the rearranger
 */
private void rearrangeAndCompareResults(
    OWLOntology ontology,
    OWLReasoner reasoner,
    Jedis resultStore,
    Jedis resultStore2,
    Jedis idReader)
    throws Exception {
  new ResultRearranger().initializeAndRearrange();
  Set<OWLClass> classes = ontology.getClassesInSignature();
  // rearranged results are in DB-1
  resultStore.select(1);
  double classCount = 0;
  int multiplier = 1;
  int missCount = 0;
  String bottomID = Util.getPackedID(Constants.BOTTOM_ID, EntityType.CLASS);
  System.out.println("Comparing Classes... " + classes.size());
  OWLClass owlThing = ontology.getOWLOntologyManager().getOWLDataFactory().getOWLThing();
  for (OWLClass cl : classes) {
    String classID = conceptToID(cl.toString(), idReader);
    // REL/Pellet doesn't consider individuals i.e. {a} \sqsubseteq \bottom
    // so skipping checking bottom
    if (classID.equals(bottomID)) continue;
    classCount++;
    Set<OWLClass> reasonerSuperClasses = reasoner.getSuperClasses(cl, false).getFlattened();
    // add cl itself to S(X) computed by reasoner. That is missing
    // in its result.
    reasonerSuperClasses.add(cl);
    reasonerSuperClasses.add(owlThing);
    // adding equivalent classes -- they are not considered if asked for superclasses
    Iterator<OWLClass> iterator = reasoner.getEquivalentClasses(cl).iterator();
    while (iterator.hasNext()) reasonerSuperClasses.add(iterator.next());
    Set<String> superClasses = resultStore.smembers(classID);
    if (superClasses.size() == reasonerSuperClasses.size()) {
      compareAndPrintEqualSizedClasses(cl, reasonerSuperClasses, superClasses, idReader);
    } else {
      // Sizes differ: print the class, then every element present on only one side.
      System.out.println(
          "\n"
              + cl.toString()
              + " -- "
              + superClasses.size()
              + ", "
              + reasonerSuperClasses.size());
      for (OWLClass scl : reasonerSuperClasses) {
        String sclID = conceptToID(scl.toString(), idReader);
        if (!superClasses.contains(sclID)) {
          System.out.print(cl.toString() + " -ne- " + scl.toString());
          System.out.print(" , ");
        }
        superClasses.remove(sclID);
      }
      // Whatever survived removal exists only in the stored results.
      for (String s : superClasses)
        System.out.println("\t -- " + Util.idToConcept(s, idReader) + "(" + s + ")");
      System.out.println();
      missCount++;
    }
  }
  System.out.println("No of classes not equal: " + missCount);
  Set<OWLNamedIndividual> individuals = ontology.getIndividualsInSignature();
  System.out.println("Rearranging individuals...");
  System.out.println("Individuals: " + individuals.size());
  System.out.println("Not checking for individuals...");
  /* rearrangeIndividuals(individuals, resultStore, resultStore2, idReader);
  int cnt = 0;
  for(OWLClass cl : classes) {
    Set<OWLNamedIndividual> instances = reasoner.getInstances(cl, false).getFlattened();
    Set<String> computedInstances = resultStore2.smembers(
        conceptToID(cl.toString(), idReader));
    if(computedInstances.size() == instances.size()) {
      compareAndPrintEqualSizedIndividuals(cl, instances, computedInstances, idReader);
    }
    else {
      System.out.println(cl.toString() + " -- " +
          computedInstances.size() + " , " + instances.size());
      compareAndPrintUnEqualSizedIndividuals(cl, instances, computedInstances, idReader);
      cnt++;
    }
  }
  System.out.println("No of classes for which individuals didn't match: " + cnt);
  */
  // Switch back to the default DB before returning.
  resultStore.select(0);
}
/**
 * Tells whether the ontology's document format, as recorded by its manager, is OBO.
 *
 * @param ontology the ontology to inspect
 * @return true if the ontology was loaded/saved in OBO format
 */
private boolean isOBOFormat(OWLOntology ontology) {
  return ontology.getOWLOntologyManager().getOntologyFormat(ontology)
      instanceof OBOOntologyFormat;
}
/**
 * Computes the L-signature of {@code ontology} for the given fragment, running up to three
 * extraction strategies concurrently (plain ELK, inverse-rewriting, range-integration) and
 * merging their contributions. Axioms produced by the rewriting strategies are added to the
 * returned copy of the ontology so the extra lSignature classes remain classifiable.
 *
 * @param ontology the input ontology (never mutated; a copy is worked on when possible)
 * @param fragment the fragment whose L-signature is extracted
 * @param stats statistics collector updated with signature sizes
 * @return the working copy of the ontology (or the input itself if copying failed)
 */
public OWLOntology findLsignature(
    OWLOntology ontology, LogicFragment fragment, Statistics stats) {
  Timer t = new Timer();
  this.stats = stats;
  Logger_MORe.logInfo("extracting " + fragment.toString() + "-signature");
  OWLOntology ret = null;
  OWLOntologyManager manager = ontology.getOWLOntologyManager();
  try {
    // Work on a copy so extra axioms can be added without touching the input.
    ret = manager.createOntology();
    manager.addAxioms(ret, ontology.getAxioms());
  } catch (OWLOntologyCreationException e) {
    e.printStackTrace();
  }
  lSignatureClasses = new HashSet<OWLClass>();
  lSignatureOther = new HashSet<OWLEntity>();
  compSignatureClasses = new HashSet<OWLClass>();
  compSignatureOther = new HashSet<OWLEntity>();
  LsignatureExtractorLauncher elkSignatureExtractorLauncher = null;
  LsignatureExtractorLauncher elkSignatureExtractorIntegratingRangesLauncher = null;
  LsignatureExtractorViaInverseRewritingLauncher elkSignatureExtractorRewritingInversesLauncher =
      null;
  ForkJoinPool executor = new ForkJoinPool();
  // The plain ELK extraction always runs; the other two are optional refinements.
  elkSignatureExtractorLauncher =
      new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, false);
  executor.execute(elkSignatureExtractorLauncher);
  if (ret != null) {
    // otherwise we have nowhere to return the axioms in the normalised ontologies necessary to
    // really classify all the extra classses in the lSignature
    if (rewriteInverses) {
      elkSignatureExtractorRewritingInversesLauncher =
          new LsignatureExtractorViaInverseRewritingLauncher(ontology, LogicFragment.ELK);
      executor.execute(elkSignatureExtractorRewritingInversesLauncher);
    }
    if (integrateRanges) {
      elkSignatureExtractorIntegratingRangesLauncher =
          new LsignatureExtractorLauncher(ontology, LogicFragment.ELK, true);
      executor.execute(elkSignatureExtractorIntegratingRangesLauncher);
    }
    // check the output of the normal ELKsignature and cancel the other threads if the lSig is the
    // whole signature
    initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
    if (compSignatureClasses.isEmpty())
      cancelTasks(
          elkSignatureExtractorIntegratingRangesLauncher,
          elkSignatureExtractorRewritingInversesLauncher);
    else {
      // Try the inverse-rewriting result first; if it extends the lSignature, keep
      // its normalised axioms in the returned ontology.
      if (elkSignatureExtractorRewritingInversesLauncher != null
          && extendLsignature(
                  (LsignatureExtractor) elkSignatureExtractorRewritingInversesLauncher.join())
              > 0) {
        manager.addAxioms(
            ret,
            ((LsignatureExtractorViaInverseRewritingLauncher)
                    elkSignatureExtractorRewritingInversesLauncher)
                .getOntology()
                .getAxioms());
      }
      if (compSignatureClasses.isEmpty())
        cancelTasks(elkSignatureExtractorRewritingInversesLauncher);
      else if (elkSignatureExtractorIntegratingRangesLauncher != null
          && extendLsignature(
                  (LsignatureExtractor) elkSignatureExtractorIntegratingRangesLauncher.join())
              > 0) {
        manager.addAxioms(
            ret,
            ((LsignatureExtractorLauncher) elkSignatureExtractorIntegratingRangesLauncher)
                .getOntology()
                .getAxioms());
      }
    }
    stats.updateLsignatureSize(lSignatureClasses.size(), true);
  } else {
    // Copying failed: fall back to the input ontology and the plain extraction only.
    ret = ontology;
    initialiseLsignature((LsignatureExtractor) elkSignatureExtractorLauncher.join());
  }
  Logger_MORe.logInfo(lSignatureClasses.size() + "classes in lSignature");
  Logger_MORe.logDebug(lSignatureClasses.toString());
  Logger_MORe.logInfo(compSignatureClasses.size() + "classes in compSignature");
  // might be a good idea to try to isolate extra axioms in the normalisation/rewriting - is this
  // possible/worth the effort?
  // check the order in which we try to extend the lSignature with each of the rewritten
  // ontologies and consider if one may be better that the other
  Logger_MORe.logDebug(t.duration() + "s to find Lsignature");
  return ret;
}
/**
 * Generates SPARQL query templates from an ontology: one instance query per class whose IRI
 * contains "Query", then a two-variable query and single-variable projections for every object
 * property in the imports closure. Output optionally goes to {@code outputFile}.
 */
public static void main(String[] args) throws Exception {
  if (args.length == 0) {
    // args = new String[] { "/home/yzhou/backup/20141212/univ-bench-dl-queries.owl"};
    args = new String[] {PagodaTester.onto_dir + "fly/fly-all-in-one_rolledUp.owl"};
    // args = new String[] { PagodaTester.onto_dir +
    // "dbpedia/integratedOntology-all-in-one-minus-datatype.owl" };
    // args = new String[] { PagodaTester.onto_dir + "npd/npd-all-minus-datatype.owl" };
    // args = new String[] { PagodaTester.onto_dir + "bio2rdf/chembl/cco-noDPR.ttl" };
    // args = new String[] { PagodaTester.onto_dir +
    // "bio2rdf/reactome/biopax-level3-processed.owl" };
    // args = new String[] { PagodaTester.onto_dir + "bio2rdf/uniprot/core-processed-noDis.owl"
    // };
  }

  // OWLOntology ontology = OWLHelper.getMergedOntology(args[0], null);
  // OWLHelper.correctDataTypeRangeAxioms(ontology);
  OWLOntology ontology = OWLHelper.loadOntology(args[0]);
  OWLOntologyManager manager = ontology.getOWLOntologyManager();
  OWLDataFactory factory = manager.getOWLDataFactory();
  // manager.saveOntology(ontology, new FileOutputStream(args[0].replace(".owl",
  // "_owlapi.owl")));

  if (outputFile != null) Utility.redirectCurrentOut(outputFile);

  int queryID = 0;
  // One query per "Query" class (built-ins skipped), filled into the template.
  for (OWLClass cls : ontology.getClassesInSignature(true)) {
    if (cls.equals(factory.getOWLThing()) || cls.equals(factory.getOWLNothing())) continue;
    if (!cls.toStringID().contains("Query")) continue;
    System.out.println("^[Query" + ++queryID + "]");
    System.out.println(template.replace("@CLASS", cls.toStringID()));
    System.out.println();
  }

  // Two-variable SELECT for every object property in the imports closure.
  for (OWLOntology onto : ontology.getImportsClosure())
    for (OWLObjectProperty prop : onto.getObjectPropertiesInSignature()) {
      // if (!prop.toStringID().contains("Query")) continue;
      System.out.println("^[Query" + ++queryID + "]");
      System.out.println("SELECT ?X ?Y");
      System.out.println("WHERE {");
      System.out.println("?X <" + prop.toStringID() + "> ?Y .");
      System.out.println("}");
      System.out.println();
    }

  // Same pattern again, projecting each answer variable separately.
  String[] answerVars = new String[] {"?X", "?Y"};
  for (OWLOntology onto : ontology.getImportsClosure())
    for (OWLObjectProperty prop : onto.getObjectPropertiesInSignature()) {
      // if (!prop.toStringID().contains("Query")) continue;
      for (int i = 0; i < answerVars.length; ++i) {
        System.out.println("^[Query" + ++queryID + "]");
        System.out.println("SELECT " + answerVars[i]);
        System.out.println("WHERE {");
        System.out.println("?X <" + prop.toStringID() + "> ?Y .");
        System.out.println("}");
        System.out.println();
      }
    }

  if (outputFile != null) Utility.closeCurrentOut();
}