/**
 * Initializes the components of the semantic container once the ontology URIs have been
 * dispatched by OwlDistributionInitializer.
 *
 * @throws ExceptionInInitializerError if the ontology parameters have not been initialized
 */
public void semInit() throws ExceptionInInitializerError {
  if (isInitialized()) {
    /** Initialization of the local ontology. This ontology is never modified afterwards. */
    // Manager for the local ontology; only used here.
    OWLOntologyManager localOntoManager = OWLManager.createOWLOntologyManager();
    OWLOntologyIRIMapper localOntoMapper =
        new SimpleIRIMapper(getOntologyIri(), getPhysicalIri());
    localOntoManager.addIRIMapper(localOntoMapper);
    // Ontology initialization.
    try {
      // Load the local ontology from its physical IRI.
      this.localOntology = localOntoManager.loadOntology(getPhysicalIri());
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    /**
     * Initialization of the reasoner's ontology, which will contain the local ontology plus
     * possible additional axioms. The instance is created here and attached to a manager and a
     * reasoner reachable through getters. The instance stays the same, but its content changes.
     */
    // Manager for the reasoner's ontology.
    this.manager = OWLManager.createOWLOntologyManager();
    // Create the second ontology, named after this container's namespace.
    try {
      IRI ontoName = IRI.create(this.nameSpace);
      OWLOntologyID ontoId = new OWLOntologyID(ontoName);
      this.reasoningOntology = this.manager.createOntology(ontoId);
    } catch (OWLOntologyCreationException e) {
      e.printStackTrace();
    }
    // Reasoner initialization: Pellet in incremental, non-buffering mode.
    PelletReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();
    PelletOptions.USE_INCREMENTAL_CONSISTENCY = true;
    PelletOptions.USE_COMPLETION_QUEUE = true;
    PelletReasoner reasoner = reasonerFactory.createNonBufferingReasoner(reasoningOntology);
    // Register the reasoner as an ontology change listener so edits are picked up.
    this.manager.addOntologyChangeListener(reasoner);
    // Only the class hierarchy is precomputed here; other inference types were
    // deliberately left out (previously present as commented-out code).
    reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
    this.reasoner = reasoner;
  } else
    throw new ExceptionInInitializerError(
        "Paramètres de l'ontologie non initialisés ; ontoParamsInit = "
            + Reflection.getCallerClass(2));
}
@Override public void setUp() throws Exception { // Loading the OWL file OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile))); // Loading the OBDA data fac = OBDADataFactoryImpl.getInstance(); obdaModel = fac.getOBDAModel(); ModelIOManager ioManager = new ModelIOManager(obdaModel); ioManager.load(obdafile); QuestPreferences p = new QuestPreferences(); p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL); p.setCurrentValueOf(QuestPreferences.OBTAIN_FULL_METADATA, QuestConstants.FALSE); // Creating a new instance of the reasoner QuestOWLFactory factory = new QuestOWLFactory(); QuestOWLConfiguration config = QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build(); reasoner = factory.createReasoner(ontology, config); // Now we are ready for querying conn = reasoner.getConnection(); }
/**
 * Creates an ontology manager and loads the configured ontology files into it, ignoring
 * missing imports.
 *
 * @param config base loader configuration (not mutated; see note below)
 * @param excludeDocumentIRI document IRI to skip while loading
 * @return a manager populated with the loaded ontologies
 * @throws Exception if loading the ontology files fails
 */
public OWLOntologyManager createOwlOntologyManager(
    OWLOntologyLoaderConfiguration config, IRI excludeDocumentIRI) throws Exception {
  OWLOntologyManager m = OWLManager.createOWLOntologyManager();
  // OWLOntologyLoaderConfiguration is immutable: its setters return a modified copy.
  // The previous code discarded the return value, silently dropping the SILENT strategy.
  config = config.setMissingImportHandlingStrategy(MissingImportHandlingStrategy.SILENT);
  loadOntologyFiles(m, config, excludeDocumentIRI);
  return m;
}
@Test public void test03() { File file = new File("TestData/EliminateTransTest03.owl"); OWLOntologyManager man = OWLManager.createOWLOntologyManager(); OWLOntology ontology; try { ontology = man.loadOntologyFromOntologyDocument(file); HornSHIQNormalizer normalizer = new HornSHIQNormalizer(); OWLOntology normalizedOnt = normalizer.normalize(ontology); HornALCHIQTransNormalizer normalizer1 = new HornALCHIQTransNormalizer(); OWLOntology normalizedOnt1 = normalizer1.normalize(normalizedOnt); HornALCHIQNormalizer normalizer2 = new HornALCHIQNormalizer(); OWLOntology normalizedOnt2 = normalizer2.normalize(normalizedOnt1); man.saveOntology( normalizedOnt2, IRI.create(new File("TestData/EliminateTransTest03Norm.owl"))); } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (OWLOntologyStorageException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
/** * Serializes a batch of triples corresponding to a predicate into one file. Upper bound: * TRIPLE_LIMIT_PER_FILE. */ private static int serializeTripleBatch( OWLOntology ontology, QuestOWLIndividualAxiomIterator iterator, String filePrefix, String predicateName, int fileCount, String format) throws Exception { String fileName = filePrefix + fileCount + ".owl"; OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); // Main buffer OWLOntology aBox = manager.createOntology(IRI.create(predicateName)); // Add the signatures for (OWLDeclarationAxiom axiom : ontology.getAxioms(AxiomType.DECLARATION)) { manager.addAxiom(aBox, axiom); } int tripleCount = 0; while (iterator.hasNext() && (tripleCount < TRIPLE_LIMIT_PER_FILE)) { manager.addAxiom(aBox, iterator.next()); tripleCount++; } // BufferedOutputStream output = new BufferedOutputStream(new // FileOutputStream(outputPath.toFile())); // BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output, "UTF-8")); BufferedWriter writer = new BufferedWriter(new FileWriter(fileName)); manager.saveOntology(aBox, getOntologyFormat(format), new WriterDocumentTarget(writer)); return tripleCount; }
/**
 * Builds the OWLObjectProperty whose IRI is {@code ontologyIRI + "#" + text}.
 *
 * @param text local (fragment) name of the property
 * @return the object property entity for that IRI
 */
public static OWLObjectProperty getObjectPropertyFromName(String text) {
  // Use the shared data factory directly; instantiating a whole OWLOntologyManager
  // per call (as before) was needless overhead.
  OWLDataFactory factory = OWLManager.getOWLDataFactory();
  return factory.getOWLObjectProperty(IRI.create(ontologyIRI + "#" + text));
}
public static void main(String[] args) {
  try {
    // Render entities with short forms under the tambis namespace.
    SimpleRenderer renderer = new SimpleRenderer();
    renderer.setShortFormProvider(
        new DefaultPrefixManager("http://www.mindswap.org/ontologies/tambis-full.owl#"));
    ToStringRenderer.getInstance().setRenderer(renderer);

    // Fetch the patched tambis ontology from the Manchester repository.
    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ont =
        man.loadOntology(
            IRI.create(
                "http://owl.cs.manchester.ac.uk/repository/download?ontology=http://www.cs.manchester.ac.uk/owl/ontologies/tambis-patched.owl"));
    System.out.println("Loaded!");

    // Classify with Pellet, then report the root unsatisfiable classes.
    OWLReasonerFactory reasonerFactory = PelletReasonerFactory.getInstance();
    OWLReasoner reasoner = reasonerFactory.createNonBufferingReasoner(ont);
    reasoner.getUnsatisfiableClasses();
    ExplanationBasedRootClassFinder rdr =
        new ExplanationBasedRootClassFinder(man, reasoner, reasonerFactory);
    for (OWLClass cls : rdr.getRootUnsatisfiableClasses()) {
      System.out.println("ROOT! " + cls);
    }
  } catch (TimeOutException | ReasonerInterruptedException | OWLOntologyCreationException e) {
    e.printStackTrace();
  }
}
/**
 * Benchmarks Pellet's IncrementalClassifier: classifies the base ontology, then merges each
 * ontology file in ontDir into it one at a time, re-classifying incrementally and printing
 * axiom counts and elapsed times.
 *
 * @param baseOnt path of the base ontology file
 * @param ontDir directory whose ontology files are added incrementally
 * @throws Exception on ontology loading failures
 */
private void getPelletIncrementalClassifierRunTime(String baseOnt, String ontDir)
    throws Exception {
  System.out.println("Using Pellet Incremental Classifier...");
  GregorianCalendar start = new GregorianCalendar();
  File ontFile = new File(baseOnt);
  IRI documentIRI = IRI.create(ontFile);
  // The base ontology is loaded via the shared OWL.manager so the AddAxiom changes
  // below go through the same manager instance that owns it.
  OWLOntology baseOntology = OWL.manager.loadOntology(documentIRI);
  IncrementalClassifier classifier = new IncrementalClassifier(baseOntology);
  classifier.classify();
  System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
  System.out.println("Time taken for base ontology (millis): " + Util.getElapsedTime(start));
  File ontDirPath = new File(ontDir);
  // NOTE(review): listFiles() returns null if ontDir is not a readable directory,
  // which would NPE in the loop below — confirm callers always pass a valid directory.
  File[] allFiles = ontDirPath.listFiles();
  // A separate manager loads each delta file so it can be discarded after its axioms
  // have been copied into the base ontology.
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  addTripsBaseOntologies(manager);
  int count = 1;
  for (File owlFile : allFiles) {
    IRI owlDocumentIRI = IRI.create(owlFile);
    OWLOntology ontology = manager.loadOntologyFromOntologyDocument(owlDocumentIRI);
    Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
    // Merge this file's logical axioms into the base ontology.
    for (OWLLogicalAxiom axiom : axioms)
      OWL.manager.applyChange(new AddAxiom(baseOntology, axiom));
    System.out.println("\nLogical axioms: " + baseOntology.getLogicalAxiomCount());
    System.out.println(count + " file: " + owlFile.getName());
    // Incremental re-classification after this merge (only the total time is reported).
    classifier.classify();
    manager.removeOntology(ontology);
    count++;
  }
  System.out.println("\nTotal time taken (millis): " + Util.getElapsedTime(start));
}
/**
 * Builds the class-assertion axiom stating that the individual {@code ontologyIRI#start1}
 * belongs to the current phase class.
 *
 * @return the class assertion axiom for individual "start1"
 * @throws OWLOntologyCreationException retained for signature compatibility with callers
 */
private static OWLAxiom getCurrentPhase() throws OWLOntologyCreationException {
  // The shared data factory suffices; instantiating an OWLOntologyManager per call
  // (as before) was needless overhead.
  OWLDataFactory factory = OWLManager.getOWLDataFactory();
  OWLNamedIndividual start1 =
      factory.getOWLNamedIndividual(IRI.create(ontologyIRI + "#start1"));
  return factory.getOWLClassAssertionAxiom(currentPhase, start1);
}
/**
 * Exports all data into an OWL API ontology.
 *
 * <p>Dumps the whole repository via a CONSTRUCT query serialized as RDF/XML into memory,
 * then parses it back through the OWL API.
 *
 * @return OWL ontology.
 * @throws RepositoryException
 * @throws MalformedQueryException
 * @throws QueryEvaluationException
 * @throws RDFHandlerException
 * @throws OWLOntologyCreationException
 */
public OWLOntology exportToOwl()
    throws RepositoryException, MalformedQueryException, QueryEvaluationException,
        RDFHandlerException, OWLOntologyCreationException {
  RepositoryConnection conn =
      SesameAdapter.getInstance(RepoType.EVALUATION).getRepository().getConnection();
  try {
    GraphQuery query =
        conn.prepareGraphQuery(QueryLanguage.SPARQL, "CONSTRUCT {?s ?p ?o} WHERE {?s ?p ?o}");
    StringWriter strWriter = new StringWriter();
    RDFHandler writer = new RDFXMLWriter(strWriter);
    // Evaluate once, streaming into the handler. The previous extra handler-less
    // evaluate() call did redundant work and leaked its unclosed result.
    query.evaluate(writer);
    strWriter.flush();
    return OWLManager.createOWLOntologyManager()
        .loadOntologyFromOntologyDocument(
            new ReaderDocumentSource(new StringReader(strWriter.toString())));
  } finally {
    // Previously the connection leaked whenever evaluation or parsing threw.
    conn.close();
  }
}
@Before
public void loadOwl() throws Exception {
  // Resolve the test-case ontology on the classpath and load it.
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  String uri =
      Resources.getResource("ontologies/cases/" + getTestName() + ".owl").toURI().toString();
  OWLOntology ont = manager.loadOntologyFromOntologyDocument(IRI.create(uri));

  // Optionally materialize direct inferred edges with ELK before walking.
  if (performInference) {
    ReasonerConfiguration reasonerConfig = new ReasonerConfiguration();
    reasonerConfig.setFactory(ElkReasonerFactory.class.getCanonicalName());
    reasonerConfig.setAddDirectInferredEdges(true);
    ReasonerUtil util = new ReasonerUtil(reasonerConfig, manager, ont);
    util.reason();
  }

  // Walk every loaded ontology and project its structure into the graph.
  OWLOntologyWalker walker = new OWLOntologyWalker(manager.getOntologies());
  GraphOwlVisitor visitor =
      new GraphOwlVisitor(walker, graph, new ArrayList<MappedProperty>());
  walker.walkStructure(visitor);

  // Post-process someValuesFrom constructs, then render for inspection.
  OwlPostprocessor postprocessor =
      new OwlPostprocessor(graphDb, Collections.<String, String>emptyMap());
  postprocessor.processSomeValuesFrom();
  drawGraph();
}
/**
 * Runs the naming-issue detectors against the BIBO ontology, first through a Jena RDFS model
 * and then through the OWL API, printing each result set.
 */
public static void main(String[] args) throws Exception {
  String url =
      "https://raw.github.com/structureddynamics/Bibliographic-Ontology-BIBO/master/bibo.xml.owl";
  NamingIssueDetection namingIssueDetection = new NamingIssueDetection("/opt/wordnet");

  // Jena-based detection over an RDFS in-memory model.
  OntModel model = ModelFactory.createOntologyModel(OntModelSpec.RDFS_MEM);
  model.read(url, null);
  Set<NamingIssue> nonMatchingChildren =
      namingIssueDetection.detectNonExactMatchingDirectChildIssues(model);
  System.out.println(nonMatchingChildren);
  // NOTE(review): the same detection is run twice in a row here and again below —
  // presumably to exercise caching/warm-up; confirm the duplication is intentional.
  nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(model);
  System.out.println(nonMatchingChildren);

  // OWL-API-based detection over the same ontology document.
  OWLOntologyManager man = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = man.loadOntology(IRI.create(url));
  nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(ontology);
  System.out.println(nonMatchingChildren);
  nonMatchingChildren = namingIssueDetection.detectNonExactMatchingDirectChildIssues(ontology);
  System.out.println(nonMatchingChildren);
  nonMatchingChildren = namingIssueDetection.detectNonMatchingChildIssues(ontology, false);
  System.out.println(nonMatchingChildren);
  nonMatchingChildren = namingIssueDetection.detectNonMatchingChildIssues(ontology, true);
  System.out.println(nonMatchingChildren);
}
/**
 * Loads the EDAM ontology from the given path and maps every EDAM class IRI to a Concept
 * populated from the class's annotations (label, deprecation flag, synonyms, definition,
 * comment).
 *
 * @param edamPath path of the EDAM OWL file
 * @return insertion-ordered map from EDAM URI to the assembled Concept
 * @throws OWLOntologyCreationException if the ontology document cannot be loaded
 * @throws IllegalStateException if a concept ends up with an empty label or two classes map
 *     to the same EdamUri
 */
public static Map<EdamUri, Concept> load(String edamPath) throws OWLOntologyCreationException {
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File(edamPath));
  // The ontology IRI doubles as the EDAM URI prefix used to filter classes.
  String prefix = ontology.getOntologyID().getOntologyIRI().get().toString();
  return ontology
      .classesInSignature()
      .filter(c -> EdamUri.isEdamUri(c.getIRI().toString(), prefix))
      .collect(
          Collectors.toMap(
              c -> new EdamUri(c.getIRI().toString(), prefix),
              c -> {
                Concept concept = new Concept();
                EntitySearcher.getAnnotations(c, ontology)
                    .forEachOrdered(
                        a -> {
                          // NOTE(review): the label branch calls asLiteral().get() without an
                          // isPresent() guard, unlike every other branch — confirm labels are
                          // always literals in EDAM.
                          if (a.getProperty().isLabel())
                            concept.setLabel(a.getValue().asLiteral().get().getLiteral());
                          else if (a.getProperty().isDeprecated()) concept.setObsolete(true);
                          else if (a.getProperty()
                                  .toStringID()
                                  .equals(
                                      "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym")
                              && a.getValue().asLiteral().isPresent())
                            concept.addExactSynonym(a.getValue().asLiteral().get().getLiteral());
                          else if (a.getProperty()
                                  .toStringID()
                                  .equals(
                                      "http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym")
                              && a.getValue().asLiteral().isPresent())
                            concept.addNarrowSynonym(a.getValue().asLiteral().get().getLiteral());
                          else if (a.getProperty()
                                  .toStringID()
                                  .equals(
                                      "http://www.geneontology.org/formats/oboInOwl#hasBroadSynonym")
                              && a.getValue().asLiteral().isPresent())
                            concept.addBroadSynonym(a.getValue().asLiteral().get().getLiteral());
                          else if (a.getProperty()
                                  .toStringID()
                                  .equals(
                                      "http://www.geneontology.org/formats/oboInOwl#hasDefinition")
                              && a.getValue().asLiteral().isPresent())
                            concept.setDefinition(a.getValue().asLiteral().get().getLiteral());
                          else if (a.getProperty().isComment()
                              && a.getValue().asLiteral().isPresent())
                            concept.setComment(a.getValue().asLiteral().get().getLiteral());
                        });
                // Every EDAM concept must carry a non-empty label.
                if (concept.getLabel().isEmpty())
                  throw new IllegalStateException(
                      String.format("Label of concept %s is empty", c.getIRI()));
                return concept;
              },
              (u, v) -> {
                throw new IllegalStateException(String.format("Duplicate key %s", u));
              },
              LinkedHashMap::new));
}
protected OWLClass getFreshQueryClass(String base) { long cnt = 0; String iriString = base; while (m_ontologyOperator.getOntology().containsClassInSignature(IRI.create(iriString))) { iriString = base + (cnt++); // not guaranteed to succeed.. however |long| amount of possibilities } return OWLManager.getOWLDataFactory().getOWLClass(IRI.create(iriString)); }
/** Loads the ontology named by args[0] and delegates to the ontology-based overload. */
public void precomputeAndCheckResults(String[] args) throws Exception {
  System.out.println("Comparing classification output for " + args[0]);
  File input = new File(args[0]);
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = manager.loadOntologyFromOntologyDocument(IRI.create(input));
  precomputeAndCheckResults(ontology);
}
/**
 * Materializes the data predicate by predicate and serializes each one through
 * serializePredicate.
 *
 * <p>Requires owlFile to be set. When disableReasoning is set, only declaration axioms are
 * kept so no TBox reasoning happens during materialization.
 */
private void runWithSeparateFiles() {
  if (owlFile == null) {
    throw new NullPointerException("You have to specify an ontology file!");
  }
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = null;
  OBDADataFactory obdaDataFactory = OBDADataFactoryImpl.getInstance();
  try {
    ontology = manager.loadOntologyFromOntologyDocument((new File(owlFile)));
    if (disableReasoning) {
      /*
       * when reasoning is disabled, we extract only the declaration assertions for the vocabulary
       */
      ontology = extractDeclarations(manager, ontology);
    }
    // Collect one predicate per class, data property and object property in the signature.
    Collection<Predicate> predicates = new ArrayList<>();
    for (OWLClass owlClass : ontology.getClassesInSignature()) {
      Predicate predicate = obdaDataFactory.getClassPredicate(owlClass.getIRI().toString());
      predicates.add(predicate);
    }
    for (OWLDataProperty owlDataProperty : ontology.getDataPropertiesInSignature()) {
      Predicate predicate =
          obdaDataFactory.getDataPropertyPredicate(owlDataProperty.getIRI().toString());
      predicates.add(predicate);
    }
    for (OWLObjectProperty owlObjectProperty : ontology.getObjectPropertiesInSignature()) {
      Predicate predicate =
          obdaDataFactory.getObjectPropertyPredicate(owlObjectProperty.getIRI().toString());
      predicates.add(predicate);
    }
    // Load the mappings and declare the ontology vocabulary on the OBDA model.
    OBDAModel obdaModel = loadMappingFile(mappingFile);
    Ontology inputOntology = OWLAPI3TranslatorUtility.translate(ontology);
    obdaModel.declareAll(inputOntology.getVocabulary());
    // Serialize each predicate's triples, reporting progress on stderr.
    int numPredicates = predicates.size();
    int i = 1;
    for (Predicate predicate : predicates) {
      System.err.println(String.format("Materializing %s (%d/%d)", predicate, i, numPredicates));
      serializePredicate(ontology, inputOntology, obdaModel, predicate, outputFile, format);
      i++;
    }
  } catch (OWLOntologyCreationException e) {
    e.printStackTrace();
  } catch (Exception e) {
    e.printStackTrace();
  }
}
/**
 * Loads the first delta ontology in deltaDir_ whose file name ends with the given suffix.
 *
 * @param suffix file-name suffix to match
 * @return the loaded ontology, or null when no matching file exists (or the directory is
 *     missing/unreadable)
 * @throws OWLOntologyCreationException if the matching document cannot be parsed
 */
private OWLOntology loadFromDisk(String suffix) throws OWLOntologyCreationException {
  OWLOntologyManager changeManager = OWLManager.createOWLOntologyManager();
  File[] deltas = deltaDir_.listFiles();
  // listFiles() returns null (not an empty array) when the directory does not exist or
  // cannot be read; the previous code would NPE here.
  if (deltas == null) {
    return null;
  }
  for (File delta : deltas) {
    if (delta.getName().endsWith(suffix)) {
      return changeManager.loadOntologyFromOntologyDocument(delta);
    }
  }
  return null;
}
public PartOfOntology(String directory, String file) { OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); try { ontology = manager.loadOntologyFromOntologyDocument(new File(directory, file)); } catch (OWLOntologyCreationException e) { e.printStackTrace(); log(LogLevel.DEBUG, "Failed to load file " + file + " from directory " + directory); return; } success = true; }
/**
 * Builds the converter: resolves the OWL file location from the given properties file, loads
 * the COSMIC fusion-export ontology and wraps it in a factory.
 *
 * @param propertiesFileLocations path to the properties file(s) holding configuration values
 * @throws Exception if the property cannot be read or the ontology fails to load
 */
public CompleteFusionExportConverterImpl(String propertiesFileLocations) throws Exception {
  this.propertiesFileLocations = propertiesFileLocations;
  propertiesUtil = new PropertiesUtil();
  owlOntologyManager = OWLManager.createOWLOntologyManager();
  // The ontology path is looked up under the COSMICFUSIONEXPORT_OWL_FILE_LOCATION key.
  onto =
      owlOntologyManager.loadOntologyFromOntologyDocument(
          new File(
              propertiesUtil.getPropertyValue(
                  propertiesFileLocations, COSMICFUSIONEXPORT_OWL_FILE_LOCATION)));
  factory = new CosmicFusionExportFactory(onto);
}
@Override public void prepare() throws TaskException { try { // load the initial version of the ontology manager = OWLManager.createOWLOntologyManager(); initial = manager.loadOntologyFromOntologyDocument(ontologyFile_); // create the OWL reasoner reasoner = getOWLReasonerFactory().createReasoner(initial); } catch (OWLOntologyCreationException e) { throw new TaskException(e); } }
/** Aligns two OAEI benchmark ontologies with LogMap 2 and prints every mapping found. */
public static void callLogMap() throws OWLOntologyCreationException {
  String iri1 = "http://oaei.ontologymatching.org/tests/101/onto.rdf";
  String iri2 = "http://oaei.ontologymatching.org/tests/304/onto.rdf";

  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology source = manager.loadOntology(IRI.create(iri1));
  OWLOntology target = manager.loadOntology(IRI.create(iri2));

  LogMap2_Matcher matcher = new LogMap2_Matcher(source, target);
  Set<MappingObjectStr> mappings = matcher.getLogmap2_Mappings();
  for (MappingObjectStr mapping : mappings) {
    System.out.println(" test des mots : " + mapping.toString());
  }
  System.out.println("Num.mappings: " + mappings.size());
}
/**
 * Private constructor (gateway is obtained through its accessor): initializes the
 * repository/file maps and the OWL manager, then loads the Kuaba ontology — from the local
 * file when it exists, otherwise from ONTOLOGY_URL.
 */
private OwlApiFileGateway() {
  this.repoMap = new HashMap<IRI, KuabaRepository>();
  this.fileMap = new HashMap<KuabaRepository, File>();
  this.manager = OWLManager.createOWLOntologyManager();
  File onto = new File("kuabaOntology/KuabaOntology.owl");
  try {
    if (onto.exists()) load("kuabaOntology/KuabaOntology.owl");
    else load(ONTOLOGY_URL);
  } catch (RepositoryLoadException rle) {
    // Surface the failure to the user; the gateway is left without a loaded repository.
    JOptionPane.showMessageDialog(null, rle.getMessage());
  }
}
protected OWLOntology loadBigOntology(String filename) { File file = new File(System.getenv("bigontosdir") + filename + ".owl"); OWLOntology ontology = null; try { ontology = OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(file); } catch (OWLOntologyCreationException e) { e .printStackTrace(); // To change body of catch statement use File | Settings | File // Templates. } return ontology; }
public void mergeAndCompare(String dirPath) throws Exception { File dir = new File(dirPath); File[] files = dir.listFiles(); OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntologyMerger merger = new OWLOntologyMerger(manager); for (File f : files) manager.loadOntologyFromOntologyDocument(IRI.create(f)); String s = "norm-merged-base+300.owl"; IRI iri = IRI.create(new File(s)); OWLOntology mergedOntology = merger.createMergedOntology(manager, iri); manager.saveOntology(mergedOntology, iri); System.out.println("Done creating merged ontology"); // precomputeAndCheckResults(mergedOntology); }
@Override public boolean test() { // Load an example ontology. OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology ontology = null; try { ontology = manager.loadOntologyFromOntologyDocument(new File("model/mcs_ontology.owl")); } catch (OWLOntologyCreationException e) { // TODO Auto-generated catch block e.printStackTrace(); } // We need a reasoner to do our query answering // These two lines are the only relevant difference between this code // and the original example // This example uses HermiT: http://hermit-reasoner.com/ OWLReasoner reasoner = new Reasoner.ReasonerFactory().createReasoner(ontology); ShortFormProvider shortFormProvider = new SimpleShortFormProvider(); // Create the DLQueryPrinter helper class. This will manage the // parsing of input and printing of results DLQueryPrinter dlQueryPrinter = new DLQueryPrinter(new DLQueryEngine(reasoner, shortFormProvider), shortFormProvider); // Enter the query loop. A user is expected to enter class // expression on the command line. BufferedReader br = null; try { br = new BufferedReader(new InputStreamReader(System.in, "UTF-8")); } catch (UnsupportedEncodingException e) { // TODO Auto-generated catch block e.printStackTrace(); } while (true) { System.out.println( "Type a class expression in Manchester Syntax and press Enter (or press x to exit):"); String classExpression = null; try { classExpression = br.readLine(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } // Check for exit condition if (classExpression == null || classExpression.equalsIgnoreCase("x")) { break; } dlQueryPrinter.askQuery(classExpression.trim()); System.out.println(); } return true; }
/** Builds the Quest reasoner from an ontology document plus an OBDA mapping file. */
private void setupReasoner(File owlFile, File obdaFile) throws Exception {
  // Parse the ontology document.
  OWLOntology ontology =
      OWLManager.createOWLOntologyManager().loadOntologyFromOntologyDocument(owlFile);

  // Read the OBDA mappings into a fresh model.
  OBDAModel obdaModel = ofac.getOBDAModel();
  ModelIOManager ioManager = new ModelIOManager(obdaModel);
  ioManager.load(obdaFile);

  // Wire the model and preferences into a Quest reasoner instance.
  QuestOWLFactory factory = new QuestOWLFactory();
  factory.setOBDAController(obdaModel);
  factory.setPreferenceHolder(prefs);
  reasoner = (QuestOWL) factory.createReasoner(ontology, new SimpleConfiguration());
}
/**
 * Creates a knowledge base for the given OWL profile: sets up the OWL manager and data
 * factory, empty vocabulary tables, and runs the remaining initialization in setup().
 */
public KB(Profile profile)
    throws OWLOntologyCreationException, OWLOntologyStorageException,
        OWLProfilesViolationsException, IOException, CloneNotSupportedException,
        UnsupportedAxiomsException, IPException, PrologEngineCreationException {
  this.profile = profile;
  ontologyManager = OWLManager.createOWLOntologyManager();
  dataFactory = ontologyManager.getOWLDataFactory();
  // Fresh, empty symbol tables for the KB vocabulary.
  concepts = new HashMap<>();
  roles = new HashMap<>();
  dataRoles = new HashMap<>();
  individuals = new HashMap<>();
  setup();
  clear = true;
}
/**
 * Verifies that the no-results solvability analysis classifies a variable subclass axiom,
 * whose instantiations have no asserted solutions, as a "no solution" node rather than as
 * solvable, solved or unsolvable.
 */
@Test
public void shouldTestNoResultsSubClassAxiom() throws Exception {
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  OWLOntology ontology = manager.createOntology();
  OPPLFactory opplFactory = new OPPLFactory(manager, ontology, null);
  ConstraintSystem constraintSystem = opplFactory.createConstraintSystem();
  // Class variable ?x used in the query axiom below.
  Variable<OWLClassExpression> x =
      constraintSystem.createVariable("?x", VariableTypeFactory.getCLASSVariableType(), null);
  OWLDataFactory dataFactory = manager.getOWLDataFactory();
  OWLClass a = dataFactory.getOWLClass(IRI.create("A"));
  OWLClass b = dataFactory.getOWLClass(IRI.create("B"));
  OWLClass c = dataFactory.getOWLClass(IRI.create("C"));
  OWLObjectProperty p = dataFactory.getOWLObjectProperty(IRI.create("p"));
  // Asserted axiom: (p some A) subClassOf (p some B).
  manager.addAxiom(
      ontology,
      dataFactory.getOWLSubClassOfAxiom(
          dataFactory.getOWLObjectSomeValuesFrom(p, a),
          dataFactory.getOWLObjectSomeValuesFrom(p, b)));
  // Query axiom: (p some ?x) subClassOf (p some C) — has no asserted solutions.
  OWLSubClassOfAxiom axiom =
      dataFactory.getOWLSubClassOfAxiom(
          dataFactory.getOWLObjectSomeValuesFrom(p, dataFactory.getOWLClass(x.getIRI())),
          dataFactory.getOWLObjectSomeValuesFrom(p, c));
  AxiomSolvability axiomSolvability =
      new NoResultsAxiomSolvability(constraintSystem, new AssertedModelQuerySolver(manager));
  SolvabilitySearchNode node =
      axiomSolvability.getSolvabilitySearchNode(axiom, BindingNode.createNewEmptyBindingNode());
  // Only the "no solution" visit is acceptable; every other node type fails the test.
  node.accept(
      new SolvabilitySearchNodeVisitor() {
        @Override
        public void visitUnsolvableSearchNode(UnsolvableSearchNode unsolvableSearchNode) {
          fail("Wrong type of solvability node: unsolvable, when expecting no solutions");
        }

        @Override
        public void visitSolvedSearchNode(SolvedSearchNode solvedSearchNode) {
          fail("Wrong type of solvability node: solved, when expecting no solutions");
        }

        @Override
        public void visitNoSolutionSolvableSearchNode(
            NoSolutionSolvableSearchNode noSolutionSolvableSearchNode) {
          // That's fine
        }

        @Override
        public void visitSolvableSearchNode(SolvableSearchNode solvableSearchNode) {
          fail("Wrong type of solvability node: no solution, when expecting no solutions");
        }
      });
}
/**
 * Benchmarks ELK incremental reasoning: classifies the base ontology, then for each file in
 * ontDir adds its logical axioms to the base ontology, flushes the reasoner and recomputes
 * class assertions, finally printing the total elapsed time.
 *
 * @param baseOnt path of the base ontology file
 * @param ontDir directory whose ontology files are added incrementally
 * @throws Exception on ontology loading failures
 */
public void getELKIncrementalRuntime(String baseOnt, String ontDir) throws Exception {
  GregorianCalendar start = new GregorianCalendar();
  OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
  IRI documentIRI = IRI.create(new File(baseOnt));
  OWLOntology baseOntology = manager.loadOntology(documentIRI);
  System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
  // Non-buffering classification of the base ontology.
  OWLReasonerFactory reasonerFactory = new ElkReasonerFactory();
  OWLReasoner reasoner = reasonerFactory.createReasoner(baseOntology);
  reasoner.precomputeInferences(InferenceType.CLASS_HIERARCHY);
  // NOTE(review): listFiles() returns null for an invalid directory, which would NPE in
  // the loop below — confirm callers always pass a valid directory.
  File[] files = new File(ontDir).listFiles();
  int count = 0;
  // Deltas are loaded through a second manager so each one can be removed after its
  // axioms have been copied into the base ontology (owned by the first manager).
  OWLOntologyManager manager2 = OWLManager.createOWLOntologyManager();
  addTripsBaseOntologies(manager2);
  for (File file : files) {
    System.out.println("File name: " + file.getName());
    documentIRI = IRI.create(file);
    OWLOntology ontology = manager2.loadOntology(documentIRI);
    Set<OWLLogicalAxiom> axioms = ontology.getLogicalAxioms();
    manager.addAxioms(baseOntology, axioms);
    reasoner.flush();
    System.out.println("Logical axioms: " + baseOntology.getLogicalAxiomCount());
    // From the ELK wiki, it seems ABox reasoning will trigger TBox reasoning
    reasoner.precomputeInferences(InferenceType.CLASS_ASSERTIONS);
    manager2.removeOntology(ontology);
    count++;
    System.out.println("Done with " + count);
  }
  reasoner.dispose();
  System.out.println("Time taken (millis): " + Util.getElapsedTime(start));
}
/**
 * Builds a reasoner over a BOT-locality module of the ABox ontology, seeded with every
 * individual in its signature — typically much smaller than reasoning over the full ontology.
 *
 * @return a reasoner for the extracted module
 * @throws OWLOntologyCreationException if the module ontology cannot be created
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private OWLReasoner createModuleReasoner() throws OWLOntologyCreationException {
  LOG.info("Creating module reasoner for module: " + modelId);
  ModuleType mtype = ModuleType.BOT;
  // Fresh manager sharing the ABox ontology's data factory, to hold the module ontology.
  OWLOntologyManager m =
      OWLManager.createOWLOntologyManager(
          aboxOntology.getOWLOntologyManager().getOWLDataFactory());
  SyntacticLocalityModuleExtractor sme =
      new SyntacticLocalityModuleExtractor(m, aboxOntology, mtype);
  // Raw cast: getIndividualsInSignature() yields a narrower element type than OWLEntity,
  // hence the class-level @SuppressWarnings above.
  Set<OWLEntity> seeds = (Set) aboxOntology.getIndividualsInSignature();
  OWLOntology module = sme.extractAsOntology(seeds, IRI.generateDocumentIRI());
  OWLReasoner reasoner = reasonerFactory.createReasoner(module);
  LOG.info("Done creating module reasoner module: " + modelId);
  return reasoner;
}