/**
 * Examine the conclusions graph for introduction of restrictions which require a comprehension
 * rewrite and declare new (anon) classes for those restrictions.
 */
public void comprehensionAxioms(Model premises, Model conclusions) {
    // Comprehend all restriction declarations and note them in a map
    Map<Resource, Resource> comprehension = new HashMap<>();
    StmtIterator ri = conclusions.listStatements(null, RDF.type, OWL.Restriction);
    while (ri.hasNext()) {
        Resource restriction = ri.nextStatement().getSubject();
        StmtIterator pi = restriction.listProperties(OWL.onProperty);
        while (pi.hasNext()) {
            Resource prop = (Resource) pi.nextStatement().getObject();
            StmtIterator vi = restriction.listProperties();
            while (vi.hasNext()) {
                Statement rs = vi.nextStatement();
                if (!rs.getPredicate().equals(OWL.onProperty)) {
                    // Have a restriction on(prop) of type rs in the conclusions,
                    // so assert a premise that such a restriction could exist
                    Resource comp = premises
                            .createResource()
                            .addProperty(RDF.type, OWL.Restriction)
                            .addProperty(OWL.onProperty, prop)
                            .addProperty(rs.getPredicate(), rs.getObject());
                    comprehension.put(restriction, comp);
                }
            }
        }
    }

    // Comprehend any intersectionOf lists. Introduce an anon class which has the form
    // of the intersection expression.
    // Rewrite queries of the form (X intersectionOf Y) to the form
    // (X equivalentClass ?CC) (?CC intersectionOf Y)
    StmtIterator ii = conclusions.listStatements(null, OWL.intersectionOf, (RDFNode) null);
    List<Statement> intersections = new ArrayList<>();
    while (ii.hasNext()) {
        intersections.add(ii.nextStatement());
    }
    for (Statement is : intersections) {
        // Declare in the premises that such an intersection exists
        Resource comp = premises
                .createResource()
                .addProperty(RDF.type, OWL.Class)
                .addProperty(
                        OWL.intersectionOf,
                        mapList(premises, (Resource) is.getObject(), comprehension));
        // Rewrite the conclusions to be a test for equivalence between the class being
        // queried and the comprehended intersection
        conclusions.remove(is);
        conclusions.add(is.getSubject(), OWL.equivalentClass, comp);
    }

    // Comprehend any oneOf lists
    StmtIterator io = conclusions.listStatements(null, OWL.oneOf, (RDFNode) null);
    while (io.hasNext()) {
        Statement s = io.nextStatement();
        premises.createResource().addProperty(OWL.oneOf, s.getObject());
    }
}
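/*
 * A minimal, hypothetical sketch (not part of the test harness) illustrating the
 * intersectionOf rewrite described above on a toy conclusions graph. The example.org
 * names and the standalone main are assumptions for illustration only; the imports
 * assume a current org.apache.jena distribution (older code uses com.hp.hpl.jena.*).
 */
import org.apache.jena.rdf.model.*;
import org.apache.jena.vocabulary.OWL;

public class ComprehensionRewriteSketch {
    public static void main(String[] args) {
        Model conclusions = ModelFactory.createDefaultModel();
        String ns = "http://example.org/#"; // hypothetical namespace
        Resource a = conclusions.createResource(ns + "A");
        Resource b = conclusions.createResource(ns + "B");
        Resource c = conclusions.createResource(ns + "C");
        // Conclusion to prove: C owl:intersectionOf (A B)
        conclusions.add(c, OWL.intersectionOf, conclusions.createList(new RDFNode[] {a, b}));
        // After comprehensionAxioms(premises, conclusions), the premises gain an anon class
        //   _:cc rdf:type owl:Class ; owl:intersectionOf <mapped copy of (A B)> .
        // and the conclusion above is rewritten to
        //   C owl:equivalentClass _:cc .
        conclusions.write(System.out, "TURTLE");
    }
}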
public static ArrayList<RdfModel> processRDF(InputStream in) {
    Model model = ModelFactory.createDefaultModel();
    ArrayList<RdfModel> result = new ArrayList<>();
    if (in != null) {
        model.read(in, "RDF/XML");
        // Only these properties matter here: has-title, year-of, full-name. All three must exist.
        for (final ResIterator it = model.listSubjectsWithProperty(RdfPropertyList.p_hasTitle);
                it.hasNext(); ) {
            RdfModel rm = new RdfModel();
            try {
                // node is a resource which has the title property
                final Resource node = it.next().asResource();
                rm.setHasTitle(node.getProperty(RdfPropertyList.p_hasTitle).getString());
                StringBuilder authors = new StringBuilder();
                StringBuilder dates = new StringBuilder();
                for (final StmtIterator all_props = node.listProperties(); all_props.hasNext(); ) {
                    try {
                        Resource all_res = all_props.next().getObject().asResource();
                        StmtIterator fullnames = all_res.listProperties(RdfPropertyList.p_fullName);
                        StmtIterator years = all_res.listProperties(RdfPropertyList.p_year);
                        // For now, multiple authors or dates are collected into one String,
                        // separated by commas
                        RdfProcess newprocess = new RdfProcess();
                        while (fullnames.hasNext()) {
                            String fullname = newprocess.getValue(fullnames.next().getObject());
                            if (!fullname.equals("Invalid/Lack of Information")) {
                                authors.append(fullname).append(" , ");
                            }
                        }
                        while (years.hasNext()) {
                            String year = newprocess.getValue(years.next().getObject());
                            if (!year.equals("Invalid/Lack of Information")) {
                                dates.append(year).append(" , ");
                            }
                        }
                    } catch (Exception e) {
                        // Object was not a resource (e.g. a literal); skip this property
                    }
                }
                rm.setHasDate(dates.toString());
                rm.setHasAuthor(authors.toString());
            } catch (Exception e) {
                // Malformed record; keep the partially populated entry
            }
            result.add(rm);
        }
    }
    return result;
}
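/*
 * A minimal usage sketch for processRDF, assumed to live in the same class. The file name
 * is hypothetical, and the RdfModel getters (mirroring the setters used above) are assumed
 * accessors; java.io.FileInputStream and java.io.InputStream must be imported.
 */
public static void main(String[] args) throws Exception {
    try (InputStream in = new FileInputStream("citations.rdf")) { // hypothetical input file
        ArrayList<RdfModel> records = processRDF(in);
        for (RdfModel rm : records) {
            // getHasTitle/getHasAuthor/getHasDate are assumed accessors on RdfModel
            System.out.println(rm.getHasTitle() + " | " + rm.getHasAuthor() + " | " + rm.getHasDate());
        }
    }
}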
/** Return a list of all tests of the given type, according to the current filters */
public List<Resource> findTestsOfType(Resource testType) {
    ArrayList<Resource> result = new ArrayList<>();
    StmtIterator si = testDefinitions.listStatements(null, RDF.type, testType);
    while (si.hasNext()) {
        Resource test = si.nextStatement().getSubject();
        boolean accept = true;
        // Check test status
        Literal status = (Literal) test.getProperty(RDFTest.status).getObject();
        if (approvedOnly) {
            accept = status.getString().equals(STATUS_FLAGS[0]);
        } else {
            accept = false;
            for (String STATUS_FLAG : STATUS_FLAGS) {
                if (status.getString().equals(STATUS_FLAG)) {
                    accept = true;
                    break;
                }
            }
        }
        // Check for blocked tests
        for (String BLOCKED_TEST : BLOCKED_TESTS) {
            if (BLOCKED_TEST.equals(test.toString())) {
                accept = false;
            }
        }
        // End of filter tests
        if (accept) {
            result.add(test);
        }
    }
    return result;
}
/** Load the premises or conclusions for the test. */
public Model getDoc(Resource test, Property docType) throws IOException {
    Model result = ModelFactory.createDefaultModel();
    StmtIterator si = test.listProperties(docType);
    while (si.hasNext()) {
        String fname = si.nextStatement().getObject().toString() + ".rdf";
        loadFile(fname, result);
    }
    return result;
}
/** Run a single test of any sort, return true if the test succeeds. */
public boolean doRunTest(Resource test) throws IOException {
    if (test.hasProperty(RDF.type, OWLTest.PositiveEntailmentTest)
            || test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)
            || test.hasProperty(RDF.type, OWLTest.OWLforOWLTest)
            || test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest)
            || test.hasProperty(RDF.type, OWLTest.TrueTest)) {
        // Entailment tests
        boolean processImports = test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest);
        Model premises = getDoc(test, RDFTest.premiseDocument, processImports);
        Model conclusions = getDoc(test, RDFTest.conclusionDocument);
        comprehensionAxioms(premises, conclusions);
        long t1 = System.currentTimeMillis();
        InfGraph graph = reasoner.bind(premises.getGraph());
        if (printProfile) {
            ((FBRuleInfGraph) graph).resetLPProfile(true);
        }
        Model result = ModelFactory.createModelForGraph(graph);
        boolean correct = WGReasonerTester.testConclusions(conclusions.getGraph(), result.getGraph());
        long t2 = System.currentTimeMillis();
        lastTestDuration = t2 - t1;
        if (printProfile) {
            ((FBRuleInfGraph) graph).printLPProfile();
        }
        if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)) {
            correct = !correct;
        }
        return correct;
    } else if (test.hasProperty(RDF.type, OWLTest.InconsistencyTest)) {
        // System.out.println("Starting: " + test);
        Model input = getDoc(test, RDFTest.inputDocument);
        long t1 = System.currentTimeMillis();
        InfGraph graph = reasoner.bind(input.getGraph());
        boolean correct = !graph.validate().isValid();
        long t2 = System.currentTimeMillis();
        lastTestDuration = t2 - t1;
        return correct;
    } else if (test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) {
        // Not used normally because we are not complete enough to prove consistency
        // System.out.println("Starting: " + test);
        Model input = getDoc(test, RDFTest.inputDocument);
        long t1 = System.currentTimeMillis();
        InfGraph graph = reasoner.bind(input.getGraph());
        boolean correct = graph.validate().isValid();
        long t2 = System.currentTimeMillis();
        lastTestDuration = t2 - t1;
        return correct;
    } else {
        for (StmtIterator i = test.listProperties(RDF.type); i.hasNext(); ) {
            System.out.println("Test type = " + i.nextStatement().getObject());
        }
        throw new ReasonerException("Unknown test type");
    }
}
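/*
 * A hypothetical driver sketch (assumed to run inside this tester class, since it uses
 * findTestsOfType, doRunTest and the lastTestDuration field) that filters and runs just
 * the positive entailment tests.
 */
public void runPositiveEntailmentTests() throws IOException {
    List<Resource> tests = findTestsOfType(OWLTest.PositiveEntailmentTest);
    for (Resource test : tests) {
        boolean passed = doRunTest(test);
        System.out.println(test + " : " + (passed ? "PASS" : "FAIL")
                + " (" + lastTestDuration + " ms)");
    }
}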
/** Load the premises or conclusions for the test, optionally performing import processing. */
public Model getDoc(Resource test, Property docType, boolean processImports) throws IOException {
    if (processImports) {
        Model result = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null);
        StmtIterator si = test.listProperties(docType);
        while (si.hasNext()) {
            String fname = si.nextStatement().getObject().toString() + ".rdf";
            loadFile(fname, result);
        }
        return result;
    } else {
        return getDoc(test, docType);
    }
}
/** Copy a model into a new one, passing any URI subjects and objects through checkUri. */
private Model JoinMap(Model map) {
    Model newMap = ModelFactory.createDefaultModel();
    StmtIterator stmts = map.listStatements();
    while (stmts.hasNext()) {
        Statement s = stmts.next();
        if (s.getSubject().isURIResource() || s.getObject().isURIResource()) {
            s = newMap.createStatement(
                    checkUri(s.getSubject().asNode(), newMap).asResource(),
                    s.getPredicate(),
                    checkUri(s.getObject().asNode(), newMap));
        }
        newMap.add(s);
    }
    return newMap;
}
public static void main(String[] args) {
    OntModel m = ModelFactory.createOntologyModel();
    OntDocumentManager dm = m.getDocumentManager();
    dm.addAltEntry(
            "http://www.eswc2006.org/technologies/ontology",
            "file:" + JENA + "src/examples/resources/eswc-2006-09-21.rdf");
    m.read("http://www.eswc2006.org/technologies/ontology");

    // create an empty model
    Model model = ModelFactory.createDefaultModel();

    // create the resource
    Resource johnSmith = model.createResource(personURI);

    // add the property
    johnSmith.addProperty(VCARD.FN, fullName);
    johnSmith.addProperty(
            VCARD.N,
            model.createResource()
                    .addProperty(VCARD.Given, "jon")
                    .addProperty(VCARD.Family, "Smit"));

    // list the statements in the Model
    StmtIterator iter = model.listStatements();

    // print out the predicate, subject and object of each statement
    while (iter.hasNext()) {
        Statement stmt = iter.nextStatement(); // get next statement
        Resource subject = stmt.getSubject(); // get the subject
        Property predicate = stmt.getPredicate(); // get the predicate
        RDFNode object = stmt.getObject(); // get the object

        System.out.print(subject.toString());
        System.out.print(" " + predicate.toString() + " ");
        if (object instanceof Resource) {
            System.out.print(object.toString());
        } else {
            // object is a literal
            System.out.print(" \"" + object.toString() + "\"");
        }
        System.out.println(" .");
    }
}