/**
 * Answer a list containing {@code type} itself followed by every resource that is
 * declared {@code owl:equivalentClass} to it in the type's model.
 */
private static List<Resource> equivalentTypes(Resource type) {
  List<Resource> equivalents = new ArrayList<>();
  equivalents.add(type);
  StmtIterator statements = type.getModel().listStatements(ANY, OWL.equivalentClass, type);
  while (statements.hasNext()) {
    equivalents.add(statements.nextStatement().getSubject());
  }
  return equivalents;
}
/** Return a list of all tests of the given type, according to the current filters */ public List<Resource> findTestsOfType(Resource testType) { ArrayList<Resource> result = new ArrayList<>(); StmtIterator si = testDefinitions.listStatements(null, RDF.type, testType); while (si.hasNext()) { Resource test = si.nextStatement().getSubject(); boolean accept = true; // Check test status Literal status = (Literal) test.getProperty(RDFTest.status).getObject(); if (approvedOnly) { accept = status.getString().equals(STATUS_FLAGS[0]); } else { accept = false; for (String STATUS_FLAG : STATUS_FLAGS) { if (status.getString().equals(STATUS_FLAG)) { accept = true; break; } } } // Check for blocked tests for (String BLOCKED_TEST : BLOCKED_TESTS) { if (BLOCKED_TEST.equals(test.toString())) { accept = false; } } // End of filter tests if (accept) { result.add(test); } } return result; }
/**
 * Index all the resources in a Jena Model to ES
 *
 * @param model the model to index
 * @param bulkRequest a BulkRequestBuilder
 * @param getPropLabel if set to true all URI property values will be indexed as their label. The
 *     label is taken as the value of one of the properties set in {@link #uriDescriptionList}.
 */
private void addModelToES(Model model, BulkRequestBuilder bulkRequest, boolean getPropLabel) {
  long startTime = System.currentTimeMillis();
  long bulkLength = 0;
  // Pass 1: collect the set of predicates that survive the configured property filter.
  HashSet<Property> properties = new HashSet<Property>();
  StmtIterator it = model.listStatements();
  while (it.hasNext()) {
    Statement st = it.nextStatement();
    Property prop = st.getPredicate();
    String property = prop.toString();
    // Keep the predicate when: no filter list is configured, OR the list is a whitelist
    // containing it, OR the list is a blacklist not containing it, OR a normalization
    // rule is registered for it (normalized properties are always indexed).
    if (rdfPropList.isEmpty()
        || (isWhitePropList && rdfPropList.contains(property))
        || (!isWhitePropList && !rdfPropList.contains(property))
        || (normalizeProp.containsKey(property))) {
      properties.add(prop);
    }
  }
  // Pass 2: build one index request per subject resource, flushing in bulk batches.
  ResIterator resIt = model.listSubjects();
  while (resIt.hasNext()) {
    Resource rs = resIt.nextResource();
    // Serialize the resource's filtered properties into the JSON document to index.
    Map<String, ArrayList<String>> jsonMap = getJsonMap(rs, properties, model, getPropLabel);
    // The resource URI is used as the ES document id.
    bulkRequest.add(
        client.prepareIndex(indexName, typeName, rs.toString()).setSource(mapToString(jsonMap)));
    bulkLength++;
    // We want to execute the bulk for every DEFAULT_BULK_SIZE requests
    if (bulkLength % EEASettings.DEFAULT_BULK_SIZE == 0) {
      BulkResponse bulkResponse = bulkRequest.execute().actionGet();
      // After executing, flush the BulkRequestBuilder.
      // NOTE: this reassigns the local parameter only (Java is pass-by-value);
      // the caller's builder reference is NOT refreshed.
      bulkRequest = client.prepareBulk();
      if (bulkResponse.hasFailures()) {
        processBulkResponseFailure(bulkResponse);
      }
    }
  }
  // Execute remaining requests
  if (bulkRequest.numberOfActions() > 0) {
    BulkResponse response = bulkRequest.execute().actionGet();
    // Handle failure by iterating through each bulk response item
    if (response.hasFailures()) {
      processBulkResponseFailure(response);
    }
  }
  // Show time taken to index the documents
  logger.info(
      "Indexed {} documents on {}/{} in {} seconds",
      bulkLength,
      indexName,
      typeName,
      (System.currentTimeMillis() - startTime) / 1000.0);
}
/** Load the premises or conclusions for the test into a fresh default model. */
public Model getDoc(Resource test, Property docType) throws IOException {
  Model doc = ModelFactory.createDefaultModel();
  // Each docType property value names a document; load "<value>.rdf" into the model.
  for (StmtIterator si = test.listProperties(docType); si.hasNext(); ) {
    loadFile(si.nextStatement().getObject().toString() + ".rdf", doc);
  }
  return doc;
}
/**
 * Add to <code>toAdd</code> all the superclass statements needed to note that any indirect
 * subclass of <code>X = parents.item</code> has as superclass all the classes between it and X
 * and all the remaining elements of <code>parents</code>.
 */
private static void addSuperClasses(Model m, LinkedSeq parents, Model toAdd) {
  Resource head = parents.item;
  StmtIterator subclasses = m.listStatements(null, RDFS.subClassOf, head);
  while (subclasses.hasNext()) {
    Resource sub = subclasses.nextStatement().getSubject();
    // Every ancestor already on the chain is also a superclass of this subclass.
    for (LinkedSeq ancestor = parents.rest; ancestor != null; ancestor = ancestor.rest) {
      toAdd.add(sub, RDFS.subClassOf, ancestor.item);
    }
    // Recurse with the subclass pushed onto the ancestor chain.
    addSuperClasses(m, parents.push(sub), toAdd);
  }
}
/** Run a single test of any sort, return true if the test succeeds. */
public boolean doRunTest(Resource test) throws IOException {
  if (test.hasProperty(RDF.type, OWLTest.PositiveEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.OWLforOWLTest)
      || test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.TrueTest)) {
    // Entailment tests
    // Only ImportEntailmentTest requires owl:imports to be followed when loading premises.
    boolean processImports = test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest);
    Model premises = getDoc(test, RDFTest.premiseDocument, processImports);
    Model conclusions = getDoc(test, RDFTest.conclusionDocument);
    // Inject comprehension axioms so restrictions/intersections in the conclusions
    // have matching anonymous classes asserted in the premises.
    comprehensionAxioms(premises, conclusions);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(premises.getGraph());
    if (printProfile) {
      ((FBRuleInfGraph) graph).resetLPProfile(true);
    }
    Model result = ModelFactory.createModelForGraph(graph);
    // The test passes when every conclusion triple is entailed by the inference graph.
    boolean correct = WGReasonerTester.testConclusions(conclusions.getGraph(), result.getGraph());
    long t2 = System.currentTimeMillis();
    // Timing covers bind + entailment check only, not document loading.
    lastTestDuration = t2 - t1;
    if (printProfile) {
      ((FBRuleInfGraph) graph).printLPProfile();
    }
    if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)) {
      // For negative entailment the expected outcome is inverted.
      correct = !correct;
    }
    return correct;
  } else if (test.hasProperty(RDF.type, OWLTest.InconsistencyTest)) {
    // System.out.println("Starting: " + test);
    Model input = getDoc(test, RDFTest.inputDocument);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(input.getGraph());
    // Success means the reasoner detects the inconsistency (validation fails).
    boolean correct = !graph.validate().isValid();
    long t2 = System.currentTimeMillis();
    lastTestDuration = t2 - t1;
    return correct;
  } else if (test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) {
    // Not used normally becase we are not complete enough to prove consistency
    // System.out.println("Starting: " + test);
    Model input = getDoc(test, RDFTest.inputDocument);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(input.getGraph());
    // Success means validation reports no inconsistency.
    boolean correct = graph.validate().isValid();
    long t2 = System.currentTimeMillis();
    lastTestDuration = t2 - t1;
    return correct;
  } else {
    // Unrecognized test type: dump its rdf:type values to aid debugging, then fail.
    for (StmtIterator i = test.listProperties(RDF.type); i.hasNext(); ) {
      System.out.println("Test type = " + i.nextStatement().getObject());
    }
    throw new ReasonerException("Unknown test type");
  }
}
/** Load the premises or conclusions for the test, optionally performing import processing. */
public Model getDoc(Resource test, Property docType, boolean processImports) throws IOException {
  if (!processImports) {
    // No import handling needed: delegate to the plain loader.
    return getDoc(test, docType);
  }
  // An OntModel follows owl:imports while the documents are loaded.
  Model withImports = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null);
  for (StmtIterator si = test.listProperties(docType); si.hasNext(); ) {
    withImports = withImports; // no-op guard removed below
    String fname = si.nextStatement().getObject().toString() + ".rdf";
    loadFile(fname, withImports);
  }
  return withImports;
}
// Builds a reified-statement-like resource (rdf:subject/predicate/object) referenced
// via rdf:value from another resource, then checks no statement has subject == object.
public void testQuintetOfQuadlets() {
  Resource rs = model.createResource();
  rs.addProperty(RDF.type, RDF.Statement);
  model.createResource().addProperty(RDF.value, rs);
  rs.addProperty(RDF.subject, model.createResource());
  rs.addProperty(RDF.predicate, model.createProperty("http://example.org/foo"));
  rs.addProperty(RDF.object, model.createResource());
  // NOTE(review): rdf:object is added twice with distinct blank nodes — looks like a
  // copy-paste slip (rs then has two rdf:object values), but may be deliberate to get
  // five statements on rs; confirm intent before changing.
  rs.addProperty(RDF.object, model.createResource());
  StmtIterator it = model.listStatements();
  while (it.hasNext()) {
    Statement s = it.nextStatement();
    // All objects above are fresh blank nodes or a property, so none equals its subject.
    assertFalse(s.getObject().equals(s.getSubject()));
  }
}
/**
 * Collect the URIs of all facets (WON.HAS_FACET values) declared on the need's content
 * model. Answers an empty list when the need has no stored content.
 */
public Collection<URI> getSupportedFacets(URI needUri) throws NoSuchNeedException {
  List<URI> facetUris = new LinkedList<URI>();
  Need need = DataAccessUtils.loadNeed(needRepository, needUri);
  Model content = rdfStorageService.loadContent(need);
  if (content == null) {
    return facetUris;
  }
  // The need's description hangs off the resource named by the default namespace prefix.
  Resource baseRes = content.getResource(content.getNsPrefixURI(""));
  for (StmtIterator facets = baseRes.listProperties(WON.HAS_FACET); facets.hasNext(); ) {
    RDFNode value = facets.nextStatement().getObject();
    // Only URI resources are valid facet identifiers; skip literals/blank nodes.
    if (value.isURIResource()) {
      facetUris.add(URI.create(value.toString()));
    }
  }
  return facetUris;
}
/**
 * Serve the model either as an HTML listing (default) or serialized RDF, chosen by the
 * request path extension (.rdf/.xml -> RDF/XML, .n3 -> N3, anything else -> HTML).
 *
 * @param request the servlet request; only the path extension is inspected
 * @param response receives the serialized model with a matching content type
 * @throws ServletException never thrown here, declared for the servlet contract
 * @throws IOException if writing to the response fails
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  String path = request.getPathInfo();
  boolean htmlOutput = true;
  String language = null;
  String contentType = "text/html; charset='utf-8'";
  if (path != null) {
    if (path.endsWith(".rdf")) {
      contentType = "application/rdf+xml; charset='utf-8'";
      htmlOutput = false;
      language = "RDF/XML";
    } else if (path.endsWith(".xml")) {
      contentType = "application/xml; charset='utf-8'";
      htmlOutput = false;
      language = "RDF/XML";
    } else if (path.endsWith(".n3")) {
      contentType = "text/n3; charset='utf-8'";
      htmlOutput = false;
      language = "N3";
    }
  }
  response.setContentType(contentType);
  response.setStatus(HttpServletResponse.SC_OK);
  Writer writer = response.getWriter();
  // Serialize under the board lock so the model is not mutated mid-write.
  synchronized (board) {
    if (htmlOutput) {
      writer.write(
          "<!DOCTYPE html>\n"
              + "<html lang='en'>"
              + "<head><meta charset='utf-8'/><title>MATe model</title></head>"
              + "<body><ul>");
      StmtIterator it = model.listStatements();
      /* TODO: well, this could be prettier */
      while (it.hasNext()) writer.write("<li>" + it.nextStatement() + "</li>");
      // BUG FIX: was "<ul>" — the list was opened a second time instead of closed.
      writer.write("</ul></body></html>");
    } else model.write(writer, language);
  }
}
/**
 * Execute an ASK query and compare the boolean outcome against the expected results.
 * The expected value is either a plain boolean result or encoded in a result-set model
 * as the "boolean" property of the single ResultSet resource.
 *
 * @param query the ASK query (unused directly; execution happens via {@code qe})
 * @param qe the prepared query execution to run
 * @throws Exception if the expected result model is malformed
 */
void runTestAsk(Query query, QueryExecution qe) throws Exception {
  boolean result = qe.execAsk();
  if (results == null) {
    return;
  }
  if (results.isBoolean()) {
    boolean expected = results.getBooleanResult();
    assertEquals("ASK test results do not match", expected, result);
    return;
  }
  // FIX: reuse the fetched model instead of calling results.getModel() twice.
  Model resultsAsModel = results.getModel();
  StmtIterator sIter =
      resultsAsModel.listStatements(null, RDF.type, ResultSetGraphVocab.ResultSet);
  if (!sIter.hasNext()) throw new QueryTestException("Can't find the ASK result");
  Statement s = sIter.nextStatement();
  if (sIter.hasNext()) throw new QueryTestException("Too many result sets in ASK result");
  Resource r = s.getSubject();
  Property p = resultsAsModel.createProperty(ResultSetGraphVocab.getURI() + "boolean");
  boolean expected = r.getRequiredProperty(p).getBoolean();
  // assertEquals only fails on mismatch, so the previous `if (x != result)` guard
  // was redundant.
  assertEquals("ASK test results do not match", expected, result);
}
/**
 * For every {@code rdfs:domain} statement in {@code schema}, assert in {@code result}
 * that the subject of each use of that property has the domain class as its rdf:type.
 */
protected static void addDomainTypes(Model result, Model schema) {
  // FIX: buffer the new statements and add them after iteration finishes — the
  // original added directly to `result` while iterating result.listStatements(...),
  // risking concurrent modification. This also matches the sibling helpers
  // addRangeTypes and addSupertypes, which both use a temporary model.
  Model toAdd = ModelFactory.createDefaultModel();
  for (StmtIterator it = schema.listStatements(ANY, RDFS.domain, ANY); it.hasNext(); ) {
    Statement s = it.nextStatement();
    Property property = s.getSubject().as(Property.class);
    RDFNode type = s.getObject();
    for (StmtIterator x = result.listStatements(ANY, property, ANY); x.hasNext(); ) {
      Statement t = x.nextStatement();
      toAdd.add(t.getSubject(), RDF.type, type);
    }
  }
  result.add(toAdd);
}
/**
 * For every {@code rdfs:range} statement in {@code schema}, assert in {@code result}
 * that each resource-valued object of that property has the range class as its rdf:type.
 */
protected static void addRangeTypes(Model result, Model schema) {
  // Buffer additions so `result` is not mutated while its statements are iterated.
  Model pending = ModelFactory.createDefaultModel();
  for (StmtIterator ranges = schema.listStatements(ANY, RDFS.range, ANY); ranges.hasNext(); ) {
    Statement rangeStmt = ranges.nextStatement();
    Property property = rangeStmt.getSubject().as(Property.class);
    RDFNode rangeClass = rangeStmt.getObject();
    for (StmtIterator uses = result.listStatements(ANY, property, ANY); uses.hasNext(); ) {
      RDFNode value = uses.nextStatement().getObject();
      // Literals cannot carry an rdf:type, so only resource objects are typed.
      if (value.isResource()) {
        pending.add((Resource) value, RDF.type, rangeClass);
      }
    }
  }
  result.add(pending);
}
protected static void addSupertypes(Model result) { Model temp = ModelFactory.createDefaultModel(); for (StmtIterator it = result.listStatements(ANY, RDF.type, ANY); it.hasNext(); ) { Statement s = it.nextStatement(); Resource c = AssemblerHelp.getResource(s); for (StmtIterator subclasses = result.listStatements(c, RDFS.subClassOf, ANY); subclasses.hasNext(); ) { RDFNode type = subclasses.nextStatement().getObject(); // System.err.println( ">> adding super type: subject " + s.getSubject() + ", type " + type // ); temp.add(s.getSubject(), RDF.type, type); } } result.add(temp); }
/** Process every owl:intersectionOf statement in the schema via the statement overload. */
private static void addIntersections(Model result, Model schema) {
  for (StmtIterator it = schema.listStatements(ANY, OWL.intersectionOf, ANY); it.hasNext(); ) {
    addIntersections(result, schema, it.nextStatement());
  }
}
/**
 * Example the conclusions graph for introduction of restrictions which require a comprehension
 * rewrite and declare new (anon) classes for those restrictions.
 */
public void comprehensionAxioms(Model premises, Model conclusions) {
  // Comprehend all restriction declarations and note them in a map
  // (maps each restriction in the conclusions to its comprehended twin in the premises).
  Map<Resource, Resource> comprehension = new HashMap<>();
  StmtIterator ri = conclusions.listStatements(null, RDF.type, OWL.Restriction);
  while (ri.hasNext()) {
    Resource restriction = ri.nextStatement().getSubject();
    StmtIterator pi = restriction.listProperties(OWL.onProperty);
    while (pi.hasNext()) {
      Resource prop = (Resource) pi.nextStatement().getObject();
      StmtIterator vi = restriction.listProperties();
      while (vi.hasNext()) {
        Statement rs = vi.nextStatement();
        // Skip the onProperty statement itself; mirror every other constraint.
        if (!rs.getPredicate().equals(OWL.onProperty)) {
          // Have a restriction on(prop) of type rs in the conclusions
          // So assert a premise that such a restriction could exisit
          Resource comp =
              premises
                  .createResource()
                  .addProperty(RDF.type, OWL.Restriction)
                  .addProperty(OWL.onProperty, prop)
                  .addProperty(rs.getPredicate(), rs.getObject());
          comprehension.put(restriction, comp);
        }
      }
    }
  }
  // Comprehend any intersectionOf lists. Introduce anon class which has the form
  // of the intersection expression.
  // Rewrite queries of the form (X intersectionOf Y) to the form
  //    (X equivalentClass ?CC) (?CC intersectionOf Y)
  StmtIterator ii = conclusions.listStatements(null, OWL.intersectionOf, (RDFNode) null);
  // Snapshot first: the loop below removes statements from `conclusions`, which must
  // not happen while an iterator over it is live.
  List<Statement> intersections = new ArrayList<>();
  while (ii.hasNext()) {
    intersections.add(ii.nextStatement());
  }
  for (Statement is : intersections) {
    // Declare in the premises that such an intersection exists
    Resource comp =
        premises
            .createResource()
            .addProperty(RDF.type, OWL.Class)
            .addProperty(
                OWL.intersectionOf,
                mapList(premises, (Resource) is.getObject(), comprehension));
    // Rewrite the conclusions to be a test for equivalence between the class being
    // queried and the comprehended interesection
    conclusions.remove(is);
    conclusions.add(is.getSubject(), OWL.equivalentClass, comp);
  }
  // Comprehend any oneOf lists
  StmtIterator io = conclusions.listStatements(null, OWL.oneOf, (RDFNode) null);
  while (io.hasNext()) {
    Statement s = io.nextStatement();
    // NOTE: `comp` is never read again, but the createResource/addProperty call has the
    // required side effect of asserting the comprehended oneOf class in the premises.
    Resource comp = premises.createResource().addProperty(OWL.oneOf, s.getObject());
  }
}
public static void main(String[] args) { List<String> obj = new ArrayList<String>(); Scanner input = new Scanner(System.in); System.out.print("Enter URI: "); String userIn = input.nextLine(); // create an empty Model Model model = ModelFactory.createDefaultModel(); // read the RDF/XML file model.read(userIn); // write it to standard out // model.write(System.out); // list the statements in the Model StmtIterator iter = model.listStatements(); System.out.println(); // print out the predicate, subject and object of each statement while (iter.hasNext()) { Statement stmt = iter.nextStatement(); // get next statement Resource subject = stmt.getSubject(); // get the subject Property predicate = stmt.getPredicate(); // get the predicate RDFNode object = stmt.getObject(); // get the object System.out.print(subject.toString()); System.out.print(" -> " + predicate.toString() + " -> "); if (object instanceof Resource) { System.out.print(object.toString() + "\n"); } else { // object is a literal System.out.print(" \"" + object.toString() + "\"\n"); } } /* for(int i = 0; i < (obj.size()); i++){ String sparqlQueryString1= "SELECT ?s ?o "+ "WHERE {"+ "?s ?p ?o ."+ "?o <bif:contains> \""+obj.get(i)+"\" ."+ "}"+ "limit 10"; Query query = QueryFactory.create(sparqlQueryString1); QueryExecution qexec = QueryExecutionFactory.sparqlService("http://pubmed.bio2rdf.org/sparql", query); ResultSet results = qexec.execSelect(); System.out.println("Query: "+obj.get(i)); ResultSetFormatter.out(System.out, results, query); qexec.close() ; } */ }
/**
 * Copy into {@code result} every rdfs:subClassOf statement from {@code schema} whose
 * subject and object are both named (URI) resources.
 */
protected static void addSubclassesFrom(Model result, Model schema) {
  StmtIterator subclassStmts = schema.listStatements(ANY, RDFS.subClassOf, ANY);
  while (subclassStmts.hasNext()) {
    Statement stmt = subclassStmts.nextStatement();
    // Blank-node classes are skipped: only fully named subclass links are copied.
    boolean bothNamed = stmt.getSubject().isURIResource() && stmt.getObject().isURIResource();
    if (bothNamed) {
      result.add(stmt);
    }
  }
}
/**
 * Answer the set of all classes which appear in <code>m</code> as the subject or object of a
 * <code>rdfs:subClassOf</code> statement.
 */
private static Set<RDFNode> findClassesBySubClassOf(Model m) {
  Set<RDFNode> classes = new HashSet<>();
  for (StmtIterator it = m.listStatements(null, RDFS.subClassOf, (RDFNode) null);
      it.hasNext(); ) {
    // addClasses records both ends of the subclass statement.
    addClasses(classes, it.nextStatement());
  }
  return classes;
}