/**
 * Checks whether the given context contains only audit/provenance metadata and
 * reification triples, i.e. nothing worth keeping.
 */
private boolean isObsolete(Resource ctx) throws SailException {
  CloseableIteration<? extends Statement, SailException> stmts;
  stmts = super.getStatements(null, null, null, true, ctx);
  try {
    while (stmts.hasNext()) {
      Statement st = stmts.next();
      URI pred = st.getPredicate();
      Value obj = st.getObject();
      String ns = pred.getNamespace();
      // Skip statements whose predicate belongs to an audit/provenance vocabulary
      if (Audit.NAMESPACE.equals(ns) || PROV.equals(ns) || AUDIT_2012.equals(ns))
        continue;
      // Skip reification triples (rdf:subject / rdf:predicate / rdf:object)
      if (RDF.SUBJECT.equals(pred) || RDF.PREDICATE.equals(pred) || RDF.OBJECT.equals(pred))
        continue;
      // Skip rdf:type statements typing the resource into an audit/provenance/RDF class
      if (RDF.TYPE.equals(pred) && obj instanceof URI) {
        ns = ((URI) obj).getNamespace();
        if (Audit.NAMESPACE.equals(ns) || PROV.equals(ns)
            || AUDIT_2012.equals(ns) || RDF.NAMESPACE.equals(ns))
          continue;
      }
      // Any other statement means the context still carries real data
      return false;
    }
  } finally {
    stmts.close();
  }
  return true;
}
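For context, a hedged sketch of how isObsolete() might be driven: the super.getStatements(...) call suggests this method lives in a Sesame SailConnectionWrapper subclass, so a cleanup pass could enumerate contexts and clear the obsolete ones. purgeObsoleteContexts is an illustrative name, not part of the original source.

// Illustrative sketch only: iterate all named contexts and clear the obsolete ones.
private void purgeObsoleteContexts() throws SailException {
  CloseableIteration<? extends Resource, SailException> ctxs = super.getContextIDs();
  try {
    while (ctxs.hasNext()) {
      Resource ctx = ctxs.next();
      if (isObsolete(ctx)) {
        super.clear(ctx); // drop every statement in that context
      }
    }
  } finally {
    ctxs.close();
  }
}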
/**
 * Finds properties with no domain or range defined, and also adds some generic
 * elements such as rdfs:label, rdf:type, and owl:Thing.
 *
 * @return the list of "hanging" ontology elements
 */
List<OntologyElement> findHangingElements() throws Exception {
  List<OntologyElement> elements = new ArrayList<OntologyElement>();
  Set<String> properties = new HashSet<String>();
  properties.add(RDFS.LABEL.toString());
  properties.add(RDF.TYPE.toString());
  String table = suggestionsHelper.getDatatypePropertiesNoDomain();
  properties.addAll(StringUtil.fromStringToSet(table));
  String thingUri = "http://www.w3.org/2002/07/owl#Thing";
  // Properties whose domain or range is owl:Thing are effectively unconstrained
  properties.addAll(
      luceneAnnotator.getDefinedPropertiesWhereClassIsADomain(thingUri, forceSuperClasses));
  properties.addAll(
      luceneAnnotator.getDefinedPropertiesWhereClassIsARange(thingUri, forceSuperClasses));
  elements.addAll(returnPropertyElements(properties));
  logger.info("Adding: " + properties.size() + " hanging properties");
  Set<String> classes = new HashSet<String>();
  classes.add(thingUri);
  elements.addAll(returnClassElements(classes));
  return elements;
}
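suggestionsHelper.getDatatypePropertiesNoDomain() is not shown in this source; as a rough illustration, the kind of SPARQL it might issue for "datatype properties with no declared domain" could look like the sketch below. The query text is an assumption, not the helper's actual implementation.

// Hypothetical SPARQL for "datatype properties with no declared domain";
// the real helper may differ.
String noDomainQuery =
    "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
        + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
        + "SELECT ?p WHERE { ?p a owl:DatatypeProperty . "
        + "FILTER NOT EXISTS { ?p rdfs:domain ?d } }";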
/**
 * Asynchronously explains why {@code instance} is an instance of {@code cl}
 *
 * @param instance The instance
 * @param cl The class
 * @param callback The {@link Callback} to execute after the explanation is done
 */
public void instance(Resource instance, Resource cl, Callback<Graph> callback) {
  String query = selectQuery(instance.stringValue(), RDF.TYPE.stringValue(), cl.stringValue());
  query(query, callback);
}
/**
 * Explains why {@code instance} is an instance of {@code cl}
 *
 * @param instance The instance
 * @param cl The class
 * @return The explanation {@link org.openrdf.model.Graph Graph}
 * @throws PelletClientException If there is an error or {@code instance} is not an instance
 *     of {@code cl}
 */
public Graph instance(Resource instance, Resource cl) throws PelletClientException {
  String query = selectQuery(instance.stringValue(), RDF.TYPE.stringValue(), cl.stringValue());
  return query(query);
}
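A minimal usage sketch for the synchronous variant, assuming an initialized client instance client and a ValueFactory vf; the example URIs are made up.

// Explain why ex:alice is classified as an ex:Person (illustrative URIs).
URI alice = vf.createURI("http://example.org/alice");
URI person = vf.createURI("http://example.org/Person");
try {
  Graph explanation = client.instance(alice, person);
  for (Statement st : explanation) { // org.openrdf.model.Graph is a Collection<Statement>
    System.out.println(st);
  }
} catch (PelletClientException e) {
  // thrown on error, or when alice is not an instance of Person
  e.printStackTrace();
}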
public class SesameDataSet {

  // Log
  private static Log log = LogFactory.getLog(SesameDataSet.class);

  private Repository currentRepository = null;

  // useful -local- constants
  static RDFFormat NTRIPLES = RDFFormat.NTRIPLES;
  static RDFFormat N3 = RDFFormat.N3;
  static RDFFormat RDFXML = RDFFormat.RDFXML;
  static String RDFTYPE = RDF.TYPE.toString();

  /** In-memory Sesame repository without inferencing */
  public SesameDataSet() {
    this(false);
  }

  /**
   * In-memory Sesame repository with optional RDFS inferencing
   *
   * @param inferencing whether to wrap the memory store in a forward-chaining RDFS inferencer
   */
  public SesameDataSet(boolean inferencing) {
    try {
      if (inferencing) {
        currentRepository = new SailRepository(new ForwardChainingRDFSInferencer(new MemoryStore()));
      } else {
        currentRepository = new SailRepository(new MemoryStore());
      }
      currentRepository.initialize();
    } catch (RepositoryException e) {
      e.printStackTrace();
    }
  }

  /**
   * Disk-backed (native) Sesame repository with optional RDFS inferencing
   *
   * @param pathToDir directory holding the native store's data files
   * @param inferencing whether to wrap the native store in a forward-chaining RDFS inferencer
   */
  public SesameDataSet(String pathToDir, boolean inferencing) {
    File f = new File(pathToDir);
    try {
      if (inferencing) {
        currentRepository = new SailRepository(new ForwardChainingRDFSInferencer(new NativeStore(f)));
      } else {
        currentRepository = new SailRepository(new NativeStore(f));
      }
      currentRepository.initialize();
    } catch (RepositoryException e) {
      e.printStackTrace();
    }
  }

  /**
   * Remote repository on a Sesame HTTP server
   *
   * @param sesameServer base URL of the Sesame server
   * @param repositoryID ID of the repository on that server
   */
  public SesameDataSet(String sesameServer, String repositoryID) {
    currentRepository = new HTTPRepository(sesameServer, repositoryID);
    try {
      currentRepository.initialize();
    } catch (RepositoryException e) {
      e.printStackTrace();
    }
  }

  /**
   * Load data in specified graph (use default graph if contexts is null)
   *
   * @param filePath path to the file with RDF data
   * @param format RDF serialization format of the file
   * @param contexts varArgs context resources
   */
  public void loadDataFromFile(String filePath, RDFFormat format, Resource... contexts) {
    RepositoryConnection con;
    try {
      con = currentRepository.getConnection();
      try {
        // upload a file
        File f = new File(filePath);
        con.add(f, null, format, contexts);
      } catch (RDFParseException e) {
        e.printStackTrace();
      } catch (RepositoryException e) {
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      } finally {
        try {
          con.close();
        } catch (RepositoryException e) {
          e.printStackTrace();
        }
      }
    } catch (RepositoryException e1) {
      e1.printStackTrace();
    }
  }

  /**
   * Load RDF/XML data from a URL into the default graph
   *
   * @param stringURL URL of the RDF/XML document
   */
  public void loadDataFromURL(String stringURL) {
    RepositoryConnection con;
    try {
      con = currentRepository.getConnection();
      try {
        // upload a URL
        URL url = new URL(stringURL);
        con.add(url, null, RDFFormat.RDFXML);
      } catch (RDFParseException e) {
        e.printStackTrace();
      } catch (RepositoryException e) {
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      } finally {
        try {
          con.close();
        } catch (RepositoryException e) {
          e.printStackTrace();
        }
      }
    } catch (RepositoryException e1) {
      e1.printStackTrace();
    }
  }

  /**
   * Literal factory
   *
   * @param s the literal value
   * @param typeuri uri representing the type (generally xsd)
   * @return the typed Literal, or null on error
   */
  public org.openrdf.model.Literal Literal(String s, URI typeuri) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        if (typeuri == null) {
          return vf.createLiteral(s);
        } else {
          return vf.createLiteral(s, typeuri);
        }
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * Untyped Literal factory
   *
   * @param s the literal
   * @return the plain Literal, or null on error
   */
  public org.openrdf.model.Literal Literal(String s) {
    return Literal(s, null);
  }

  /**
   * URIref factory
   *
   * @param uri the URI string
   * @return the URI resource, or null on error
   */
  public URI URIref(String uri) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        return vf.createURI(uri);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * BNode factory
   *
   * @return a fresh blank node, or null on error
   */
  public BNode bnode() {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        return vf.createBNode();
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * Insert Triple/Statement into graph
   *
   * @param s subject uriref
   * @param p predicate uriref
   * @param o value object (URIref or Literal)
   * @param contexts varArgs context objects (use default graph if null)
   */
  public void add(Resource s, URI p, Value o, Resource... contexts) {
    if (log.isDebugEnabled())
      log.debug("[SesameDataSet:add] Add triple (" + s.stringValue() + ", " + p.stringValue()
          + ", " + o.stringValue() + ").");
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ValueFactory myFactory = con.getValueFactory();
        Statement st = myFactory.createStatement(s, p, o);
        con.add(st, contexts);
        con.commit();
      } catch (Exception e) {
        e.printStackTrace();
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace(); // was silently swallowed; at least report it
    }
  }

  public void remove(Resource s, URI p, Value o, Resource... context) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ValueFactory myFactory = con.getValueFactory();
        Statement st = myFactory.createStatement(s, p, o);
        con.remove(st, context);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace(); // was silently swallowed; at least report it
    }
  }

  /**
   * Import RDF data from a string
   *
   * @param rdfstring string with RDF data
   * @param format RDF format of the string (used to select parser)
   */
  public void addString(String rdfstring, RDFFormat format) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        StringReader sr = new StringReader(rdfstring);
        con.add(sr, "", format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Import RDF data from a file
   *
   * @param filepath location of the file (/path/file) with RDF data
   * @param format RDF format of the file (used to select parser)
   */
  public void addFile(String filepath, RDFFormat format) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        con.add(new File(filepath), "", format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Import data from a URI source. The request is made with the proper HTTP ACCEPT header and
   * will follow redirects for proper LOD source negotiation.
   *
   * @param urlstring absolute URI of the data source
   * @param format RDF format to request/parse from data source
   */
  public void addURI(String urlstring, RDFFormat format) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        URL url = new URL(urlstring);
        URLConnection uricon = url.openConnection();
        uricon.addRequestProperty("accept", format.getDefaultMIMEType());
        InputStream instream = uricon.getInputStream();
        con.add(instream, urlstring, format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Dump RDF graph
   *
   * @param out output stream for the serialization
   * @param outform the RDF serialization format for the dump
   */
  public void dumpRDF(OutputStream out, RDFFormat outform) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        RDFWriter w = Rio.createWriter(outform, out);
        con.export(w);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Dump RDF graph to a file
   *
   * @param filePath destination file for the serialization
   * @param outform the RDF serialization format for the dump
   */
  public void dumpRDF(String filePath, RDFFormat outform) {
    OutputStream output;
    try {
      output = new FileOutputStream(filePath);
      dumpRDF(output, outform);
      try {
        output.close();
      } catch (IOException e) {
        e.printStackTrace();
      }
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    }
  }

  public String printRDF(RDFFormat outform) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        RDFWriter w = Rio.createWriter(outform, out);
        con.export(w);
        return new String(out.toByteArray(), "UTF-8");
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Convenience URI import for RDF/XML sources
   *
   * @param urlstring absolute URI of the data source
   */
  public void addURI(String urlstring) {
    addURI(urlstring, RDFFormat.RDFXML);
  }

  /**
   * Tuple pattern query - find all statements with the pattern, where null is a wildcard
   *
   * @param s subject (null for wildcard)
   * @param p predicate (null for wildcard)
   * @param o object (null for wildcard)
   * @param contexts varArgs contexts (use default graph if null)
   * @return list of matching statements, or null on error
   */
  public List<Statement> tuplePattern(Resource s, URI p, Value o, Resource... contexts) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        RepositoryResult<Statement> repres = con.getStatements(s, p, o, true, contexts);
        ArrayList<Statement> reslist = new ArrayList<Statement>();
        while (repres.hasNext()) {
          reslist.add(repres.next());
        }
        return reslist;
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Execute a CONSTRUCT/DESCRIBE SPARQL query against the graphs
   *
   * @param qs CONSTRUCT or DESCRIBE SPARQL query
   * @param format the serialization format for the returned graph
   * @return serialized graph of results
   */
  public String runSPARQL(String qs, RDFFormat format) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        GraphQuery query = con.prepareGraphQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
        StringWriter stringout = new StringWriter();
        RDFWriter w = Rio.createWriter(format, stringout);
        query.evaluate(w);
        return stringout.toString();
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Execute a SELECT SPARQL query against the graphs
   *
   * @param qs SELECT SPARQL query
   * @return list of solutions, each containing a hashmap of bindings
   */
  public List<HashMap<String, Value>> runSPARQL(String qs) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        TupleQuery query = con.prepareTupleQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
        TupleQueryResult qres = query.evaluate();
        ArrayList<HashMap<String, Value>> reslist = new ArrayList<HashMap<String, Value>>();
        while (qres.hasNext()) {
          BindingSet b = qres.next();
          Set<String> names = b.getBindingNames();
          HashMap<String, Value> hm = new HashMap<String, Value>();
          for (String n : names) {
            hm.put(n, b.getValue(n));
          }
          reslist.add(hm);
        }
        return reslist;
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Execute a CONSTRUCT/DESCRIBE SPARQL query read from a file. The file must contain exactly
   * one query. // TODO: lift this one-query-per-file limit
   *
   * @param pathToSPARQLFile path to the SPARQL query file
   * @param format the serialization format for the returned graph
   * @return serialized graph of results
   */
  public String runSPARQLFromFile(String pathToSPARQLFile, RDFFormat format) {
    // Read the SPARQL query; make sure no line starts with "--", "/*" or any other
    // non-alphabetical character
    String s = null;
    StringBuffer sb = new StringBuffer();
    try {
      FileReader fr = new FileReader(new File(pathToSPARQLFile));
      BufferedReader br = new BufferedReader(fr);
      while ((s = br.readLine()) != null) {
        sb.append(s).append('\n'); // keep line breaks so query tokens don't run together
      }
      br.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
    if (log.isDebugEnabled())
      log.debug("[Graph:runSPARQLFromFile] SPARQL query : " + sb.toString());
    return runSPARQL(sb.toString(), format);
  }

  /**
   * Execute a SELECT SPARQL query read from a file. The file must contain exactly one query.
   * // TODO: lift this one-query-per-file limit
   *
   * @param pathToSPARQLFile path to the SPARQL query file
   * @return list of solutions, each containing a hashmap of bindings
   */
  public List<HashMap<String, Value>> runSPARQLFromFile(String pathToSPARQLFile) {
    // Read the SPARQL query; make sure no line starts with "--", "/*" or any other
    // non-alphabetical character
    String s = null;
    StringBuffer sb = new StringBuffer();
    try {
      FileReader fr = new FileReader(new File(pathToSPARQLFile));
      BufferedReader br = new BufferedReader(fr);
      while ((s = br.readLine()) != null) {
        sb.append(s).append('\n'); // keep line breaks so query tokens don't run together
      }
      br.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
    if (log.isDebugEnabled())
      log.debug("[Graph:runSPARQLFromFile] SPARQL query : " + sb.toString());
    return runSPARQL(sb.toString());
  }

  /**
   * Close current repository.
   *
   * @throws RepositoryException
   */
  public void closeRepository() throws RepositoryException {
    currentRepository.shutDown();
  }

  /** Return the number of triples in the repository. */
  public int getSize() {
    return tuplePattern(null, null, null).size();
  }

  public String toString() {
    StringBuilder result = new StringBuilder("{[SesameDataSet:toString] triples = ");
    List<Statement> triples = tuplePattern(null, null, null);
    for (Statement st : triples)
      result.append(st).append(System.getProperty("line.separator"));
    result.append("}");
    return result.toString();
  }
}
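A minimal usage sketch for SesameDataSet; the FOAF spec URL and the query are illustrative, not taken from the original source.

// Build an in-memory store with RDFS inferencing, load a vocabulary, and query it.
SesameDataSet ds = new SesameDataSet(true);
ds.addURI("http://xmlns.com/foaf/spec/index.rdf"); // assumes this URL serves RDF/XML
List<HashMap<String, Value>> rows =
    ds.runSPARQL("SELECT ?s WHERE { ?s a <http://www.w3.org/2002/07/owl#Class> } LIMIT 5");
for (HashMap<String, Value> row : rows) {
  System.out.println(row.get("s"));
}
System.out.println(ds.getSize() + " triples loaded");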
public class SimpleSesameGraph {

  Repository therepository = null;

  // useful -local- constants
  static RDFFormat NTRIPLES = RDFFormat.NTRIPLES;
  static RDFFormat N3 = RDFFormat.N3;
  static RDFFormat RDFXML = RDFFormat.RDFXML;
  static String RDFTYPE = RDF.TYPE.toString();

  /** In-memory Sesame repository without type inferencing */
  public SimpleSesameGraph() {
    this(false);
  }

  /**
   * In-memory Sesame repository with optional inferencing
   *
   * @param inferencing determines whether we load the inferencer or not
   */
  public SimpleSesameGraph(boolean inferencing) {
    try {
      if (inferencing) {
        therepository = new SailRepository(new ForwardChainingRDFSInferencer(new MemoryStore()));
      } else {
        therepository = new SailRepository(new MemoryStore());
      }
      therepository.initialize();
    } catch (RepositoryException e) {
      e.printStackTrace();
    }
  }

  /**
   * Literal factory
   *
   * @param s the literal value
   * @param typeuri uri representing the type (generally xsd)
   * @return Literal type
   */
  public org.openrdf.model.Literal Literal(String s, URI typeuri) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        if (typeuri == null) {
          return vf.createLiteral(s);
        } else {
          return vf.createLiteral(s, typeuri);
        }
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * Untyped Literal factory
   *
   * @param s the literal
   * @return Literal value
   */
  public org.openrdf.model.Literal Literal(String s) {
    return Literal(s, null);
  }

  /**
   * URIref factory
   *
   * @param uri to build
   * @return URI resource
   */
  public URI URIref(String uri) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        return vf.createURI(uri);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * BNode factory
   *
   * @return BNode a blank node
   */
  public BNode bnode() {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        ValueFactory vf = con.getValueFactory();
        return vf.createBNode();
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
  }

  /**
   * Dump RDF graph
   *
   * @param out output stream for the serialization
   * @param outform the RDF serialization format for the dump
   */
  public void dumpRDF(OutputStream out, RDFFormat outform) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        RDFWriter w = Rio.createWriter(outform, out);
        con.export(w);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Convenience URI import for RDF/XML sources
   *
   * @param urlstring absolute URI of the data source
   */
  public void addURI(String urlstring) {
    addURI(urlstring, RDFFormat.RDFXML);
  }

  /**
   * Import data from a URI source. The request is made with the proper HTTP ACCEPT header and
   * will follow redirects for proper LOD source negotiation.
   *
   * @param urlstring absolute URI of the data source
   * @param format RDF format to request/parse from data source
   */
  public void addURI(String urlstring, RDFFormat format) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        URL url = new URL(urlstring);
        URLConnection uricon = url.openConnection();
        uricon.addRequestProperty("accept", format.getDefaultMIMEType());
        InputStream instream = uricon.getInputStream();
        con.add(instream, urlstring, format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Import RDF data from a string
   *
   * @param rdfstring string with RDF data
   * @param format RDF format of the string (used to select parser)
   */
  public void addString(String rdfstring, RDFFormat format) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        StringReader sr = new StringReader(rdfstring);
        con.add(sr, "", format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Import RDF data from a file
   *
   * @param filepath of file (/path/file) with RDF data
   * @param format RDF format of the file (used to select parser)
   */
  public void addFile(String filepath, RDFFormat format) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        con.add(new File(filepath), "", format);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Insert Triple/Statement into graph
   *
   * @param s subject uriref
   * @param p predicate uriref
   * @param o value object (URIref or Literal)
   */
  public void add(URI s, URI p, Value o) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        ValueFactory myFactory = con.getValueFactory();
        Statement st = myFactory.createStatement(s, p, o);
        con.add(st);
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace(); // was silently swallowed; at least report it
    }
  }

  /**
   * Tuple pattern query - find all statements with the pattern, where null is a wildcard
   *
   * @param s subject (null for wildcard)
   * @param p predicate (null for wildcard)
   * @param o object (null for wildcard)
   * @return list of matching statements, or null on error
   */
  public List<Statement> tuplePattern(URI s, URI p, Value o) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        RepositoryResult<Statement> repres = con.getStatements(s, p, o, true);
        ArrayList<Statement> reslist = new ArrayList<Statement>();
        while (repres.hasNext()) {
          reslist.add(repres.next());
        }
        return reslist;
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Execute a CONSTRUCT/DESCRIBE SPARQL query against the graph
   *
   * @param qs CONSTRUCT or DESCRIBE SPARQL query
   * @param format the serialization format for the returned graph
   * @return serialized graph of results
   */
  public String runSPARQL(String qs, RDFFormat format) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        GraphQuery query = con.prepareGraphQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
        StringWriter stringout = new StringWriter();
        RDFWriter w = Rio.createWriter(format, stringout);
        query.evaluate(w);
        return stringout.toString();
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  /**
   * Execute a SELECT SPARQL query against the graph
   *
   * @param qs SELECT SPARQL query
   * @return list of solutions, each containing a hashmap of bindings
   */
  public List<HashMap<String, Value>> runSPARQL(String qs) {
    try {
      RepositoryConnection con = therepository.getConnection();
      try {
        TupleQuery query = con.prepareTupleQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
        TupleQueryResult qres = query.evaluate();
        ArrayList<HashMap<String, Value>> reslist = new ArrayList<HashMap<String, Value>>();
        while (qres.hasNext()) {
          BindingSet b = qres.next();
          Set<String> names = b.getBindingNames();
          HashMap<String, Value> hm = new HashMap<String, Value>();
          for (String n : names) {
            hm.put(n, b.getValue(n));
          }
          reslist.add(hm);
        }
        return reslist;
      } finally {
        con.close();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
    return null;
  }

  public static void main(String[] args) {
    // create a graph with type inferencing
    SimpleSesameGraph g = new SimpleSesameGraph(true);

    // // load the film schema and the example data
    // g.addFile("../roadstead_ontologies/film-ontology.owl", SimpleSesameGraph.RDFXML);

    // try loading the geopolitical ontology
    g.addURI("http://aims.fao.org/aos/geopolitical.owl");

    // List<HashMap<String, Value>> solutions = g.runSPARQL("SELECT ?who WHERE { "
    //     + "?who <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://roadstead.org/film#Person> ."
    //     + "}");

    /* Here we are looking for the names of all the things with a name */
    List<HashMap<String, Value>> solutions = g.runSPARQL(
        "SELECT ?name WHERE { "
            + "?what <http://aims.fao.org/aos/geopolitical.owl#territory> ?name ."
            + "?what <http://aims.fao.org/aos/geopolitical.owl#nameListEN> ?name ."
            + "}");
    for (HashMap<String, Value> solution : solutions) {
      for (Value value : solution.values()) {
        System.out.println("country: " + value.stringValue());
      }
    }
  }
}
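main() above only exercises the SELECT overload; continuing inside main(), a sketch of the CONSTRUCT/DESCRIBE path might look like this (the DESCRIBE target is a guessed resource URI, not verified against the ontology).

// Serialize everything known about one resource as N3 (illustrative URI).
String n3 = g.runSPARQL(
    "DESCRIBE <http://aims.fao.org/aos/geopolitical.owl#Italy>",
    SimpleSesameGraph.N3);
System.out.println(n3);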