public Collection<String> getAllDisjointProperties(String startingProperty)
        throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> allDisjointProperties = new HashSet<>();
    RepositoryConnection con = this.repository.getConnection();
    try {
        // Declare the owl: and rdf: prefixes explicitly so the query does not depend
        // on store-defined default namespaces.
        TupleQuery allDisjointPropertiesQuery = con.prepareTupleQuery(
                QueryLanguage.SPARQL,
                "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                        + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
                        + "SELECT ?items "
                        + "WHERE { ?y a owl:AllDisjointProperties . "
                        + "?y owl:members ?members . "
                        + "?members rdf:rest*/rdf:first <" + startingProperty + "> . "
                        + "?members rdf:rest*/rdf:first ?items . "
                        + "FILTER (?items != <" + startingProperty + ">) }");
        TupleQueryResult allDisjointPropertiesResult = allDisjointPropertiesQuery.evaluate();
        try {
            while (allDisjointPropertiesResult.hasNext()) {
                BindingSet bindingSet = allDisjointPropertiesResult.next();
                Value name = bindingSet.getValue("items");
                allDisjointProperties.add(name.toString());
            }
        } finally {
            allDisjointPropertiesResult.close();
        }
    } finally {
        con.close();
    }
    return allDisjointProperties;
}
public static ArrayList<QueryData> parseOSMOQueryData(TupleQueryResult qres) {
    ArrayList<QueryData> queryDataList = new ArrayList<QueryData>();
    try {
        while (qres.hasNext()) {
            BindingSet b = qres.next();
            Set<String> names = b.getBindingNames();
            QueryData queryData = new QueryData();
            for (String n : names) {
                if (n.equalsIgnoreCase("queryID")) {
                    String str = (b.getValue(n) == null) ? null : b.getValue(n).stringValue();
                    queryData.setId(str);
                    System.out.print("queryID: " + queryData.getId() + " ");
                } else if (n.equalsIgnoreCase("queryString")) {
                    String str = (b.getValue(n) == null) ? null : b.getValue(n).stringValue();
                    queryData.setQueryString(str);
                    System.out.print("queryString: " + queryData.getQueryString() + " ");
                }
            }
            queryDataList.add(queryData);
        } // while
        return queryDataList;
    } catch (QueryEvaluationException e) {
        e.printStackTrace();
        return null;
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
public Collection<String> getIntersectionOf(String startingClass)
        throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> intersectionOfClasses = new HashSet<>();
    RepositoryConnection con = this.repository.getConnection();
    try {
        // Declare the owl: and rdf: prefixes explicitly so the query does not depend
        // on store-defined default namespaces.
        TupleQuery intersectionOfQuery = con.prepareTupleQuery(
                QueryLanguage.SPARQL,
                "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                        + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
                        + "SELECT ?c WHERE { ?c owl:intersectionOf ?l . "
                        + "?l rdf:rest*/rdf:first <" + startingClass + "> }");
        TupleQueryResult intersectionOfResult = intersectionOfQuery.evaluate();
        try {
            while (intersectionOfResult.hasNext()) {
                BindingSet bindingSet = intersectionOfResult.next();
                Value name = bindingSet.getValue("c");
                intersectionOfClasses.add(name.toString());
                // System.out.println("intersectionOf: " + name);
            }
        } finally {
            intersectionOfResult.close();
        }
    } finally {
        con.close();
    }
    return intersectionOfClasses;
}
/** * Execute a SELECT SPARQL query against the graphs * * @param qs SELECT SPARQL query * @return list of solutions, each containing a hashmap of bindings */ public List<HashMap<String, Value>> runSPARQL(String qs) { try { RepositoryConnection con = currentRepository.getConnection(); try { TupleQuery query = con.prepareTupleQuery(org.openrdf.query.QueryLanguage.SPARQL, qs); TupleQueryResult qres = query.evaluate(); ArrayList<HashMap<String, Value>> reslist = new ArrayList<HashMap<String, Value>>(); while (qres.hasNext()) { BindingSet b = qres.next(); Set<String> names = b.getBindingNames(); HashMap<String, Value> hm = new HashMap<String, Value>(); for (String n : names) { hm.put(n, b.getValue(n)); } reslist.add(hm); } return reslist; } finally { con.close(); } } catch (Exception e) { e.printStackTrace(); } return null; }
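// A brief usage sketch for runSPARQL(...) above; the query string and the variable
// name "s" are illustrative only and assumed to exist in the queried data.
public void printSubjects() {
    List<HashMap<String, Value>> rows = runSPARQL("SELECT ?s WHERE { ?s ?p ?o } LIMIT 10");
    if (rows != null) { // runSPARQL returns null if evaluation failed
        for (HashMap<String, Value> row : rows) {
            System.out.println(row.get("s")); // Value bound to ?s, or null if unbound
        }
    }
}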
public void tupleQuery() throws QueryEvaluationException, RepositoryException, MalformedQueryException { // /query repo // con.setNamespace("onto", // "<http://it.unibz.krdb/obda/ontologies/test/translation/onto2.owl#>"); // System.out.println(con.getNamespaces().next().toString()); String queryString = "PREFIX : \n<http://it.unibz.krdb/obda/ontologies/test/translation/onto2.owl#>\n " + "SELECT ?x ?y WHERE { ?x a :Person. ?x :age ?y } "; // String queryString = // "SELECT ?x ?y WHERE { ?x a onto:Person. ?x onto:age ?y } "; TupleQuery tupleQuery = (con).prepareTupleQuery(QueryLanguage.SPARQL, queryString); TupleQueryResult result = tupleQuery.evaluate(); System.out.println(result.getBindingNames()); while (result.hasNext()) { BindingSet bindingSet = result.next(); Value valueOfX = bindingSet.getValue("x"); Literal valueOfY = (Literal) bindingSet.getValue("y"); System.out.println(valueOfX.stringValue() + ", " + valueOfY.floatValue()); } result.close(); }
/** * @param queryString * @param mapper * @return * @throws RdfException */ public <T> List<T> queryList(String queryString, BindingSetMapper<T> mapper) throws RdfException { RepositoryConnection repositoryConnection = null; TupleQueryResult result = null; List<T> objects = new LinkedList<T>(); try { repositoryConnection = getConnection(); TupleQuery query = repositoryConnection.prepareTupleQuery(QueryLanguage.SPARQL, queryString); result = query.evaluate(); int row = 0; while (result.hasNext()) { final T mapped = mapper.map(result.next(), row++); if (mapped != null) { objects.add(mapped); } } } catch (RepositoryException e) { throw new RdfException(e); } catch (MalformedQueryException e) { throw new RdfException(e); } catch (QueryEvaluationException e) { throw new RdfException(e); } finally { cleanup(result); } return objects; }
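// A minimal usage sketch for queryList(...) above. It is an assumption that
// BindingSetMapper<T> is a single-method interface of the form "T map(BindingSet bs, int row)",
// as suggested by the call mapper.map(result.next(), row++); the query and variable
// name "label" are illustrative only.
public List<String> listLabels() throws RdfException {
    String q = "SELECT ?label WHERE { ?s <http://www.w3.org/2000/01/rdf-schema#label> ?label }";
    return queryList(q, new BindingSetMapper<String>() {
        @Override
        public String map(BindingSet bs, int row) {
            Value label = bs.getValue("label");
            // Returning null skips the row, mirroring the null check in queryList.
            return label == null ? null : label.stringValue();
        }
    });
}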
private void calculateGraphsAndDataSets() { try { RepositoryConnection conn = repository.getConnection(); TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, SparqlUtils.PREFIXES + QUERY); TupleQueryResult results = q.evaluate(); graphs = new LinkedList<DataCubeGraph>(); datasets = new LinkedList<DataSet>(); String lastG = null; String lastDSD = null; SparqlDCGraph dcGraph; SparqlStructure dcStructure = null; Collection<DataSet> graphDataSets = null; Collection<Structure> graphStructures = null; Collection<DataSet> structDataSets = null; while (results.hasNext()) { BindingSet set = results.next(); String g = set.getValue("g").stringValue(); String ds = set.getValue("ds").stringValue(); String dsd = set.getValue("dsd").stringValue(); if (!g.equals(lastG)) { // new Graph dcGraph = new SparqlDCGraph(repository, g); graphDataSets = new LinkedList<DataSet>(); graphStructures = new LinkedList<Structure>(); dcGraph.setDatasets(graphDataSets); dcGraph.setStructures(graphStructures); graphs.add(dcGraph); // new structure dcStructure = new SparqlStructure(repository, dsd, g); structDataSets = new LinkedList<DataSet>(); dcStructure.setDatasets(structDataSets); graphStructures.add(dcStructure); } else if (!dsd.equals(lastDSD)) { // new structure dcStructure = new SparqlStructure(repository, dsd, g); structDataSets = new LinkedList<DataSet>(); dcStructure.setDatasets(structDataSets); graphStructures.add(dcStructure); } SparqlDataSet dcDataSet = new SparqlDataSet(repository, ds, g); dcDataSet.setStructure(dcStructure); graphDataSets.add(dcDataSet); structDataSets.add(dcDataSet); datasets.add(dcDataSet); lastG = g; lastDSD = dsd; } } catch (RepositoryException ex) { Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex); } catch (MalformedQueryException ex) { Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex); } catch (QueryEvaluationException ex) { Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex); } }
// needed by TupleQuery interface public void evaluate(TupleQueryResultHandler handler) throws QueryEvaluationException, TupleQueryResultHandlerException { TupleQueryResult result = evaluate(); handler.startQueryResult(result.getBindingNames()); while (result.hasNext()) { handler.handleSolution(result.next()); } handler.endQueryResult(); }
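// A small sketch of how the handler-based evaluate(...) above can be used to stream
// results without materializing a TupleQueryResult, here with Sesame's CSV writer
// (org.openrdf.query.resultio.text.csv.SPARQLResultsCSVWriter) as the handler.
// The TupleQuery is assumed to be prepared elsewhere.
public void writeResultsAsCsv(TupleQuery query, OutputStream out)
        throws QueryEvaluationException, TupleQueryResultHandlerException {
    // SPARQLResultsCSVWriter implements TupleQueryResultHandler, so each solution is
    // serialized as it is produced.
    query.evaluate(new SPARQLResultsCSVWriter(out));
}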
/**
 * Prints the bindings of each solution in the given query result.
 *
 * @param result the tuple query result to print
 * @throws QueryEvaluationException
 */
public static void printTupleQueryResult(TupleQueryResult result) throws QueryEvaluationException {
    while (result.hasNext()) {
        // Iterate over the result and print the bindings for ?x and ?y.
        BindingSet bindingSet = result.next();
        Value valueOfX = bindingSet.getValue("x");
        Value valueOfY = bindingSet.getValue("y");
        System.out.print("value of ?x " + valueOfX + " ++++++ ");
        System.out.println("value of ?y " + valueOfY);
    }
}
/**
 * Runs a SPARQL SELECT query (specified by a query string) on the repository.
 *
 * @param sparqlString SELECT query.
 * @return the query solutions, materialized as a list of {@link BindingSet}s.
 * @throws RepositoryException If no connection could be established or the connection fails.
 * @throws MalformedQueryException On query-related errors.
 * @throws QueryEvaluationException On query-related errors.
 */
public List<BindingSet> plainSparqlQuery(String sparqlString)
        throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    RepositoryConnection conn = repo.getConnection();
    try {
        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlString);
        List<BindingSet> ret = new ArrayList<BindingSet>();
        TupleQueryResult res = query.evaluate();
        try {
            while (res.hasNext()) {
                ret.add(res.next());
            }
        } finally {
            res.close();
        }
        return ret;
    } finally {
        // Release the connection even if preparation or evaluation fails.
        conn.close();
    }
}
public List<String> getValuesFor(URI context, URI predicate) throws RepositoryException {
    RepositoryConnection con = repository.getConnection();
    List<String> values = new ArrayList<String>();
    try {
        TupleQuery query = con.prepareTupleQuery(
                QueryLanguage.SPARQL,
                "SELECT DISTINCT ?v WHERE { GRAPH <" + context + "> { ?d <" + predicate + "> ?v } }");
        TupleQueryResult statements = query.evaluate();
        try {
            while (statements.hasNext()) {
                values.add(statements.next().getBinding("v").getValue().stringValue());
            }
        } finally {
            statements.close();
        }
    } catch (MalformedQueryException | QueryEvaluationException e) {
        // Wrap query errors so the declared signature (RepositoryException only) is kept.
        throw new RepositoryException(e);
    } finally {
        // Only close here; returning from a finally block would swallow exceptions.
        con.close();
    }
    return values;
}
@Override public int runQuery(Query q, int run) throws Exception { TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery()); TupleQueryResult res = (TupleQueryResult) query.evaluate(); int resCounter = 0; try { while (res.hasNext()) { if (isInterrupted()) throw new QueryEvaluationException("Thread has been interrupted."); BindingSet bindings = res.next(); resCounter++; earlyResults.handleResult(bindings, resCounter); } } finally { res.close(); } return resCounter; }
private void cleanup(TupleQueryResult result) { try { if (result != null) { result.close(); } } catch (Exception ex) { throw new IllegalStateException(ex); } }
@Override public boolean hasNext() { boolean res; try { res = result.hasNext(); } catch (QueryEvaluationException e) { throw new RuntimeException(e); } return res; }
@Override public QuerySolution next() { BindingSet bs; try { bs = result.next(); } catch (QueryEvaluationException e) { throw new RuntimeException(e); } return new AGQuerySolution(bs, model); }
@Override public int runQueryDebug(Query q, int run, boolean showResult) throws Exception { TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery()); TupleQueryResult res = (TupleQueryResult) query.evaluate(); int resCounter = 0; TupleQueryResultWriter writer = null; boolean writerStarted = false; if (showResult) { OutputStream results = new FileOutputStream( Config.getConfig().getBaseDir() + "/result/" + q.getIdentifier() + "_" + run + ".csv"); TupleQueryResultWriterFactory factory = new SPARQLResultsCSVWriterFactory(); writer = factory.getWriter(results); } while (res.hasNext()) { if (isInterrupted()) throw new QueryEvaluationException("Thread has been interrupted."); BindingSet bindings = res.next(); if (showResult) { if (!writerStarted) { writer.startQueryResult(res.getBindingNames()); writerStarted = true; } writer.handleSolution(bindings); } resCounter++; earlyResults.handleResult(bindings, resCounter); } if (writerStarted) writer.endQueryResult(); return resCounter; }
@Override public List<SesameMatch> runQuery(final Query query, final String queryDefinition) throws RepositoryException, MalformedQueryException, QueryEvaluationException { final List<SesameMatch> results = new ArrayList<>(); TupleQueryResult queryResults; tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryDefinition); queryResults = tupleQuery.evaluate(); try { while (queryResults.hasNext()) { final BindingSet bs = queryResults.next(); final SesameMatch match = SesameMatch.createMatch(query, bs); results.add(match); } } finally { queryResults.close(); } return results; }
public static ArrayList<WidgetPresentationData> parseWidgetPreListByService( TupleQueryResult qres) { ArrayList<WidgetPresentationData> widgetPresentationDataList = new ArrayList<WidgetPresentationData>(); try { while (qres.hasNext()) { BindingSet b = qres.next(); Set names = b.getBindingNames(); WidgetPresentationData widgetPreData = new WidgetPresentationData(); for (Object n : names) { if (((String) n).equalsIgnoreCase("widgetPreID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetPreData.setId(str); System.out.print("widgetPreID: " + widgetPreData.getId() + " "); } else if (((String) n).equalsIgnoreCase("widgetID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetPreData.setWidgetID(str); System.out.print("widgetID: " + widgetPreData.getWidgetID() + " "); } else if (((String) n).equalsIgnoreCase("widgetAttrID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetPreData.setWidgetAttrID(str); System.out.print("widgetAttr: " + widgetPreData.getWidgetAttrID() + " "); } } widgetPresentationDataList.add(widgetPreData); } // while return widgetPresentationDataList; } catch (QueryEvaluationException e) { e.printStackTrace(); return null; } catch (Exception e) { e.printStackTrace(); return null; } }
public TestSuite createTestSuite() throws Exception { // Create test suite TestSuite suite = new TestSuite(N3ParserTestCase.class.getName()); // Add the manifest to a repository and query it Repository repository = new SailRepository(new MemoryStore()); repository.initialize(); RepositoryConnection con = repository.getConnection(); URL url = url(MANIFEST_URL); con.add(url, base(MANIFEST_URL), RDFFormat.TURTLE); // Add all positive parser tests to the test suite String query = "SELECT testURI, inputURL, outputURL " + "FROM {testURI} rdf:type {n3test:PositiveParserTest}; " + " n3test:inputDocument {inputURL}; " + " n3test:outputDocument {outputURL} " + "USING NAMESPACE n3test = <http://www.w3.org/2004/11/n3test#>"; TupleQueryResult queryResult = con.prepareTupleQuery(QueryLanguage.SERQL, query).evaluate(); while (queryResult.hasNext()) { BindingSet bindingSet = queryResult.next(); String testURI = bindingSet.getValue("testURI").toString(); String inputURL = bindingSet.getValue("inputURL").toString(); String outputURL = bindingSet.getValue("outputURL").toString(); suite.addTest(new PositiveParserTest(testURI, inputURL, outputURL)); } queryResult.close(); // Add all negative parser tests to the test suite query = "SELECT testURI, inputURL " + "FROM {testURI} rdf:type {n3test:NegativeParserTest}; " + " n3test:inputDocument {inputURL} " + "USING NAMESPACE n3test = <http://www.w3.org/2004/11/n3test#>"; queryResult = con.prepareTupleQuery(QueryLanguage.SERQL, query).evaluate(); while (queryResult.hasNext()) { BindingSet bindingSet = queryResult.next(); String testURI = bindingSet.getValue("testURI").toString(); String inputURL = bindingSet.getValue("inputURL").toString(); suite.addTest(new NegativeParserTest(testURI, inputURL)); } queryResult.close(); con.close(); repository.shutDown(); return suite; }
public static ArrayList<RootOsmoData> parseOSMOListOfOAMO(TupleQueryResult qres) { ArrayList<RootOsmoData> osmoDataList = new ArrayList<RootOsmoData>(); try { while (qres.hasNext()) { BindingSet b = qres.next(); Set names = b.getBindingNames(); RootOsmoData osmoData = new RootOsmoData(); for (Object n : names) { if (((String) n).equalsIgnoreCase("serviceID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); osmoData.setId(str); System.out.print("serviceID: " + osmoData.getId() + " "); } else if (((String) n).equalsIgnoreCase("srvcName")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); osmoData.setName(str); System.out.print("srvcName : " + osmoData.getName() + " "); } else if (((String) n).equalsIgnoreCase("srvcDesc")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); osmoData.setDesc(str); System.out.print("srvcDesc : " + osmoData.getDesc() + " "); } } osmoDataList.add(osmoData); } // while return osmoDataList; } catch (QueryEvaluationException e) { e.printStackTrace(); return null; } catch (Exception e) { e.printStackTrace(); return null; } }
public static RootOAMOData parseOAMORootData(TupleQueryResult qres) { RootOAMOData rootOAMOData = new RootOAMOData(); try { // while (qres.hasNext()) // { BindingSet b = qres.next(); Set names = b.getBindingNames(); for (Object n : names) { if (((String) n).equalsIgnoreCase("oamoName")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); rootOAMOData.setOamoName(str); System.out.print("oamoName: " + rootOAMOData.getOamoName()); } else if (((String) n).equalsIgnoreCase("userID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); rootOAMOData.setUserID(str); System.out.print("userID: " + rootOAMOData.getUserID()); } // else if(((String) n).equalsIgnoreCase("serviceID")) // { // String str = (b.getValue((String) n)==null) ? null : // b.getValue((String) n).stringValue(); // rootOAMOData.setServiceID(str); // System.out.print("serviceID: "+rootOAMOData.getServiceID()); // } else if (((String) n).equalsIgnoreCase("oamoDesc")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); rootOAMOData.setOamoDesc(str); System.out.println("oamoDesc : " + rootOAMOData.getOamoDesc() + " "); } else if (((String) n).equalsIgnoreCase("oamoGraphMeta")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); rootOAMOData.setOamoGraphMeta(str); System.out.println("oamoGraphMeta : " + rootOAMOData.getOamoGraphMeta() + " "); } } // }//while return rootOAMOData; } catch (QueryEvaluationException e) { e.printStackTrace(); return null; } catch (Exception e) { e.printStackTrace(); return null; } }
public static ArrayList<ServiceStatusData> parseServiceStatusOfOSMO(TupleQueryResult qres) { ArrayList<ServiceStatusData> serviceStatusDataList = new ArrayList<ServiceStatusData>(); try { while (qres.hasNext()) { BindingSet b = qres.next(); Set names = b.getBindingNames(); ServiceStatusData srvcStatusData = new ServiceStatusData(); for (Object n : names) { if (((String) n).equalsIgnoreCase("srvcStatusID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); srvcStatusData.setSrvcStatusID(str); System.out.print("srvcStatus id: " + srvcStatusData.getSrvcStatusID() + " "); } else if (((String) n).equalsIgnoreCase("srvcStatusTime")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); srvcStatusData.setSrvcStatusTime(str); System.out.print("srvcStatusTime : " + srvcStatusData.getSrvcStatusTime() + " "); } else if (((String) n).equalsIgnoreCase("srvcStatus")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); srvcStatusData.setSrvcStatus(str); System.out.print("srvcStatusStatus : " + srvcStatusData.getSrvcStatus() + " "); } } serviceStatusDataList.add(srvcStatusData); } // while return serviceStatusDataList; } catch (QueryEvaluationException e) { e.printStackTrace(); return null; } catch (Exception e) { e.printStackTrace(); return null; } }
public static ArrayList<WidgetAttr> parseWidgetAttributes(TupleQueryResult qres) { ArrayList<WidgetAttr> widgetAttrList = new ArrayList<WidgetAttr>(); try { while (qres.hasNext()) { BindingSet b = qres.next(); Set names = b.getBindingNames(); WidgetAttr widgetAttr = new WidgetAttr(); for (Object n : names) { if (((String) n).equalsIgnoreCase("widgetAttrID")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetAttr.setId(str); System.out.print("widgetattr id: " + widgetAttr.getId() + " "); } else if (((String) n).equalsIgnoreCase("widgetAttrName")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetAttr.setName(str); System.out.print("widgetAttrName: " + widgetAttr.getName() + " "); } else if (((String) n).equalsIgnoreCase("widgetAttrDesc")) { String str = (b.getValue((String) n) == null) ? null : b.getValue((String) n).stringValue(); widgetAttr.setValue(str); System.out.print("widgetAttrDesc: " + widgetAttr.getValue() + " "); } } widgetAttrList.add(widgetAttr); } // while return widgetAttrList; } catch (QueryEvaluationException e) { e.printStackTrace(); return null; } catch (Exception e) { e.printStackTrace(); return null; } }
protected static String getManifestName( Repository manifestRep, RepositoryConnection con, String manifestFileURL) throws QueryEvaluationException, RepositoryException, MalformedQueryException { // Try to extract suite name from manifest file TupleQuery manifestNameQuery = con.prepareTupleQuery( QueryLanguage.SERQL, "SELECT ManifestName FROM {ManifestURL} rdfs:label {ManifestName}"); manifestNameQuery.setBinding( "ManifestURL", manifestRep.getValueFactory().createURI(manifestFileURL)); TupleQueryResult manifestNames = manifestNameQuery.evaluate(); try { if (manifestNames.hasNext()) { return manifestNames.next().getValue("ManifestName").stringValue(); } } finally { manifestNames.close(); } // Derive name from manifest URL int lastSlashIdx = manifestFileURL.lastIndexOf('/'); int secLastSlashIdx = manifestFileURL.lastIndexOf('/', lastSlashIdx - 1); return manifestFileURL.substring(secLastSlashIdx + 1, lastSlashIdx); }
protected void getTerms( Repository repos, String name, String uri, List<RDFSClass> classes, List<RDFSProperty> properties) throws VocabularyImportException { try { RepositoryConnection con = repos.getConnection(); try { TupleQuery query = con.prepareTupleQuery(QueryLanguage.SPARQL, CLASSES_QUERY_P1 + uri + CLASSES_QUERY_P2); TupleQueryResult res = query.evaluate(); Set<String> seen = new HashSet<String>(); while (res.hasNext()) { BindingSet solution = res.next(); String clazzURI = solution.getValue("resource").stringValue(); if (seen.contains(clazzURI)) { continue; } seen.add(clazzURI); String label = getFirstNotNull( new Value[] {solution.getValue("en_label"), solution.getValue("label")}); String description = getFirstNotNull( new Value[] { solution.getValue("en_definition"), solution.getValue("definition"), solution.getValue("en_description"), solution.getValue("description") }); RDFSClass clazz = new RDFSClass(clazzURI, label, description, name, uri); classes.add(clazz); } query = con.prepareTupleQuery( QueryLanguage.SPARQL, PROPERTIES_QUERY_P1 + uri + PROPERTIES_QUERY_P2); res = query.evaluate(); seen = new HashSet<String>(); while (res.hasNext()) { BindingSet solution = res.next(); String propertyUri = solution.getValue("resource").stringValue(); if (seen.contains(propertyUri)) { continue; } seen.add(propertyUri); String label = getFirstNotNull( new Value[] {solution.getValue("en_label"), solution.getValue("label")}); String description = getFirstNotNull( new Value[] { solution.getValue("en_definition"), solution.getValue("definition"), solution.getValue("en_description"), solution.getValue("description") }); RDFSProperty prop = new RDFSProperty(propertyUri, label, description, name, uri); properties.add(prop); } } catch (Exception ex) { throw new VocabularyImportException( "Error while processing vocabulary retrieved from " + uri, ex); } finally { con.close(); } } catch (RepositoryException ex) { throw new VocabularyImportException( "Error while processing vocabulary retrieved from " + uri, ex); } }
public void test_query() throws Exception { final BigdataSail sail = getSail(); try { sail.initialize(); if (!((BigdataSail) sail).database.getStatementIdentifiers()) { log.warn("Statement identifiers are not enabled"); return; } /* * Load data into the sail. */ { final DataLoader dataLoader = sail.database.getDataLoader(); dataLoader.loadData( "/com/bigdata/rdf/sail/provenance01.ttlx", "" /*baseURL*/, ServiceProviderHook.TURTLE_RDR); } /* * Serialize as RDF/XML. * * Note: This is just for debugging. */ if (log.isInfoEnabled()) { final BigdataStatementIterator itr = sail.database.getStatements(null, null, null); final String rdfXml; try { final Writer w = new StringWriter(); // final RDFXMLWriter rdfWriter = new RDFXMLWriter(w); final RDFWriterFactory writerFactory = RDFWriterRegistry.getInstance().get(RDFFormat.RDFXML); assertNotNull(writerFactory); final RDFWriter rdfWriter = writerFactory.getWriter(w); rdfWriter.startRDF(); while (itr.hasNext()) { final BigdataStatementImpl stmt = (BigdataStatementImpl) itr.next(); // only write the explicit statements. if (!stmt.isExplicit()) continue; rdfWriter.handleStatement(stmt); } rdfWriter.endRDF(); rdfXml = w.toString(); } finally { itr.close(); } // write the rdf/xml log.info(rdfXml); } final SailConnection conn = sail.getConnection(); try { final URI y = new URIImpl("http://www.foo.org/y"); final URI B = new URIImpl("http://www.foo.org/B"); final URI dcCreator = new URIImpl("http://purl.org/dc/terms/creator"); final Literal bryan = new LiteralImpl("bryan"); final Literal mike = new LiteralImpl("mike"); /* * This is a hand-coded query. * * Note: When statement identifiers are enabled, the only way to * bind the context position is to already have a statement on hand - * there is no index which can be used to look up a statement by its * context and the context is always a blank node. */ // final TupleExpr tupleExpr = // new Projection( // new Join(// // new StatementPattern(// // new Var("X", y),// // new Var("1", RDF.TYPE),// // new Var("2", B),// // new Var("SID")),// unbound. // new StatementPattern(// // new Var("SID"),// // new Var("3", dcCreator),// // new Var("Y"))), // new ProjectionElemList(new ProjectionElem[] { new ProjectionElem( "Y" // )})); // final String q = "select ?Y where { ?SID <"+dcCreator+"> ?Y . graph ?SID { // <"+y+"> <"+RDF.TYPE+"> <"+B+"> . } }"; final String q = "select ?Y where { <<<" + y + "> <" + RDF.TYPE + "> <" + B + ">>> <" + dcCreator + "> ?Y . }"; /* * Create a data set consisting of the contexts to be queried. * * Note: a [null] DataSet will cause context to be ignored when the * query is processed. */ // final DatasetImpl dataSet = null; //new DatasetImpl(); // // final BindingSet bindingSet = new QueryBindingSet(); // // final CloseableIteration<? extends BindingSet, QueryEvaluationException> itr = // conn // .evaluate(tupleExpr, dataSet, bindingSet, true/* includeInferred */); final TupleQuery tq = new BigdataSailRepository(sail) .getReadOnlyConnection() .prepareTupleQuery(QueryLanguage.SPARQL, q); final TupleQueryResult itr = tq.evaluate(); if (log.isInfoEnabled()) log.info("Verifying query."); /* * These are the expected results for the query (the bindings for Y). */ final Set<Value> expected = new HashSet<Value>(); expected.add(bryan); expected.add(mike); /* * Verify that the query results is the correct solutions. 
*/ final int nresults = expected.size(); try { int i = 0; while (itr.hasNext()) { final BindingSet solution = itr.next(); if (log.isInfoEnabled()) log.info("solution[" + i + "] : " + solution); final Value actual = solution.getValue("Y"); assertTrue("Not expecting Y=" + actual, expected.remove(actual)); i++; } assertEquals("#results", nresults, i); } finally { itr.close(); } } finally { conn.close(); } } finally { sail.__tearDownUnitTest(); } }
// sparql public void sparql() throws IOException, RepositoryException, QueryEvaluationException { // find all expired data to avoid them participating the query: this.toDeleteCounter = 0; this.dropQueryString = ""; ArrayList<GraphIdCounterPair> expiredData = new ArrayList<GraphIdCounterPair>(); LocalTime evictionTime = LocalTime.now(); for (GraphIdCounterPair x : this.cacheContentOfGraphIds) { System.out.print( this.evictCounter + ", " + this.size + ", " + this.evictAmount + ", " + x.graphId + ", " + x.arrivalTime + ", " + x.expirationTime); if (x.expirationTime.isBefore(evictionTime)) { expiredData.add(x); dropQueryString += "drop graph <" + x.graphId + ">;"; System.out.println(", expired"); ++toDeleteCounter; } else { System.out.println(); } } System.out.println("[INFO] " + expiredData.size() + " data expired!"); if (!expiredData.isEmpty()) { // delete expired data from the cache for (GraphIdCounterPair x : expiredData) { this.cacheContentOfGraphIds.remove(x); } // delete the expired data from the database QueryExecution qe = AGQueryExecutionFactory.create(AGQueryFactory.create(dropQueryString), model); qe.execAsk(); qe.close(); } // after deleting expired data, load the cache again this.streamEmulation(); System.out.println(this.reasoner.getEntailmentRegime()); this.infModel = new AGInfModel(this.reasoner, this.model); String queryString = "select distinct ?s " + "where { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type>" + "<http://swat.cse.lehigh.edu/onto/univ-bench.owl#Professor>.}"; AGRepositoryConnection conn = this.client.getAGConn(); TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString); tupleQuery.setIncludeInferred(true); long sparqlStartTime = System.currentTimeMillis(); TupleQueryResult resultSet = tupleQuery.evaluate(); long sparqlEndTime = System.currentTimeMillis(); this.aveSparql += (sparqlEndTime - sparqlStartTime); ArrayList<String> results = new ArrayList<String>(); while (resultSet.hasNext()) { String result = resultSet.next().toString(); System.out.println("result"); int length = result.length(); results.add(result.substring(3, length - 1)); } resultSet.close(); this.fMeasureBench(results); this.infModel.close(); }
public void testIsLiteral() throws Exception { final BigdataSail sail = getSail(); sail.initialize(); final BigdataSailRepository repo = new BigdataSailRepository(sail); final BigdataSailRepositoryConnection cxn = (BigdataSailRepositoryConnection) repo.getConnection(); cxn.setAutoCommit(false); try { final ValueFactory vf = sail.getValueFactory(); URI A = vf.createURI("_:A"); URI B = vf.createURI("_:B"); URI X = vf.createURI("_:X"); URI AGE = vf.createURI("_:AGE"); Literal _25 = vf.createLiteral(25); Literal _45 = vf.createLiteral(45); cxn.add(A, RDF.TYPE, X); cxn.add(B, RDF.TYPE, X); cxn.add(A, AGE, _25); cxn.add(B, AGE, _45); /* * Note: The either flush() or commit() is required to flush the * statement buffers to the database before executing any operations * that go around the sail. */ cxn.flush(); // commit(); if (log.isInfoEnabled()) { log.info("\n" + sail.getDatabase().dumpStore()); } { String query = "select ?s ?age " + "WHERE { " + " ?s <" + RDF.TYPE + "> <" + X + "> . " + " ?s <" + AGE + "> ?age . " + " FILTER( isLiteral(?age) ) . " + "}"; final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); if (log.isInfoEnabled()) { final TupleQueryResult result = tupleQuery.evaluate(); log.info("results:"); if (!result.hasNext()) { log.info("no results."); } while (result.hasNext()) { log.info(result.next()); } } final TupleQueryResult result = tupleQuery.evaluate(); Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add( createBindingSet(new Binding[] {new BindingImpl("s", A), new BindingImpl("age", _25)})); solution.add( createBindingSet(new Binding[] {new BindingImpl("s", B), new BindingImpl("age", _45)})); compare(result, solution); } { String query = "select ?s ?age " + "WHERE { " + " ?s <" + RDF.TYPE + "> <" + X + "> . " + " ?s <" + AGE + "> ?age . " + " FILTER( isLiteral(" + _25.toString() + ") ) . " + "}"; final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query); if (log.isInfoEnabled()) { final TupleQueryResult result = tupleQuery.evaluate(); log.info("results:"); if (!result.hasNext()) { log.info("no results."); } while (result.hasNext()) { log.info(result.next()); } } final TupleQueryResult result = tupleQuery.evaluate(); Collection<BindingSet> solution = new LinkedList<BindingSet>(); solution.add( createBindingSet(new Binding[] {new BindingImpl("s", A), new BindingImpl("age", _25)})); solution.add( createBindingSet(new Binding[] {new BindingImpl("s", B), new BindingImpl("age", _45)})); compare(result, solution); } } finally { cxn.close(); sail.__tearDownUnitTest(); } }
public static TestSuite suite(String manifestFileURL, Factory factory, boolean approvedOnly) throws Exception { logger.info("Building test suite for {}", manifestFileURL); TestSuite suite = new TestSuite(factory.getClass().getName()); // Read manifest and create declared test cases Repository manifestRep = new SailRepository(new MemoryStore()); manifestRep.initialize(); RepositoryConnection con = manifestRep.getConnection(); ManifestTest.addTurtle(con, new URL(manifestFileURL), manifestFileURL); suite.setName(getManifestName(manifestRep, con, manifestFileURL)); // Extract test case information from the manifest file. Note that we // only // select those test cases that are mentioned in the list. StringBuilder query = new StringBuilder(512); query.append( " SELECT DISTINCT testURI, testName, resultFile, action, queryFile, defaultGraph, ordered "); query.append(" FROM {} rdf:first {testURI} "); if (approvedOnly) { query.append(" dawgt:approval {dawgt:Approved}; "); } query.append(" mf:name {testName}; "); query.append(" mf:result {resultFile}; "); query.append(" [ mf:checkOrder {ordered} ]; "); query.append(" [ mf:requires {Requirement} ];"); query.append(" mf:action {action} qt:query {queryFile}; "); query.append(" [qt:data {defaultGraph}]; "); query.append(" [sd:entailmentRegime {Regime} ]"); // skip tests involving CSV result files, these are not query tests query.append(" WHERE NOT resultFile LIKE \"*.csv\" "); // skip tests involving JSON, sesame currently does not have a // SPARQL/JSON // parser. query.append(" AND NOT resultFile LIKE \"*.srj\" "); // skip tests involving entailment regimes query.append(" AND NOT BOUND(Regime) "); // skip test involving basic federation, these are tested separately. query.append(" AND (NOT BOUND(Requirement) OR (Requirement != mf:BasicFederation)) "); query.append(" USING NAMESPACE "); query.append(" mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, "); query.append(" dawgt = <http://www.w3.org/2001/sw/DataAccess/tests/test-dawg#>, "); query.append(" qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>, "); query.append(" sd = <http://www.w3.org/ns/sparql-service-description#>, "); query.append(" ent = <http://www.w3.org/ns/entailment/> "); TupleQuery testCaseQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString()); query.setLength(0); query.append(" SELECT graph "); query.append(" FROM {action} qt:graphData {graph} "); query.append(" USING NAMESPACE "); query.append(" qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>"); TupleQuery namedGraphsQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString()); query.setLength(0); query.append("SELECT 1 "); query.append(" FROM {testURI} mf:resultCardinality {mf:LaxCardinality}"); query.append( " USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>"); TupleQuery laxCardinalityQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString()); logger.debug("evaluating query.."); TupleQueryResult testCases = testCaseQuery.evaluate(); while (testCases.hasNext()) { BindingSet bindingSet = testCases.next(); URI testURI = (URI) bindingSet.getValue("testURI"); String testName = bindingSet.getValue("testName").toString(); String resultFile = bindingSet.getValue("resultFile").toString(); String queryFile = bindingSet.getValue("queryFile").toString(); URI defaultGraphURI = (URI) bindingSet.getValue("defaultGraph"); Value action = bindingSet.getValue("action"); Value ordered = bindingSet.getValue("ordered"); logger.debug("found test 
case : {}", testName); // Query named graphs namedGraphsQuery.setBinding("action", action); TupleQueryResult namedGraphs = namedGraphsQuery.evaluate(); DatasetImpl dataset = null; if (defaultGraphURI != null || namedGraphs.hasNext()) { dataset = new DatasetImpl(); if (defaultGraphURI != null) { dataset.addDefaultGraph(defaultGraphURI); } while (namedGraphs.hasNext()) { BindingSet graphBindings = namedGraphs.next(); URI namedGraphURI = (URI) graphBindings.getValue("graph"); logger.debug(" adding named graph : {}", namedGraphURI); dataset.addNamedGraph(namedGraphURI); } } // Check for lax-cardinality conditions boolean laxCardinality = false; laxCardinalityQuery.setBinding("testURI", testURI); TupleQueryResult laxCardinalityResult = laxCardinalityQuery.evaluate(); try { laxCardinality = laxCardinalityResult.hasNext(); } finally { laxCardinalityResult.close(); } // if this is enabled, Sesame passes all tests, showing that the // only // difference is the semantics of arbitrary-length // paths /* * if (!laxCardinality) { // property-path tests always with lax * cardinality because Sesame filters out duplicates by design if * (testURI.stringValue().contains("property-path")) { * laxCardinality = true; } } */ // check if we should test for query result ordering boolean checkOrder = false; if (ordered != null) { checkOrder = Boolean.parseBoolean(ordered.stringValue()); } SPARQLQueryTest test = factory.createSPARQLQueryTest( testURI.toString(), testName, queryFile, resultFile, dataset, laxCardinality, checkOrder); if (test != null) { suite.addTest(test); } } testCases.close(); con.close(); manifestRep.shutDown(); logger.info("Created test suite with " + suite.countTestCases() + " test cases."); return suite; }