public TestSuite createTestSuite() throws Exception {
    // Create test suite
    TestSuite suite = new TestSuite(N3ParserTestCase.class.getName());

    // Add the manifest to a repository and query it
    Repository repository = new SailRepository(new MemoryStore());
    repository.initialize();
    RepositoryConnection con = repository.getConnection();

    URL url = url(MANIFEST_URL);
    con.add(url, base(MANIFEST_URL), RDFFormat.TURTLE);

    // Add all positive parser tests to the test suite
    String query =
        "SELECT testURI, inputURL, outputURL "
            + "FROM {testURI} rdf:type {n3test:PositiveParserTest}; "
            + "              n3test:inputDocument {inputURL}; "
            + "              n3test:outputDocument {outputURL} "
            + "USING NAMESPACE n3test = <http://www.w3.org/2004/11/n3test#>";
    TupleQueryResult queryResult = con.prepareTupleQuery(QueryLanguage.SERQL, query).evaluate();
    while (queryResult.hasNext()) {
        BindingSet bindingSet = queryResult.next();
        String testURI = bindingSet.getValue("testURI").toString();
        String inputURL = bindingSet.getValue("inputURL").toString();
        String outputURL = bindingSet.getValue("outputURL").toString();
        suite.addTest(new PositiveParserTest(testURI, inputURL, outputURL));
    }
    queryResult.close();

    // Add all negative parser tests to the test suite
    query =
        "SELECT testURI, inputURL "
            + "FROM {testURI} rdf:type {n3test:NegativeParserTest}; "
            + "              n3test:inputDocument {inputURL} "
            + "USING NAMESPACE n3test = <http://www.w3.org/2004/11/n3test#>";
    queryResult = con.prepareTupleQuery(QueryLanguage.SERQL, query).evaluate();
    while (queryResult.hasNext()) {
        BindingSet bindingSet = queryResult.next();
        String testURI = bindingSet.getValue("testURI").toString();
        String inputURL = bindingSet.getValue("inputURL").toString();
        suite.addTest(new NegativeParserTest(testURI, inputURL));
    }
    queryResult.close();

    con.close();
    repository.shutDown();

    return suite;
}
public void tupleQuery() throws QueryEvaluationException, RepositoryException, MalformedQueryException {
    // /query repo
    // con.setNamespace("onto", "<http://it.unibz.krdb/obda/ontologies/test/translation/onto2.owl#>");
    // System.out.println(con.getNamespaces().next().toString());
    String queryString =
        "PREFIX : <http://it.unibz.krdb/obda/ontologies/test/translation/onto2.owl#>\n"
            + "SELECT ?x ?y WHERE { ?x a :Person. ?x :age ?y } ";
    // String queryString = "SELECT ?x ?y WHERE { ?x a onto:Person. ?x onto:age ?y } ";

    TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    TupleQueryResult result = tupleQuery.evaluate();
    System.out.println(result.getBindingNames());

    while (result.hasNext()) {
        BindingSet bindingSet = result.next();
        Value valueOfX = bindingSet.getValue("x");
        Literal valueOfY = (Literal) bindingSet.getValue("y");
        System.out.println(valueOfX.stringValue() + ", " + valueOfY.floatValue());
    }
    result.close();
}
public Collection<String> getAllDisjointProperties(String startingProperty)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> allDisjointProperties = new HashSet<>();
    RepositoryConnection con = this.repository.getConnection();
    try {
        // Declare the owl: and rdf: prefixes so the query parses standalone.
        TupleQuery allDisjointPropertiesQuery = con.prepareTupleQuery(
            QueryLanguage.SPARQL,
            "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
                + "SELECT ?items "
                + "WHERE { ?y a owl:AllDisjointProperties . "
                + "?y owl:members ?members . "
                + "?members rdf:rest*/rdf:first <" + startingProperty + "> . "
                + "?members rdf:rest*/rdf:first ?items . "
                + "FILTER (?items != <" + startingProperty + ">) . }");
        TupleQueryResult allDisjointPropertiesResult = allDisjointPropertiesQuery.evaluate();
        try {
            while (allDisjointPropertiesResult.hasNext()) {
                BindingSet bindingSet = allDisjointPropertiesResult.next();
                Value name = bindingSet.getValue("items");
                allDisjointProperties.add(name.toString());
            }
        } finally {
            allDisjointPropertiesResult.close();
        }
    } finally {
        con.close();
    }
    return allDisjointProperties;
}
public Collection<String> getIntersectionOf(String startingClass)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> intersectionOfClasses = new HashSet<>();
    RepositoryConnection con = this.repository.getConnection();
    try {
        // Declare the owl: and rdf: prefixes so the query parses standalone.
        TupleQuery intersectionOfQuery = con.prepareTupleQuery(
            QueryLanguage.SPARQL,
            "PREFIX owl: <http://www.w3.org/2002/07/owl#> "
                + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
                + "SELECT ?c WHERE { ?c owl:intersectionOf ?l . "
                + "?l rdf:rest*/rdf:first <" + startingClass + "> . }");
        TupleQueryResult intersectionOfResult = intersectionOfQuery.evaluate();
        try {
            while (intersectionOfResult.hasNext()) {
                BindingSet bindingSet = intersectionOfResult.next();
                Value name = bindingSet.getValue("c");
                intersectionOfClasses.add(name.toString());
                // System.out.println("intersections Ofs : " + name);
            }
        } finally {
            intersectionOfResult.close();
        }
    } finally {
        con.close();
    }
    return intersectionOfClasses;
}
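// Shared helper (a minimal sketch, not part of the original code): getAllDisjointProperties()
// and getIntersectionOf() above drain one projected variable into a Set<String> with the same
// try/finally pattern. The names "selectInto" and "varName" are made up for illustration; the
// calls themselves are plain Sesame API.
private Set<String> selectInto(RepositoryConnection con, String sparql, String varName)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> values = new HashSet<>();
    TupleQueryResult result = con.prepareTupleQuery(QueryLanguage.SPARQL, sparql).evaluate();
    try {
        while (result.hasNext()) {
            // Collect the string form of the binding for the requested variable.
            values.add(result.next().getValue(varName).toString());
        }
    } finally {
        result.close();
    }
    return values;
}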
private void cleanup(TupleQueryResult result) {
    try {
        if (result != null) {
            result.close();
        }
    } catch (Exception ex) {
        throw new IllegalStateException(ex);
    }
}
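// Illustrative only (not from the original code): one way the cleanup() helper above might be
// used, so the TupleQueryResult is released even if iteration fails. The field "con" and the
// query string are assumptions made up for this sketch.
private long countPersons() throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    TupleQueryResult result = null;
    long count = 0;
    try {
        result = con.prepareTupleQuery(
                QueryLanguage.SPARQL,
                "SELECT ?s WHERE { ?s a <http://xmlns.com/foaf/0.1/Person> }")
            .evaluate();
        while (result.hasNext()) {
            result.next();
            count++;
        }
    } finally {
        cleanup(result); // never leaves the underlying iteration open
    }
    return count;
}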
public List<String> getValuesFor(URI context, URI predicate)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    RepositoryConnection con = repository.getConnection();
    List<String> values = new ArrayList<String>();
    try {
        TupleQuery query = con.prepareTupleQuery(
            QueryLanguage.SPARQL,
            "SELECT DISTINCT ?v WHERE { GRAPH <" + context + "> { ?d <" + predicate + "> ?v } }");
        TupleQueryResult statements = query.evaluate();
        try {
            while (statements.hasNext()) {
                values.add(statements.next().getBinding("v").getValue().stringValue());
            }
        } finally {
            statements.close();
        }
    } finally {
        con.close();
    }
    // Return after the finally block; returning from inside finally (as before) silently
    // swallowed any exception thrown while querying.
    return values;
}
@Override
public int runQuery(Query q, int run) throws Exception {
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    TupleQueryResult res = query.evaluate();
    int resCounter = 0;
    try {
        while (res.hasNext()) {
            if (isInterrupted()) {
                throw new QueryEvaluationException("Thread has been interrupted.");
            }
            BindingSet bindings = res.next();
            resCounter++;
            earlyResults.handleResult(bindings, resCounter);
        }
    } finally {
        res.close();
    }
    return resCounter;
}
@Override
public List<SesameMatch> runQuery(final Query query, final String queryDefinition)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    final List<SesameMatch> results = new ArrayList<>();

    tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryDefinition);
    final TupleQueryResult queryResults = tupleQuery.evaluate();
    try {
        while (queryResults.hasNext()) {
            final BindingSet bs = queryResults.next();
            final SesameMatch match = SesameMatch.createMatch(query, bs);
            results.add(match);
        }
    } finally {
        queryResults.close();
    }
    return results;
}
protected static String getManifestName(
    Repository manifestRep, RepositoryConnection con, String manifestFileURL)
    throws QueryEvaluationException, RepositoryException, MalformedQueryException {
    // Try to extract suite name from manifest file
    TupleQuery manifestNameQuery = con.prepareTupleQuery(
        QueryLanguage.SERQL,
        "SELECT ManifestName FROM {ManifestURL} rdfs:label {ManifestName}");
    manifestNameQuery.setBinding(
        "ManifestURL", manifestRep.getValueFactory().createURI(manifestFileURL));
    TupleQueryResult manifestNames = manifestNameQuery.evaluate();
    try {
        if (manifestNames.hasNext()) {
            return manifestNames.next().getValue("ManifestName").stringValue();
        }
    } finally {
        manifestNames.close();
    }

    // Derive name from manifest URL
    int lastSlashIdx = manifestFileURL.lastIndexOf('/');
    int secLastSlashIdx = manifestFileURL.lastIndexOf('/', lastSlashIdx - 1);
    return manifestFileURL.substring(secLastSlashIdx + 1, lastSlashIdx);
}
// sparql
public void sparql() throws IOException, RepositoryException, QueryEvaluationException {
    // find all expired data to keep it from participating in the query:
    this.toDeleteCounter = 0;
    this.dropQueryString = "";
    ArrayList<GraphIdCounterPair> expiredData = new ArrayList<GraphIdCounterPair>();
    LocalTime evictionTime = LocalTime.now();
    for (GraphIdCounterPair x : this.cacheContentOfGraphIds) {
        System.out.print(this.evictCounter + ", " + this.size + ", " + this.evictAmount
            + ", " + x.graphId + ", " + x.arrivalTime + ", " + x.expirationTime);
        if (x.expirationTime.isBefore(evictionTime)) {
            expiredData.add(x);
            dropQueryString += "drop graph <" + x.graphId + ">;";
            System.out.println(", expired");
            ++toDeleteCounter;
        } else {
            System.out.println();
        }
    }
    System.out.println("[INFO] " + expiredData.size() + " data expired!");

    if (!expiredData.isEmpty()) {
        // delete expired data from the cache
        for (GraphIdCounterPair x : expiredData) {
            this.cacheContentOfGraphIds.remove(x);
        }
        // delete the expired data from the database
        QueryExecution qe =
            AGQueryExecutionFactory.create(AGQueryFactory.create(dropQueryString), model);
        qe.execAsk();
        qe.close();
    }

    // after deleting expired data, load the cache again
    this.streamEmulation();

    System.out.println(this.reasoner.getEntailmentRegime());
    this.infModel = new AGInfModel(this.reasoner, this.model);

    String queryString = "select distinct ?s "
        + "where { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> "
        + "<http://swat.cse.lehigh.edu/onto/univ-bench.owl#Professor> . }";
    AGRepositoryConnection conn = this.client.getAGConn();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleQuery.setIncludeInferred(true);

    long sparqlStartTime = System.currentTimeMillis();
    TupleQueryResult resultSet = tupleQuery.evaluate();
    long sparqlEndTime = System.currentTimeMillis();
    this.aveSparql += (sparqlEndTime - sparqlStartTime);

    ArrayList<String> results = new ArrayList<String>();
    while (resultSet.hasNext()) {
        String result = resultSet.next().toString();
        System.out.println(result); // print the binding itself, not the literal text "result"
        int length = result.length();
        results.add(result.substring(3, length - 1));
    }
    resultSet.close();

    this.fMeasureBench(results);
    this.infModel.close();
}
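// Illustrative alternative (not from the original code): the result loop in sparql() above
// recovers the subject IRI by slicing the string form of the BindingSet ("[s=...]"). Reading
// the binding directly is less fragile. The variable name "s" matches the query in sparql();
// the method name is made up for this sketch.
private ArrayList<String> collectSubjects(TupleQueryResult resultSet) throws QueryEvaluationException {
    ArrayList<String> results = new ArrayList<String>();
    try {
        while (resultSet.hasNext()) {
            BindingSet bindingSet = resultSet.next();
            // getValue("s") returns the bound Value; stringValue() yields the plain IRI text.
            results.add(bindingSet.getValue("s").stringValue());
        }
    } finally {
        resultSet.close();
    }
    return results;
}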
public void test_query() throws Exception {
    final BigdataSail sail = getSail();
    try {
        sail.initialize();

        if (!((BigdataSail) sail).database.getStatementIdentifiers()) {
            log.warn("Statement identifiers are not enabled");
            return;
        }

        /*
         * Load data into the sail.
         */
        {
            final DataLoader dataLoader = sail.database.getDataLoader();
            dataLoader.loadData(
                "/com/bigdata/rdf/sail/provenance01.ttlx",
                "" /* baseURL */,
                ServiceProviderHook.TURTLE_RDR);
        }

        /*
         * Serialize as RDF/XML.
         *
         * Note: This is just for debugging.
         */
        if (log.isInfoEnabled()) {
            final BigdataStatementIterator itr = sail.database.getStatements(null, null, null);
            final String rdfXml;
            try {
                final Writer w = new StringWriter();
                // final RDFXMLWriter rdfWriter = new RDFXMLWriter(w);
                final RDFWriterFactory writerFactory =
                    RDFWriterRegistry.getInstance().get(RDFFormat.RDFXML);
                assertNotNull(writerFactory);
                final RDFWriter rdfWriter = writerFactory.getWriter(w);
                rdfWriter.startRDF();
                while (itr.hasNext()) {
                    final BigdataStatementImpl stmt = (BigdataStatementImpl) itr.next();
                    // only write the explicit statements.
                    if (!stmt.isExplicit()) continue;
                    rdfWriter.handleStatement(stmt);
                }
                rdfWriter.endRDF();
                rdfXml = w.toString();
            } finally {
                itr.close();
            }
            // write the rdf/xml
            log.info(rdfXml);
        }

        final SailConnection conn = sail.getConnection();
        try {
            final URI y = new URIImpl("http://www.foo.org/y");
            final URI B = new URIImpl("http://www.foo.org/B");
            final URI dcCreator = new URIImpl("http://purl.org/dc/terms/creator");
            final Literal bryan = new LiteralImpl("bryan");
            final Literal mike = new LiteralImpl("mike");

            /*
             * This is a hand-coded query.
             *
             * Note: When statement identifiers are enabled, the only way to
             * bind the context position is to already have a statement on hand -
             * there is no index which can be used to look up a statement by its
             * context and the context is always a blank node.
             */
            // final TupleExpr tupleExpr =
            //     new Projection(
            //         new Join(
            //             new StatementPattern(
            //                 new Var("X", y),
            //                 new Var("1", RDF.TYPE),
            //                 new Var("2", B),
            //                 new Var("SID")), // unbound.
            //             new StatementPattern(
            //                 new Var("SID"),
            //                 new Var("3", dcCreator),
            //                 new Var("Y"))),
            //         new ProjectionElemList(new ProjectionElem[] { new ProjectionElem("Y") }));

            // final String q = "select ?Y where { ?SID <" + dcCreator + "> ?Y . graph ?SID { "
            //     + "<" + y + "> <" + RDF.TYPE + "> <" + B + "> . } }";

            final String q = "select ?Y where { <<<" + y + "> <" + RDF.TYPE + "> <" + B
                + ">>> <" + dcCreator + "> ?Y . }";

            /*
             * Create a data set consisting of the contexts to be queried.
             *
             * Note: a [null] DataSet will cause context to be ignored when the
             * query is processed.
             */
            // final DatasetImpl dataSet = null; // new DatasetImpl();
            //
            // final BindingSet bindingSet = new QueryBindingSet();
            //
            // final CloseableIteration<? extends BindingSet, QueryEvaluationException> itr = conn
            //     .evaluate(tupleExpr, dataSet, bindingSet, true /* includeInferred */);

            final TupleQuery tq = new BigdataSailRepository(sail)
                .getReadOnlyConnection()
                .prepareTupleQuery(QueryLanguage.SPARQL, q);
            final TupleQueryResult itr = tq.evaluate();

            if (log.isInfoEnabled()) log.info("Verifying query.");

            /*
             * These are the expected results for the query (the bindings for Y).
             */
            final Set<Value> expected = new HashSet<Value>();
            expected.add(bryan);
            expected.add(mike);

            /*
             * Verify that the query returns the expected solutions.
             */
            final int nresults = expected.size();
            try {
                int i = 0;
                while (itr.hasNext()) {
                    final BindingSet solution = itr.next();
                    if (log.isInfoEnabled()) log.info("solution[" + i + "] : " + solution);
                    final Value actual = solution.getValue("Y");
                    assertTrue("Not expecting Y=" + actual, expected.remove(actual));
                    i++;
                }
                assertEquals("#results", nresults, i);
            } finally {
                itr.close();
            }
        } finally {
            conn.close();
        }
    } finally {
        sail.__tearDownUnitTest();
    }
}
public static TestSuite suite(String manifestFileURL, Factory factory, boolean approvedOnly)
    throws Exception {
    logger.info("Building test suite for {}", manifestFileURL);

    TestSuite suite = new TestSuite(factory.getClass().getName());

    // Read manifest and create declared test cases
    Repository manifestRep = new SailRepository(new MemoryStore());
    manifestRep.initialize();
    RepositoryConnection con = manifestRep.getConnection();

    ManifestTest.addTurtle(con, new URL(manifestFileURL), manifestFileURL);

    suite.setName(getManifestName(manifestRep, con, manifestFileURL));

    // Extract test case information from the manifest file. Note that we only
    // select those test cases that are mentioned in the list.
    StringBuilder query = new StringBuilder(512);
    query.append(
        " SELECT DISTINCT testURI, testName, resultFile, action, queryFile, defaultGraph, ordered ");
    query.append(" FROM {} rdf:first {testURI} ");
    if (approvedOnly) {
        query.append(" dawgt:approval {dawgt:Approved}; ");
    }
    query.append(" mf:name {testName}; ");
    query.append(" mf:result {resultFile}; ");
    query.append(" [ mf:checkOrder {ordered} ]; ");
    query.append(" [ mf:requires {Requirement} ];");
    query.append(" mf:action {action} qt:query {queryFile}; ");
    query.append(" [qt:data {defaultGraph}]; ");
    query.append(" [sd:entailmentRegime {Regime} ]");

    // skip tests involving CSV result files, these are not query tests
    query.append(" WHERE NOT resultFile LIKE \"*.csv\" ");
    // skip tests involving JSON, sesame currently does not have a SPARQL/JSON parser
    query.append(" AND NOT resultFile LIKE \"*.srj\" ");
    // skip tests involving entailment regimes
    query.append(" AND NOT BOUND(Regime) ");
    // skip tests involving basic federation, these are tested separately
    query.append(" AND (NOT BOUND(Requirement) OR (Requirement != mf:BasicFederation)) ");

    query.append(" USING NAMESPACE ");
    query.append("  mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, ");
    query.append("  dawgt = <http://www.w3.org/2001/sw/DataAccess/tests/test-dawg#>, ");
    query.append("  qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>, ");
    query.append("  sd = <http://www.w3.org/ns/sparql-service-description#>, ");
    query.append("  ent = <http://www.w3.org/ns/entailment/> ");
    TupleQuery testCaseQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    query.setLength(0);
    query.append(" SELECT graph ");
    query.append(" FROM {action} qt:graphData {graph} ");
    query.append(" USING NAMESPACE ");
    query.append("  qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>");
    TupleQuery namedGraphsQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    query.setLength(0);
    query.append("SELECT 1 ");
    query.append(" FROM {testURI} mf:resultCardinality {mf:LaxCardinality}");
    query.append(" USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>");
    TupleQuery laxCardinalityQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    logger.debug("evaluating query..");
    TupleQueryResult testCases = testCaseQuery.evaluate();
    while (testCases.hasNext()) {
        BindingSet bindingSet = testCases.next();

        URI testURI = (URI) bindingSet.getValue("testURI");
        String testName = bindingSet.getValue("testName").toString();
        String resultFile = bindingSet.getValue("resultFile").toString();
        String queryFile = bindingSet.getValue("queryFile").toString();
        URI defaultGraphURI = (URI) bindingSet.getValue("defaultGraph");
        Value action = bindingSet.getValue("action");
        Value ordered = bindingSet.getValue("ordered");

        logger.debug("found test case : {}", testName);

        // Query named graphs
        namedGraphsQuery.setBinding("action", action);
        TupleQueryResult namedGraphs = namedGraphsQuery.evaluate();

        DatasetImpl dataset = null;
        if (defaultGraphURI != null || namedGraphs.hasNext()) {
            dataset = new DatasetImpl();
            if (defaultGraphURI != null) {
                dataset.addDefaultGraph(defaultGraphURI);
            }
            while (namedGraphs.hasNext()) {
                BindingSet graphBindings = namedGraphs.next();
                URI namedGraphURI = (URI) graphBindings.getValue("graph");
                logger.debug(" adding named graph : {}", namedGraphURI);
                dataset.addNamedGraph(namedGraphURI);
            }
        }

        // Check for lax-cardinality conditions
        boolean laxCardinality = false;
        laxCardinalityQuery.setBinding("testURI", testURI);
        TupleQueryResult laxCardinalityResult = laxCardinalityQuery.evaluate();
        try {
            laxCardinality = laxCardinalityResult.hasNext();
        } finally {
            laxCardinalityResult.close();
        }

        // if this is enabled, Sesame passes all tests, showing that the only
        // difference is the semantics of arbitrary-length paths
        /*
         * if (!laxCardinality) {
         *     // property-path tests always with lax cardinality because Sesame
         *     // filters out duplicates by design
         *     if (testURI.stringValue().contains("property-path")) {
         *         laxCardinality = true;
         *     }
         * }
         */

        // check if we should test for query result ordering
        boolean checkOrder = false;
        if (ordered != null) {
            checkOrder = Boolean.parseBoolean(ordered.stringValue());
        }

        SPARQLQueryTest test = factory.createSPARQLQueryTest(
            testURI.toString(), testName, queryFile, resultFile, dataset, laxCardinality, checkOrder);
        if (test != null) {
            suite.addTest(test);
        }
    }

    testCases.close();
    con.close();
    manifestRep.shutDown();

    logger.info("Created test suite with " + suite.countTestCases() + " test cases.");
    return suite;
}