void printFailedResultSetTest(Query query, QueryExecution qe,
                              ResultSetRewindable qrExpected, ResultSetRewindable qrActual) {
    PrintStream out = System.out;
    out.println();
    out.println("=======================================");
    out.println("Failure: " + description());
    out.println("Query: \n" + query);
    // if ( qe != null && qe.getDataset() != null )
    // {
    //     out.println("Data: \n" + qe.getDataset().asDatasetGraph());
    // }
    out.println("Got: " + qrActual.size() + " --------------------------------");
    qrActual.reset();
    ResultSetFormatter.out(out, qrActual, query.getPrefixMapping());
    qrActual.reset();
    out.flush();

    out.println("Expected: " + qrExpected.size() + " -----------------------------");
    qrExpected.reset();
    ResultSetFormatter.out(out, qrExpected, query.getPrefixMapping());
    qrExpected.reset();
    out.println();
    out.flush();
}
void runTestSelect(Query query, QueryExecution qe) throws Exception {
    // Do the query!
    ResultSetRewindable resultsActual = ResultSetFactory.makeRewindable(qe.execSelect());
    qe.close();
    if (results == null)
        return;

    // Assumes resultSetCompare can cope with full isomorphism possibilities.
    ResultSetRewindable resultsExpected;
    if (results.isResultSet())
        resultsExpected = ResultSetFactory.makeRewindable(results.getResultSet());
    else if (results.isModel())
        resultsExpected = ResultSetFactory.makeRewindable(results.getModel());
    else {
        fail("Wrong result type for SELECT query");
        resultsExpected = null; // Keep the compiler happy
    }

    if (query.isReduced()) {
        // REDUCED: the best we can do is compare as if DISTINCT.
        resultsExpected = unique(resultsExpected);
        resultsActual = unique(resultsActual);
    }

    // Hack for CSV: tests involving bNodes need manual checking.
    if (testItem.getResultFile().endsWith(".csv")) {
        resultsActual = convertToStrings(resultsActual);
        resultsActual.reset();
        int nActual = ResultSetFormatter.consume(resultsActual);
        int nExpected = ResultSetFormatter.consume(resultsExpected);
        resultsActual.reset();
        resultsExpected.reset();
        assertEquals("CSV: Different number of rows", nExpected, nActual);
        boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
        if (!b)
            System.out.println("Manual check of CSV results required: " + testItem.getName());
        return;
    }

    boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
    if (!b) {
        resultsExpected.reset();
        resultsActual.reset();
        boolean b2 = resultSetEquivalent(query, resultsExpected, resultsActual);
        printFailedResultSetTest(query, qe, resultsExpected, resultsActual);
    }
    assertTrue("Results do not match: " + testItem.getName(), b);
}
// TEMPORARY
private boolean checkResultsByModel(Query query, Model expectedModel, ResultSetRewindable results) {
    // Fudge - can't cope with ordered results properly: the output writer for
    // ResultSets does not add rs:index.
    results.reset();
    Model actualModel = ResultSetFormatter.toModel(results);

    // Tidy the models. Very regrettable.
    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet);
    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution);
    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding);
    expectedModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null);
    expectedModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null);

    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet);
    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution);
    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding);
    actualModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null);
    actualModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null);

    boolean b = expectedModel.isIsomorphicWith(actualModel);
    if (!b) {
        System.out.println("---- Expected");
        expectedModel.write(System.out, "TTL");
        System.out.println("---- Actual");
        actualModel.write(System.out, "TTL");
        System.out.println("----");
    }
    return b;
}
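// An aside, hedged: when rs:index ordering does not matter, ARQ also provides a utility for
// comparing result sets directly (org.apache.jena.sparql.resultset.ResultSetCompare), assuming a
// Jena version where it is available and the corresponding import. The helper below is a minimal
// illustrative sketch, not part of the source; the comparison consumes both result sets, hence
// the rewindable copies and the resets.
private boolean sameResults(ResultSetRewindable expected, ResultSetRewindable actual) {
    expected.reset();
    actual.reset();
    // equalsByTerm compares RDF terms exactly; equalsByValue also exists for value-based equality.
    return ResultSetCompare.equalsByTerm(expected, actual);
}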
@Test
public void testSparql() {
    String queryStr = "select distinct ?Concept where {[] a ?Concept} LIMIT 10";
    Query query = QueryFactory.create(queryStr);

    // Remote execution.
    try (QueryExecution qexec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query)) {
        // Set the DBpedia-specific timeout.
        ((QueryEngineHTTP) qexec).addParam("timeout", "10000");
        // Execute.
        ResultSet rs = qexec.execSelect();
        ResultSetFormatter.out(System.out, rs, query);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
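// An aside, hedged: a client-side timeout can also be set on the QueryExecution itself, in
// addition to (or instead of) the endpoint-specific "timeout" parameter above. This assumes a
// Jena version where QueryExecution.setTimeout(long, TimeUnit) is available (and a
// java.util.concurrent.TimeUnit import); the endpoint and query are the same as in the test above.
@Test
public void testSparqlWithClientTimeout() {
    Query query = QueryFactory.create("select distinct ?Concept where {[] a ?Concept} LIMIT 10");
    try (QueryExecution qexec = QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query)) {
        qexec.setTimeout(10, TimeUnit.SECONDS);   // abort on the client after 10 seconds
        ResultSetFormatter.out(System.out, qexec.execSelect(), query);
    } catch (Exception e) {
        e.printStackTrace();
    }
}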
@Test
public void testDOQuery2() {
    String queryString =
        "prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "prefix owl: <http://www.w3.org/2002/07/owl#>\n"
            + "\n"
            + "select *\n"
            + "from <http://purl.obolibrary.org/obo/merged/DOID>\n"
            + "\n"
            + "WHERE {\n"
            + " <http://purl.obolibrary.org/obo/DOID_1485> <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o\n"
            + "}";
    Query query = QueryFactory.create(queryString);
    System.out.println("String: " + queryString);

    QueryExecution qExe = QueryExecutionFactory.sparqlService("http://sparql.hegroup.org/sparql/", query);
    ResultSet results = qExe.execSelect();
    ResultSetFormatter.out(System.out, results, query);
}
public static void main(String... argv) {
    String queryString = "SELECT * { ?s ?p ?o }";
    Query query = QueryFactory.create(queryString);

    Store store = SDBFactory.connectStore("sdb.ttl");

    // Must be a DatasetStore to trigger the SDB query engine.
    // Creating a graph from the Store and adding it to a general-purpose dataset
    // will not necessarily exploit full SQL generation: the right answers will be
    // obtained, but slowly.
    Dataset ds = DatasetStore.create(store);

    QueryExecution qe = QueryExecutionFactory.create(query, ds);
    try {
        ResultSet rs = qe.execSelect();
        ResultSetFormatter.out(rs);
    } finally {
        qe.close();
    }

    // Close the SDB connection, which also closes the underlying JDBC connection.
    store.getConnection().close();
    store.close();
}
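// A hedged variant, following the pattern in the SDB documentation, for connecting to a store
// programmatically instead of via the "sdb.ttl" assembler description used above. The JDBC URL,
// credentials, layout and database type below are placeholders and assumptions, not values taken
// from the example above.
public static Store connectStoreProgrammatically() {
    StoreDesc storeDesc = new StoreDesc(LayoutType.LayoutTripleNodesHash, DatabaseType.HSQLDB);
    JDBC.loadDriverHSQL();                       // load the JDBC driver for the chosen database
    String jdbcURL = "jdbc:hsqldb:mem:example";  // placeholder in-memory database
    SDBConnection conn = new SDBConnection(jdbcURL, "sa", "");
    return SDBFactory.connectStore(conn, storeDesc);
}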
@Override
public EntityDefinition open(Assembler a, Resource root, Mode mode) {
    String prologue = "PREFIX : <" + NS + "> PREFIX list: <http://jena.apache.org/ARQ/list#> ";
    Model model = root.getModel();

    String qs1 = StrUtils.strjoinNL(
        prologue,
        "SELECT * {",
        "  ?eMap :entityField ?entityField ;",
        "        :map ?map ;",
        "        :defaultField ?dftField .",
        "  OPTIONAL { ?eMap :graphField ?graphField }",
        "  OPTIONAL { ?eMap :langField ?langField }",
        "  OPTIONAL { ?eMap :uidField ?uidField }",
        "}");
    ParameterizedSparqlString pss = new ParameterizedSparqlString(qs1);
    pss.setIri("eMap", root.getURI());

    Query query1 = QueryFactory.create(pss.toString());
    QueryExecution qexec1 = QueryExecutionFactory.create(query1, model);
    ResultSet rs1 = qexec1.execSelect();
    List<QuerySolution> results = ResultSetFormatter.toList(rs1);
    if (results.size() == 0) {
        Log.warn(this, "Failed to find a valid EntityMap for : " + root);
        throw new TextIndexException("Failed to find a valid EntityMap for : " + root);
    }
    if (results.size() != 1) {
        Log.warn(this, "Multiple matches for EntityMap for : " + root);
        throw new TextIndexException("Multiple matches for EntityMap for : " + root);
    }

    QuerySolution qsol1 = results.get(0);
    String entityField = qsol1.getLiteral("entityField").getLexicalForm();
    String graphField = qsol1.contains("graphField") ? qsol1.getLiteral("graphField").getLexicalForm() : null;
    String langField = qsol1.contains("langField") ? qsol1.getLiteral("langField").getLexicalForm() : null;
    String defaultField = qsol1.contains("dftField") ? qsol1.getLiteral("dftField").getLexicalForm() : null;
    String uniqueIdField = qsol1.contains("uidField") ? qsol1.getLiteral("uidField").getLexicalForm() : null;

    Multimap<String, Node> mapDefs = HashMultimap.create();
    Map<String, Analyzer> analyzerDefs = new HashMap<>();

    Statement listStmt = root.getProperty(TextVocab.pMap);
    while (listStmt != null) {
        RDFNode n = listStmt.getObject();
        if (!n.isResource()) {
            throw new TextIndexException("Text list node is not a resource : " + n);
        }
        Resource listResource = n.asResource();
        if (listResource.equals(RDF.nil)) {
            break; // end of the list
        }

        Statement listEntryStmt = listResource.getProperty(RDF.first);
        if (listEntryStmt == null) {
            throw new TextIndexException("Text map list is not well formed. No rdf:first property");
        }
        n = listEntryStmt.getObject();
        if (!n.isResource()) {
            throw new TextIndexException("Text map list entry is not a resource : " + n);
        }
        Resource listEntry = n.asResource();

        Statement fieldStatement = listEntry.getProperty(TextVocab.pField);
        if (fieldStatement == null) {
            throw new TextIndexException("Text map entry has no field property");
        }
        n = fieldStatement.getObject();
        if (!n.isLiteral()) {
            throw new TextIndexException("Text map entry field property has no literal value : " + n);
        }
        String field = n.asLiteral().getLexicalForm();

        Statement predicateStatement = listEntry.getProperty(TextVocab.pPredicate);
        if (predicateStatement == null) {
            throw new TextIndexException("Text map entry has no predicate property");
        }
        n = predicateStatement.getObject();
        if (!n.isURIResource()) {
            throw new TextIndexException("Text map entry predicate property has non resource value : " + n);
        }
        mapDefs.put(field, n.asNode());

        Statement analyzerStatement = listEntry.getProperty(TextVocab.pAnalyzer);
        if (analyzerStatement != null) {
            n = analyzerStatement.getObject();
            if (!n.isResource()) {
                throw new TextIndexException("Text map entry analyzer property is not a resource : " + n);
            }
            Resource analyzerResource = n.asResource();
            Analyzer analyzer = (Analyzer) a.open(analyzerResource);
            analyzerDefs.put(field, analyzer);
        }

        // Move on to the next element in the list.
        listStmt = listResource.getProperty(RDF.rest);
    }

    // Primary field/predicate
    if (defaultField != null) {
        Collection<Node> c = mapDefs.get(defaultField);
        if (c.isEmpty())
            throw new TextIndexException("No definition of primary field '" + defaultField + "'");
    }

    EntityDefinition docDef = new EntityDefinition(entityField, defaultField);
    docDef.setGraphField(graphField);
    docDef.setLangField(langField);
    docDef.setUidField(uniqueIdField);
    for (String f : mapDefs.keys()) {
        for (Node p : mapDefs.get(f))
            docDef.set(f, p);
    }
    for (String f : analyzerDefs.keySet()) {
        docDef.setAnalyzer(f, analyzerDefs.get(f));
    }
    return docDef;
}
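// For orientation, a hedged sketch (not from the source) of building, with the plain Jena Model
// API, the shape of entity-map configuration the method above reads. Property local names come
// from its SELECT query and TextVocab lookups; the namespace value, resource URI and field names
// are assumptions for illustration only.
static Model exampleEntityMapConfig() {
    String NS = "http://jena.apache.org/text#";   // assumed value of the NS constant used above
    Model cfg = ModelFactory.createDefaultModel();

    // One map entry: index the values of rdfs:label into the field "text".
    Resource entry = cfg.createResource()
            .addProperty(cfg.createProperty(NS, "field"), "text")
            .addProperty(cfg.createProperty(NS, "predicate"), RDFS.label);

    // The entity map itself: entityField/defaultField literals plus a map RDF list,
    // which open() walks via rdf:first / rdf:rest.
    cfg.createResource("http://example/entMap")
            .addProperty(cfg.createProperty(NS, "entityField"), "uri")
            .addProperty(cfg.createProperty(NS, "defaultField"), "text")
            .addProperty(cfg.createProperty(NS, "map"), cfg.createList(new RDFNode[] { entry }));
    return cfg;
}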
@Test
public void testDO() {
    String doid = "1485";
    String queryString =
        "prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "prefix owl: <http://www.w3.org/2002/07/owl#>\n"
            + "\n"
            + "select ?s ?p ?o \n"
            + "from <http://purl.obolibrary.org/obo/merged/DOID>\n"
            + "\n"
            + "WHERE {\n"
            + " <http://purl.obolibrary.org/obo/DOID_" + doid + "> ?p ?o\n"
            + "}";
    Query query = QueryFactory.create(queryString);

    QueryExecution qExe = QueryExecutionFactory.sparqlService("http://sparql.hegroup.org/sparql/", query);
    // Make the result set rewindable: ResultSetFormatter.out() consumes it, and it is
    // iterated over again below.
    ResultSetRewindable results = ResultSetFactory.makeRewindable(qExe.execSelect());
    ResultSetFormatter.out(System.out, results, query);
    results.reset();
    assertNotNull(results);

    /* Model model = ModelFactory.createDefaultModel();
       Selector selector = new SimpleSelector(null,
           model.getProperty("<http://www.geneontology.org/formats/oboInOwl#hasDbXref>"),
           (RDFNode) null); // the last null must be cast, otherwise the method call is ambiguous */

    List<String> dbXref = new ArrayList<>();
    List<String> iao = new ArrayList<>();
    List<String> exactSynonym = new ArrayList<>();
    List<String> alternativeId = new ArrayList<>();
    String diseaseLabel;

    while (results.hasNext()) {
        QuerySolution querySolution = results.nextSolution();
        // The predicate is reported as a full URI, so compare against full URIs.
        if (querySolution.get("p").toString().matches("http://www.w3.org/2000/01/rdf-schema#label")) {
            diseaseLabel = querySolution.get("o").toString();
        }
        if (querySolution.get("p").toString().matches("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) {
            System.out.println(querySolution.get("p").toString() + " " + querySolution.get("o").toString());
            dbXref.add(querySolution.get("o").toString());
        }
        if (querySolution.get("p").toString().matches("http://purl.obolibrary.org/obo/IAO_0000115")) {
            System.out.println(querySolution.get("p").toString() + " " + querySolution.get("o").toString());
            iao.add(querySolution.get("o").toString());
        }
        if (querySolution.get("p").toString().matches("http://www.geneontology.org/formats/oboInOwl#hasExactSynonym")) {
            System.out.println(querySolution.get("p").toString() + " " + querySolution.get("o").toString());
            exactSynonym.add(querySolution.get("o").toString());
        }
        if (querySolution.get("p").toString().matches("http://www.geneontology.org/formats/oboInOwl#hasAlternativeId")) {
            System.out.println(querySolution.get("p").toString() + " " + querySolution.get("o").toString());
            alternativeId.add(querySolution.get("o").toString());
        }
    }

    assertNotNull(dbXref);
    assertNotNull(iao);
}
@Test
public void testWURCS() {
    String ct =
        "RES\n"
            + "1b:b-dglc-HEX-1:5\n"
            + "2s:n-acetyl\n"
            + "3b:b-dglc-HEX-1:5\n"
            + "4s:n-acetyl\n"
            + "5b:b-dman-HEX-1:5\n"
            + "6b:a-dman-HEX-1:5\n"
            + "7b:a-dman-HEX-1:5\n"
            + "8b:a-dman-HEX-1:5\n"
            + "9b:a-dman-HEX-1:5\n"
            + "10b:a-dman-HEX-1:5\n"
            + "LIN\n"
            + "1:1d(2+1)2n\n"
            + "2:1o(4+1)3d\n"
            + "3:3d(2+1)4n\n"
            + "4:3o(4+1)5d\n"
            + "5:5o(3+1)6d\n"
            + "6:6o(2+1)7d\n"
            + "7:5o(6+1)8d\n"
            + "8:8o(3+1)9d\n"
            + "9:8o(6+1)10d\n"
            + "UND\n"
            + "UND1:100.0:100.0\n"
            + "ParentIDs:7|9|10\n"
            + "SubtreeLinkageID1:o(2+1)d\n"
            + "RES\n"
            + "11b:a-dman-HEX-1:5";
    // Escape the newlines so the sequence can be embedded in the SPARQL string literal.
    ct = ct.replaceAll("\n", "\\\\n");

    String queryString =
        "PREFIX glycan: <http://purl.jp/bio/12/glyco/glycan#>\n"
            + "PREFIX glytoucan: <http://www.glytoucan.org/glyco/owl/glytoucan#>\n"
            + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
            + "\n"
            + "SELECT DISTINCT ?Sequence\n"
            + "FROM <http://rdf.glytoucan.org>\n"
            + "FROM <http://rdf.glytoucan.org/sequence/wurcs>\n"
            + "\n"
            + "WHERE {\n"
            + "?SaccharideURI a glycan:saccharide .\n"
            + "?SaccharideURI glycan:has_glycosequence ?GlycanSequenceURI .\n"
            + "?GlycanSequenceURI glycan:has_sequence ?Sequence .\n"
            + "?GlycanSequenceURI glycan:in_carbohydrate_format glycan:carbohydrate_format_wurcs .\n"
            + "?SaccharideURI glycan:has_glycosequence ?FormatGlycoSequenceURI .\n"
            + "?FormatGlycoSequenceURI glycan:in_carbohydrate_format glycan:carbohydrate_format_glycoct .\n"
            + "?FormatGlycoSequenceURI glycan:has_sequence \"" + ct + "\"^^xsd:string .}";
    System.out.println("String: " + queryString);

    Query query = QueryFactory.create(queryString);
    QueryExecution qExe = QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
    ResultSet results = qExe.execSelect();
    ResultSetFormatter.out(System.out, results, query);
}
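// A hedged alternative sketch (illustrative helper, not from the source): binding the GlycoCT
// sequence with ParameterizedSparqlString, already used by the EntityDefinition assembler above,
// avoids the manual "\n" escaping and quoting when splicing the string into the query text.
// The ?ct variable and the trimmed-down query are assumptions for illustration.
static Query buildGlycoCtQuery(String ct) {
    String template =
        "PREFIX glycan: <http://purl.jp/bio/12/glyco/glycan#>\n"
            + "SELECT DISTINCT ?Sequence\n"
            + "WHERE {\n"
            + "  ?SaccharideURI a glycan:saccharide ;\n"
            + "                 glycan:has_glycosequence ?GlycanSequenceURI ;\n"
            + "                 glycan:has_glycosequence ?FormatGlycoSequenceURI .\n"
            + "  ?GlycanSequenceURI glycan:has_sequence ?Sequence ;\n"
            + "                     glycan:in_carbohydrate_format glycan:carbohydrate_format_wurcs .\n"
            + "  ?FormatGlycoSequenceURI glycan:in_carbohydrate_format glycan:carbohydrate_format_glycoct ;\n"
            + "                          glycan:has_sequence ?ct .\n"
            + "}";
    ParameterizedSparqlString pss = new ParameterizedSparqlString(template);
    pss.setLiteral("ct", ct);   // the raw GlycoCT string; escaping is handled by the binding
    return QueryFactory.create(pss.toString());
}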
@Test
public void testGlycotoucanCTSearch() {
    List<Structure> structures = Ebean.find(Structure.class).findList();
    String ct = "";
    for (Structure structure : structures) {
        if (structure.id >= 7400) {
            // Normalise a few known glitches in the stored GlycoCT strings.
            if (structure.glycanst.startsWith("v--")) {
                structure.glycanst = structure.glycanst.replace("v--", "FreeEnd--");
            }
            if (structure.glycanst.startsWith("FreenEnd")) {
                structure.glycanst = structure.glycanst.replace("FreenEnd", "FreeEnd");
            }
            if (structure.glycanst.startsWith("FreeEnd?")) {
                structure.glycanst = structure.glycanst.replace("FreeEnd?", "FreeEnd--?");
            }
            if (structure.glycanst.startsWith("<Gly") || structure.glycanst.contains("0.0000u")) {
                continue;
            }
            System.out.println(structure.getGlycanst());

            BuilderWorkspace workspace = new BuilderWorkspace(new GlycanRendererAWT());
            workspace.setNotation("cfg"); // cfgbw | uoxf | uoxfcol | text
            GlycanRenderer renderer = workspace.getGlycanRenderer();
            org.eurocarbdb.application.glycanbuilder.Glycan glycan =
                org.eurocarbdb.application.glycanbuilder.Glycan.fromString(structure.glycanst);
            ct = glycan.toGlycoCTCondensed();
            // System.out.println(ct);
            // }
            // }

            /* String ct = "RES\\n" + "1b:a-dgal-HEX-1:5\\n" + "2s:n-acetyl\\n" + "3b:b-dgal-HEX-1:5\\n"
                 + "4b:a-lgal-HEX-1:5|6:d\\n" + "5b:a-dgal-HEX-1:5\\n" + "6s:n-acetyl\\n" + "7b:b-dglc-HEX-1:5\\n"
                 + "8s:n-acetyl\\n" + "LIN\\n" + "1:1d(2+1)2n\\n" + "2:1o(3+1)3d\\n" + "3:3o(2+1)4d\\n"
                 + "4:3o(3+1)5d\\n" + "5:5d(2+1)6n\\n" + "6:1o(6+1)7d\\n" + "7:7d(2+1)8n";

               ct = "RES\n" + "1b:b-dglc-HEX-1:5\n" + "2s:n-acetyl\n" + "3b:b-dglc-HEX-1:5\n" + "4s:n-acetyl\n"
                 + "5b:b-dman-HEX-1:5\n" + "6b:a-dman-HEX-1:5\n" + "7b:a-dman-HEX-1:5\n" + "8b:a-lgal-HEX-1:5|6:d\n"
                 + "LIN\n" + "1:1d(2+1)2n\n" + "2:1o(4+1)3d\n" + "3:3d(2+1)4n\n" + "4:3o(4+1)5d\n"
                 + "5:5o(3+1)6d\n" + "6:5o(6+1)7d\n" + "7:1o(6+1)8d\n" + "UND\n" + "UND1:100.0:100.0\n"
                 + "ParentIDs:1|3|5|6|7|8\n" + "SubtreeLinkageID1:x(-1+1)x\n" + "RES\n" + "9b:b-dglc-HEX-1:5\n"
                 + "10s:n-acetyl\n" + "11b:a-lgal-HEX-1:5|6:d\n" + "12b:b-dgal-HEX-1:5\n"
                 + "13b:a-dgro-dgal-NON-2:6|1:a|2:keto|3:d\n" + "14s:n-acetyl\n" + "LIN\n" + "8:9d(2+1)10n\n"
                 + "9:9o(3+1)11d\n" + "10:9o(4+1)12d\n" + "11:12o(-1+2)13d\n" + "12:13d(5+1)14n\n"
                 + "UND2:100.0:100.0\n" + "ParentIDs:1|3|5|6|7|8\n" + "SubtreeLinkageID1:x(-1+1)x\n" + "RES\n"
                 + "15b:b-dglc-HEX-1:5\n" + "16s:n-acetyl\n" + "17b:a-lgal-HEX-1:5|6:d\n" + "18b:b-dgal-HEX-1:5\n"
                 + "LIN\n" + "13:15d(2+1)16n\n" + "14:15o(3+1)17d\n" + "15:15o(4+1)18d\n" + "UND3:100.0:100.0\n"
                 + "ParentIDs:1|3|5|6|7|8\n" + "SubtreeLinkageID1:x(-1+1)x\n" + "RES\n" + "19b:b-dglc-HEX-1:5\n"
                 + "20s:n-acetyl\n" + "21b:b-dgal-HEX-1:5\n" + "LIN\n" + "16:19d(2+1)20n\n" + "17:19o(4+1)21d"; */

            ct = ct.replaceAll("\n", "\\\\n").replaceAll("x\\(", "u\\(").replaceAll("\\)x", "\\)u");
            System.out.println("new ct: " + ct);

            String queryString =
                "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
                    + "PREFIX glycan: <http://purl.jp/bio/12/glyco/glycan#>\n"
                    + "PREFIX wurcs: <http://www.glycoinfo.org/glyco/owl/wurcs#>\n"
                    + "SELECT DISTINCT ?glycan ?c\n"
                    + "# FROM <http://rdf.glycoinfo.org/wurcs/0.5.0>\n"
                    + "# FROM <http://rdf.glycoinfo.org/wurcs/0.5.0/ms>\n"
                    + "WHERE {\n"
                    + " ?glycan a \tglycan:glycosequence ;\n"
                    + "\tglycan:in_carbohydrate_format glycan:carbohydrate_format_glycoct ;\n"
                    + "\tglycan:has_sequence\n"
                    + "\t\t?c filter(contains(?c, \"RES\\n1b:b-dglc-HEX-1\")) .\n" +
                    // "\t\t?c filter(contains(?c, \"" + ct + "\" )) .\n" +
                    "\n"
                    + " }\n"
                    + " ORDER BY ?glycan\n"
                    + "limit 10";
            System.out.println("String: " + queryString + "\t\tID: " + structure.id);

            Query query = QueryFactory.create(queryString);
            QueryExecution qExe = QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
            ResultSet results = qExe.execSelect();
            ResultSetFormatter.out(System.out, results, query);
        }
    }
}