private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
  Resource dataset =
      cube.createResource(
          GK.uri + "Properties_per_Class" + calendar.getTimeInMillis(), QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Properties per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));

  // Query either the local model or the remote SPARQL endpoint.
  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(INSTANCES, inputModel);
  } else {
    qExec = QueryExecutionFactory.sparqlService(endpoint, INSTANCES, defaultGraphs, defaultGraphs);
  }

  try {
    ResultSet result = qExec.execSelect();
    int i = 0;
    while (result.hasNext()) {
      Resource owlClass = result.next().getResource("class");
      NUMBER_OF_PROPERTIES.setIri("class", owlClass.getURI());

      QueryExecution propertiesQexec;
      if (inputModel != null) {
        propertiesQexec = QueryExecutionFactory.create(NUMBER_OF_PROPERTIES.asQuery(), inputModel);
      } else {
        propertiesQexec =
            QueryExecutionFactory.sparqlService(
                endpoint, NUMBER_OF_PROPERTIES.asQuery(), defaultGraphs, defaultGraphs);
      }

      try {
        ResultSet propertiesResult = propertiesQexec.execSelect();
        if (propertiesResult.hasNext()) {
          Resource obs =
              cube.createResource(
                  "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
          obs.addProperty(QB.dataset, dataset);
          obs.addProperty(GK.DIM.Class, owlClass);
          obs.addLiteral(GK.MEASURE.PropertyCount, propertiesResult.next().getLiteral("count"));
          i++;
        }
      } catch (Exception e) {
        // Record the failure as an observation with a property count of -1.
        Resource obs =
            cube.createResource(
                "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
        obs.addProperty(QB.dataset, dataset);
        obs.addProperty(GK.DIM.Class, owlClass);
        obs.addLiteral(GK.MEASURE.PropertyCount, -1);
        obs.addLiteral(RDFS.comment, e.getMessage());
        i++;
      } finally {
        propertiesQexec.close();
      }
    }
  } finally {
    qExec.close();
  }
  return cube;
}
private Map<String, Integer> getStats(String sparqlQuery, QueryExecutionFactory qef) {
  Map<String, Integer> stats = new HashMap<>();
  QueryExecution qe = null;
  try {
    qe = qef.createQueryExecution(sparqlQuery);
    ResultSet results = qe.execSelect();
    while (results.hasNext()) {
      QuerySolution qs = results.next();
      String s = qs.get("stats").toString();
      int c = 0;
      if (qs.contains("count")) {
        c = qs.get("count").asLiteral().getInt();
      }
      stats.put(s, c);
    }
  } finally {
    if (qe != null) {
      qe.close();
    }
  }
  return stats;
}
public Set<NamingIssue> detectNonExactMatchingDirectChildIssues(Model model) {
  long start = System.currentTimeMillis();

  // get SubClass - SuperClass pairs via SPARQL query
  QueryExecution qe =
      QueryExecutionFactory.create(
          "PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#> "
              + "PREFIX owl:<http://www.w3.org/2002/07/owl#> "
              + "SELECT * WHERE {?sub a owl:Class . ?sup a owl:Class . ?sub rdfs:subClassOf ?sup}",
          model);
  ResultSet rs = qe.execSelect();
  Set<Pair<String, String>> subClassSuperClassPairs = new HashSet<Pair<String, String>>();
  while (rs.hasNext()) {
    QuerySolution qs = rs.next();
    String subClass = qs.getResource("sub").getURI();
    String superClass = qs.getResource("sup").getURI();
    subClassSuperClassPairs.add(new Pair<String, String>(subClass, superClass));
  }
  qe.close();

  // compute non matching pairs
  Set<NamingIssue> nonMatchingChildren = computeNonExactMatchingChildren(subClassSuperClassPairs);

  long end = System.currentTimeMillis();
  logger.info("Operation took " + (end - start) + "ms");

  return nonMatchingChildren;
}
private ResultSet executeQuery(String queryText, Model dataset) {
  Query query = QueryFactory.create(queryText, SYNTAX);
  QueryExecution queryExecution = QueryExecutionFactory.create(query, dataset);
  try {
    // Copy the results so they remain valid after the execution is closed;
    // returning the live ResultSet would leak the execution.
    return ResultSetFactory.copyResults(queryExecution.execSelect());
  } finally {
    queryExecution.close();
  }
}
public String sparql(String qry) {
  Model model = ModelFactory.createDefaultModel();
  try {
    // The syntax name must be "RDF/XML"; plain "RDF" is not a valid Jena language name.
    model.read(new FileInputStream("D:/AI Project/harsh/myNew.owl"), null, "RDF/XML");
  } catch (FileNotFoundException e) {
    e.printStackTrace();
  }

  String res = null;
  String res1 = null;
  Query query = QueryFactory.create(qry);
  QueryExecution exec = QueryExecutionFactory.create(query, model);
  try {
    ResultSet rs = exec.execSelect();
    while (rs.hasNext()) {
      QuerySolution soln = rs.nextSolution();
      res = soln.get("dn").toString();
      System.out.println(res);
      System.out.println("HAS");
      res1 = soln.get("rn").toString();
      System.out.println(res1);
    }
  } finally {
    exec.close();
  }
  return res;
}
public static Model exec(Model model, final Table table, Query query) throws IOException {
  OntModel inferencedModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);

  // Wrap the given table as an inline-data element and prepend it to the query pattern,
  // so the query is evaluated against those bindings.
  ElementData tableElementData =
      new ElementData() {
        @Override
        public Table getTable() {
          return table;
        }
      };
  for (Var var : table.getVars()) {
    tableElementData.add(var);
  }

  ElementGroup elementGroup = new ElementGroup();
  elementGroup.addElement(tableElementData);
  if (query.getQueryPattern() instanceof ElementGroup) {
    for (Element element : ((ElementGroup) query.getQueryPattern()).getElements()) {
      elementGroup.addElement(element);
    }
  } else {
    elementGroup.addElement(query.getQueryPattern());
  }
  query.setQueryPattern(elementGroup);

  // QueryExecution ex = QueryExecutionFactory.create(query, model);
  QueryExecution ex = ARQFactory.get().createQueryExecution(query, model);
  if (query.isConstructType()) {
    ex.execConstruct(inferencedModel);
  } else {
    inferencedModel.add(ex.execSelect().getResourceModel());
  }
  return inferencedModel;
}
public static int getDataMatrix(Model model) {
  String sparqlQuery =
      String.format(
          "PREFIX ot:<%s>\n"
              + "PREFIX isa:<%s>\n"
              + "PREFIX dcterms:<http://purl.org/dc/terms/>\n"
              + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
              + "PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
              + "SELECT ?node ?feature ?title ?value where {\n"
              + " ?node rdf:type isa:Data."
              + " ?feature ot:hasSource ?node."
              + " ?feature dcterms:title ?title."
              + " ?fv ot:feature ?feature."
              + " ?fv ot:value ?value."
              + "} ORDER by ?node ?sample \n",
          OT.NS, ISA.URI);

  Query query = QueryFactory.create(sparqlQuery);
  QueryExecution qe = QueryExecutionFactory.create(query, model);
  try {
    // Count the result rows; the individual bindings are not used.
    ResultSet rs = qe.execSelect();
    int row = 0;
    while (rs.hasNext()) {
      rs.next();
      row++;
    }
    return row;
  } finally {
    qe.close();
  }
}
public Map<String, List<RDFNode>> queryModel(
    String queryString, List<String> queryVariables, VirtDataset virtDataset) {
  Map<String, List<RDFNode>> solution = new HashMap<String, List<RDFNode>>();
  QueryExecution qexec;
  try {
    qexec = QueryExecutionFactory.create(queryString, virtDataset);
  } catch (com.hp.hpl.jena.query.QueryParseException e) {
    System.err.println(ExceptionUtils.getStackTrace(e) + "\n Will return an empty map...");
    return Collections.emptyMap();
  }
  try {
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
      QuerySolution sol = results.next();
      for (String variable : queryVariables) {
        RDFNode nodeVar = sol.get(variable);
        if (nodeVar != null) {
          if (solution.get(variable) == null) {
            solution.put(variable, new ArrayList<RDFNode>());
          }
          solution.get(variable).add(nodeVar);
        }
      }
    }
  } finally {
    qexec.close();
  }
  return solution;
}
/**
 * Queries the {@link #rdfEndpoint(String)} with each of the {@link #rdfQueries} and harvests the
 * results of the query.
 */
private void harvestFromEndpoint() {
  Query query;
  QueryExecution qExec;

  for (String rdfQuery : rdfQueries) {
    if (closed) break;

    logger.info(
        "Harvesting with query: [{}] on index [{}] and type [{}]", rdfQuery, indexName, typeName);

    try {
      query = QueryFactory.create(rdfQuery);
    } catch (QueryParseException qpe) {
      logger.error("Could not parse [{}]. Please provide a relevant query. {}", rdfQuery, qpe);
      continue;
    }
    qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);

    try {
      harvest(qExec);
    } catch (Exception e) {
      logger.error("Exception [{}] occurred while harvesting", e.getLocalizedMessage());
    } finally {
      qExec.close();
    }
  }
}
protected ResultSet executeSelectQuery(String query, Model model) {
  logger.trace("Sending query on local model\n{} ...", query);
  QueryExecutionFactory qef = new QueryExecutionFactoryModel(model);
  QueryExecution qexec = qef.createQueryExecution(query);
  try {
    // Copy the results so they stay usable after the execution is closed.
    return ResultSetFactory.copyResults(qexec.execSelect());
  } finally {
    qexec.close();
  }
}
private void generateSample() {
  logger.info("Generating sample...");
  sample = ModelFactory.createDefaultModel();

  // we have to set up a new query execution factory working on our local model
  qef = new QueryExecutionFactoryModel(sample);
  reasoner = new SPARQLReasoner(qef);

  // get the page size
  // TODO put to base class
  long pageSize = 10000; // PaginationUtils.adjustPageSize(globalQef, 10000);

  ParameterizedSparqlString sampleQueryTemplate = getSampleQuery();
  sampleQueryTemplate.setIri("p", entityToDescribe.toStringID());
  Query query = sampleQueryTemplate.asQuery();
  query.setLimit(pageSize);

  boolean isEmpty = false;
  int i = 0;
  while (!isTimeout() && !isEmpty) {
    // get next sample
    logger.debug("Extending sample...");
    query.setOffset(i++ * pageSize);
    QueryExecution qe = ksQef.createQueryExecution(query);
    Model tmp = qe.execConstruct();
    sample.add(tmp);
    qe.close();

    // if last call returned empty model, we can leave loop
    isEmpty = tmp.isEmpty();
  }
  logger.info("...done. Sample size: " + sample.size() + " triples");
}
/** @inheritDoc */
public ResultSet selectQuery(final String theQuery) throws QueryException {
  assertConnected();

  QueryExecution aQueryExec = query(theQuery);
  return new JenaResultSet(aQueryExec, aQueryExec.execSelect());
}
public static QuerySolution evaluationOfSPARQLQueryAgainstModel(String queryString, Model model) {
  // that will be our result
  QuerySolution soln = null;

  // Create a SPARQL query from the given string
  Query query = QueryFactory.create(queryString);

  // Create a QueryExecution to execute over the Model.
  QueryExecution qexec = QueryExecutionFactory.create(query, model);
  try {
    // Results from a query in a table-like manner for SELECT queries. Each row corresponds to a
    // set of bindings which fulfill the conditions of the query. Access to the results is by
    // variable name.
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
      // QuerySolution -- A single answer from a SELECT query.
      // results.nextSolution() -- Moves to the next result
      soln = results.nextSolution();
    }
  } finally {
    qexec.close();
  }
  return soln;
}
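Note that the helper above returns only the last solution of the result set, so it is mainly useful for queries expected to bind a single row. A minimal usage sketch follows; the model contents, the example URI, and the query string are illustrative assumptions, not part of the original code:

// Illustrative data only: one resource with a Dublin Core title.
Model model = ModelFactory.createDefaultModel();
model.createResource("http://example.org/book1").addProperty(DC.title, "SPARQL by Example");

QuerySolution soln =
    evaluationOfSPARQLQueryAgainstModel(
        "SELECT ?title WHERE { ?s <http://purl.org/dc/elements/1.1/title> ?title }", model);
if (soln != null) {
  System.out.println(soln.getLiteral("title").getString());
}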
private static Set<Property> getPermissions(String userPrefix, String username, String uri) {
  Set<Property> permissions = new HashSet<Property>();
  Resource resource = configModel.createResource(uri);
  Resource user;
  if (username == null) {
    user = PERM.Public;
  } else {
    user = configModel.createResource(userPrefix + username);
  }

  StmtIterator stmts = configModel.listStatements(user, null, resource);
  while (stmts.hasNext()) {
    permissions.add(stmts.next().getPredicate());
  }

  String queryString =
      "PREFIX perm: <http://vocab.ox.ac.uk/perm#>\n"
          + "SELECT ?perm ?regex WHERE {\n"
          + "  <" + user.getURI() + "> ?perm ?rm .\n"
          + "  ?rm a perm:ResourceMatch ;\n"
          + "      perm:matchExpression ?regex }";
  com.hp.hpl.jena.query.Query query = QueryFactory.create(queryString);
  QueryExecution qexec = QueryExecutionFactory.create(query, configModel);
  try {
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
      QuerySolution sol = results.next();
      if (uri.matches(((Literal) sol.get("regex")).getLexicalForm())) {
        permissions.add(configModel.createProperty(((Resource) sol.get("perm")).getURI()));
      }
    }
  } finally {
    qexec.close();
  }

  // Public permissions apply to every user.
  if (username != null) {
    permissions.addAll(getPermissions(userPrefix, null, uri));
  }
  return permissions;
}
@Override
public List<Literal> getDataPropertyValuesForIndividualByProperty(
    String subjectUri, String propertyUri) {
  log.debug("Data property value query string:\n" + DATA_PROPERTY_VALUE_QUERY_STRING);
  log.debug("Data property value:\n" + dataPropertyValueQuery);

  QuerySolutionMap initialBindings = new QuerySolutionMap();
  initialBindings.add("subject", ResourceFactory.createResource(subjectUri));
  initialBindings.add("property", ResourceFactory.createResource(propertyUri));

  // Run the SPARQL query to get the properties
  List<Literal> values = new ArrayList<Literal>();
  DatasetWrapper w = dwf.getDatasetWrapper();
  Dataset dataset = w.getDataset();
  dataset.getLock().enterCriticalSection(Lock.READ);
  QueryExecution qexec = null;
  try {
    qexec = QueryExecutionFactory.create(dataPropertyValueQuery, dataset, initialBindings);
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
      QuerySolution soln = results.next();
      RDFNode node = soln.get("value");
      if (!node.isLiteral()) {
        continue;
      }
      Literal value = soln.getLiteral("value");
      values.add(value);
    }
  } finally {
    if (qexec != null) {
      qexec.close();
    }
    dataset.getLock().leaveCriticalSection();
    w.close();
  }
  return values;
}
@SuppressWarnings("unchecked") private static List<RDFNode> getURIObjects() { List<RDFNode> objectsURIs = new ArrayList<RDFNode>(); if (demo) { // Deserialize the results if exists (For Demo purpose) if (useCache) { try { List<String> ser = new ArrayList<String>(); File file = new File("URIObjects.ser"); if (file.exists()) { ObjectInputStream in; in = new ObjectInputStream(new FileInputStream(file)); ser = (List<String>) in.readObject(); in.close(); // convert every object back from string for (String n : ser) { objectsURIs.add(ResourceFactory.createResource(n)); } return objectsURIs; } } catch (Exception e) { e.printStackTrace(); } } } // create a query to retrieve URIs objects String queryString = "SELECT * " + "WHERE { ?s ?p ?o . FILTER (isURI(?o)) . " + "FILTER (STRSTARTS(STR(?o), \"" + resourcePrefix + "\"))}"; Query query = QueryFactory.create(queryString); QueryExecution exec = QueryExecutionFactory.create(query, localModel); ResultSet rs = exec.execSelect(); while (rs.hasNext()) { QuerySolution sol = rs.next(); RDFNode object = sol.get("?o"); objectsURIs.add(object); } if (demo) { // serialize the output (for Demo purpose) try { FileOutputStream fileOut = new FileOutputStream("URIObjects.ser"); ObjectOutputStream out = new ObjectOutputStream(fileOut); // convert to Serializabe Strings List<String> l = new ArrayList<String>(); for (RDFNode n : objectsURIs) { l.add(n.toString()); } out.writeObject(l); out.close(); } catch (Exception e2) { e2.printStackTrace(); } } return objectsURIs; }
public static void main(String[] args) {
  Store store = HBaseRdfFactory.connectStore("Store/hbaserdf-simple.ttl");
  store.getTableFormatter().format();

  Model model = HBaseRdfFactory.connectDefaultModel(store);

  model.add(
      model.createResource("http://example.org/person#John"),
      VCARD.FN,
      model.asRDFNode(Node.createLiteral("John Smith")));
  model.add(
      model.createResource("http://example.org/person#John"),
      VCARD.EMAIL,
      model.asRDFNode(Node.createLiteral("*****@*****.**")));
  model.add(
      model.createResource("http://example.org/person#Jim"),
      VCARD.FN,
      model.asRDFNode(Node.createLiteral("Jim Mason")));
  model.add(
      model.createResource("http://example.org/person#Jim"),
      VCARD.EMAIL,
      model.asRDFNode(Node.createLiteral("*****@*****.**")));
  model.add(
      model.createResource("http://example.org/person#Bob"),
      VCARD.FN,
      model.asRDFNode(Node.createLiteral("Bob Brown")));
  model.add(
      model.createResource("http://example.org/person#Bob"),
      VCARD.EMAIL,
      model.asRDFNode(Node.createLiteral("*****@*****.**")));

  StmtIterator iter = model.listStatements();
  while (iter.hasNext()) {
    System.out.println(iter.next().toString());
  }

  iter = model.getResource("http://example.org/person#John").listProperties();
  while (iter.hasNext()) {
    System.out.println(iter.next().toString());
  }

  ResIterator resIter = model.listSubjects();
  while (resIter.hasNext()) {
    System.out.println(resIter.next().toString());
  }

  String query =
      " PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> "
          + " SELECT ?x "
          + " WHERE "
          + " { "
          + "   <http://example.org/person#John> vcard:FN ?x "
          + " } ";
  QueryExecution qe = QueryExecutionFactory.create(query, model);
  ResultSet rs = qe.execSelect();
  ResultSetFormatter.out(rs);
  qe.close();
}
private void resolveChildren(ModelTreeNode modelTreeNode, String oldModelUrl) throws IOException {
  ZdoModel model = modelTreeNode.getModel();

  // If there is also a published version of this doc, find it, remove it and adopt all its
  // children
  if (oldModelUrl != null) {
    // Change its children to be ours
    String queryString =
        "SELECT ?subject WHERE {\n"
            + " ?subject <" + DCTerms.isPartOf.getURI() + "> <" + oldModelUrl + ">.\n"
            + "}";
    QueryExecution queryExecution =
        QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT, queryString);
    try {
      ResultSet resultSet = queryExecution.execSelect();
      while (resultSet.hasNext()) {
        QuerySolution querySolution = resultSet.next();
        RDFNode childNode = querySolution.get("subject");
        String childToAdoptUrl = childNode.asResource().getURI();
        ZdoModel childModel = store.get(childToAdoptUrl);
        childModel.replaceValueOfProperty(
            DCTerms.isPartOf, store.removeTransactionFromUrl(model.getUrl()));

        // If this child was published
        if (ZdoGroup.ZDO.name().equals(childModel.get(ZdoTerms.group))) {
          // Is this child getting replaced by a newer version?
          if (modelTreeNodeKdrIndex.containsKey(childModel.get(ZdoTerms.kdrObject))) {
            // Yes, unpublish it
            ZdoModel childKdrObject = store.get(childModel.get(ZdoTerms.kdrObject));
            markAsUnpublished(childModel, childKdrObject);
            store.update(childKdrObject);
          } else {
            // No, it should be added to our tree to solr
            ModelTreeNode childModelTreeNode = new ModelTreeNode();
            childModelTreeNode.setModel(childModel);
            modelTreeNodeKdrIndex.put(childModel.get(ZdoTerms.kdrObject), childModelTreeNode);
            modelTreeNodeIndex.put(
                store.removeTransactionFromUrl(childModel.getUrl()), modelTreeNode);
            modelTreeNode.getChildren().add(childModelTreeNode);
          }
        }
        store.update(childModel);
      }
    } finally {
      queryExecution.close();
    }
  }

  // Recurse on children
  for (ModelTreeNode childNode : modelTreeNode.getChildren()) {
    // Get kdr version of this doc
    ZdoModel kdrDoc = store.get(childNode.getModel().get(ZdoTerms.kdrObject));
    resolveChildren(childNode, kdrDoc.get(ZdoTerms.newestPublished));
    kdrDoc.replaceValueOfProperty(
        ZdoTerms.newestPublished, store.removeTransactionFromUrl(childNode.getModel().getUrl()));
    store.update(kdrDoc);
  }
}
private long doExecuteSparql(VitroRequest vreq) {
  OntModel jenaOntModel = ModelAccess.on(getServletContext()).getOntModel();
  OntModel source = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);

  String[] sourceModel = vreq.getParameterValues("sourceModelName");
  for (int i = 0; i < sourceModel.length; i++) {
    Model m = getModel(sourceModel[i], vreq);
    source.addSubModel(m);
  }

  Model destination = getModel(vreq.getParameter("destinationModelName"), vreq);
  String sparqlQueryStr = vreq.getParameter("sparqlQueryStr");
  String savedQueryURIStr = vreq.getParameter("savedQuery");
  String queryStr;
  if (savedQueryURIStr.length() == 0) {
    log.debug("Using entered query");
    queryStr = sparqlQueryStr;
  } else {
    Property queryStrProp = ResourceFactory.createProperty(SPARQL_QUERYSTR_PROP);
    jenaOntModel.enterCriticalSection(Lock.READ);
    try {
      Individual ind = jenaOntModel.getIndividual(savedQueryURIStr);
      log.debug("Using query " + savedQueryURIStr);
      queryStr = ((Literal) ind.getPropertyValue(queryStrProp)).getLexicalForm();
      // !!! We need to turn off automatic HTML-escaping for data property editing.
      queryStr = StringEscapeUtils.unescapeHtml(queryStr);
    } finally {
      jenaOntModel.leaveCriticalSection();
    }
  }

  Model tempModel = ModelFactory.createDefaultModel();
  Query query = SparqlQueryUtils.create(queryStr);
  QueryExecution qexec = QueryExecutionFactory.create(query, source);
  try {
    qexec.execConstruct(tempModel);
  } catch (QueryExecException qee) {
    // Not a CONSTRUCT query; fall back to DESCRIBE.
    qexec.execDescribe(tempModel);
  } finally {
    qexec.close();
  }

  destination.enterCriticalSection(Lock.WRITE);
  try {
    if (destination instanceof OntModel) {
      ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, true));
    } else {
      destination.notifyEvent(new EditEvent(null, true));
    }
    destination.add(tempModel);
  } finally {
    if (destination instanceof OntModel) {
      ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, false));
    } else {
      destination.notifyEvent(new EditEvent(null, false));
    }
    destination.leaveCriticalSection();
  }
  return tempModel.size();
}
public Results executeSelectQuery(Query sparql, QuerySolution initialBindings) {
  try {
    Data data = getData();
    QueryExecution queryExec = getQueryExecution(sparql, initialBindings, data);
    return new Results(queryExec.execSelect(), queryExec, data);
  } catch (DataException e) {
    e.printStackTrace();
    return null;
  }
}
/** @inheritDoc */
public Graph describe(final URI theURI) throws DataSourceException {
  assertConnected();

  QueryExecution aQueryExec = query("describe <" + theURI + ">");
  try {
    return JenaSesameUtils.asSesameGraph(aQueryExec.execDescribe());
  } finally {
    aQueryExec.close();
  }
}
/** @inheritDoc */
public Graph graphQuery(final String theQuery) throws QueryException {
  assertConnected();

  QueryExecution aQueryExec = query(theQuery);
  try {
    return JenaSesameUtils.asSesameGraph(aQueryExec.execConstruct());
  } finally {
    aQueryExec.close();
  }
}
/**
 * Execute an ASK query
 *
 * @param query the SPARQL ASK query string
 * @return the result of the query, or false if the query is not an ASK query
 */
public boolean executeAskQuery(String query) {
  Query query1 = QueryFactory.create(query);
  QueryExecution qexec = QueryExecutionFactory.create(query1, ontologie);
  try {
    // Checking the parsed query type is more reliable than searching the raw
    // string for the substring "ask".
    if (query1.isAskType()) {
      return qexec.execAsk();
    }
  } finally {
    qexec.close();
  }
  return false;
}
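A quick usage sketch of the method above; the triple pattern is an illustrative assumption, not taken from the original code:

// Ask whether the queried ontology declares any OWL classes (example pattern only).
boolean hasClasses = executeAskQuery("ASK { ?s a <http://www.w3.org/2002/07/owl#Class> }");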
// method to get Hospital list given treatment type
public static void issueSPARQLTreatment_Hospital(
    Model m, String treatMent_Name, HashMap hos_treatment) {
  // Build a query for all hospitals that offer the given treatment.
  String defaultNameSpace =
      "http://www.semanticweb.org/janani/ontologies/2015/3/cancer-treatment#";
  String offersNameSpace =
      "<http://www.semanticweb.org/janani/ontologies/2015/3/cancer-treatment#offers>";
  String ntemp1 = "<" + defaultNameSpace + treatMent_Name + ">";
  String temp1 = " ?Hospital " + offersNameSpace + " " + ntemp1 + " .";
  String queryStringDrug = "SELECT ?Hospital WHERE {" + temp1 + " }";
  System.out.println(queryStringDrug);

  Query query1 = QueryFactory.create(queryStringDrug);
  QueryExecution qe1 = QueryExecutionFactory.create(query1, m);
  try {
    ResultSet results1 = qe1.execSelect();
    while (results1.hasNext()) {
      QuerySolution querySolution = results1.next();
      RDFNode hospital_name = querySolution.get("Hospital");
      // The hospital's local name follows the '#' in its URI.
      String[] Hos_name = String.valueOf(hospital_name).split("#");
      // Note: put() overwrites on each iteration, so only the last hospital found
      // for this treatment remains in the map.
      hos_treatment.put(treatMent_Name, Hos_name[1]);
    }
  } finally {
    qe1.close();
  }
}
public static String execSparQLQuery(String query) {
  System.out.println("execSPINQuery");
  Model model = getUqModel();

  // Register system functions (such as sp:gt (>))
  SPINModuleRegistry.get().init();

  Query arqQuery = ARQFactory.get().createQuery(model, query);
  ARQ2SPIN arq2SPIN = new ARQ2SPIN(model);
  Select spinQuery = (Select) arq2SPIN.createQuery(arqQuery, null);

  System.out.println("SPIN query in Turtle:");
  model.write(System.out, FileUtils.langTurtle);
  System.out.println("-----");
  String str = spinQuery.toString();
  System.out.println("SPIN query:\n" + str);

  // Now turn it back into a Jena Query
  Query parsedBack = ARQFactory.get().createQuery(spinQuery);
  System.out.println("Jena query:\n" + parsedBack);

  com.hp.hpl.jena.query.Query arq = ARQFactory.get().createQuery(spinQuery);
  QueryExecution qexec = ARQFactory.get().createQueryExecution(arq, model);
  QuerySolutionMap arqBindings = new QuerySolutionMap();
  arqBindings.add("predicate", RDFS.label);
  qexec.setInitialBinding(arqBindings); // Pre-assign the arguments
  ResultSet rs = qexec.execSelect();
  qexec.close();

  Collection<User> users = Sparql.exec(getUqModel(), User.class, query);
  String usersString = "";
  for (User user : users) {
    System.out.println("User: " + user);
    usersString += user + "<br/>";
  }
  System.out.println("execSPINQuery() done.");
  return usersString;
}
void runTestSelect(Query query, QueryExecution qe) throws Exception {
  // Do the query!
  ResultSetRewindable resultsActual = ResultSetFactory.makeRewindable(qe.execSelect());
  qe.close();

  if (results == null) return;

  // Assumes resultSetCompare can cope with full isomorphism possibilities.
  ResultSetRewindable resultsExpected;
  if (results.isResultSet()) {
    resultsExpected = ResultSetFactory.makeRewindable(results.getResultSet());
  } else if (results.isModel()) {
    resultsExpected = ResultSetFactory.makeRewindable(results.getModel());
  } else {
    fail("Wrong result type for SELECT query");
    resultsExpected = null; // Keep the compiler happy
  }

  if (query.isReduced()) {
    // Reduced - best we can do is DISTINCT
    resultsExpected = unique(resultsExpected);
    resultsActual = unique(resultsActual);
  }

  // Hack for CSV : tests involving bNodes need manually checking.
  if (testItem.getResultFile().endsWith(".csv")) {
    resultsActual = convertToStrings(resultsActual);
    resultsActual.reset();

    int nActual = ResultSetFormatter.consume(resultsActual);
    int nExpected = ResultSetFormatter.consume(resultsExpected);
    resultsActual.reset();
    resultsExpected.reset();
    assertEquals("CSV: Different number of rows", nExpected, nActual);

    boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
    if (!b) {
      System.out.println("Manual check of CSV results required: " + testItem.getName());
    }
    return;
  }

  boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
  if (!b) {
    resultsExpected.reset();
    resultsActual.reset();
    boolean b2 = resultSetEquivalent(query, resultsExpected, resultsActual);
    printFailedResultSetTest(query, qe, resultsExpected, resultsActual);
  }
  assertTrue("Results do not match: " + testItem.getName(), b);
}
public Boolean executeAskQuery(Query sparql, QuerySolution initialBindings) {
  try {
    Data data = getData();
    QueryExecution queryExec = getQueryExecution(sparql, initialBindings, data);
    boolean result = queryExec.execAsk();
    queryExec.close();
    data.close();
    return result;
  } catch (DataException e) {
    e.printStackTrace();
    return null;
  }
}
public Model executeDescribeQuery(Query sparql, QuerySolution initialBindings) {
  try {
    Data data = getData();
    QueryExecution queryExec = getQueryExecution(sparql, initialBindings, data);
    Model result = queryExec.execDescribe();
    queryExec.close();
    data.close();
    return result;
  } catch (DataException e) {
    e.printStackTrace();
    return null;
  }
}
/**
 * @see org.caboto.jena.db.Database#executeConstructQuery(com.hp.hpl.jena.query.Query,
 *     com.hp.hpl.jena.query.QuerySolution)
 */
public Model executeConstructQuery(Query query, QuerySolution initialBindings) {
  try {
    Data data = getData();
    QueryExecution queryExec = getQueryExecution(query, initialBindings, data);
    Model model = queryExec.execConstruct();
    queryExec.close();
    data.close();
    return model;
  } catch (DataException e) {
    e.printStackTrace();
    return null;
  }
}
public static ResultSet ExecuteQueryByModel(String queryString, Model m) {
  QueryExecution qe = QueryExecutionFactory.create(queryString, Syntax.syntaxARQ, m);
  try {
    // Copy the results so they remain valid after the execution is closed;
    // the original returned a live ResultSet and never closed the execution.
    return ResultSetFactory.copyResults(qe.execSelect());
  } finally {
    qe.close();
  }
}