/**
 * Queries the {@link #rdfEndpoint(String)} with each of the {@link #rdfQueries} and harvests the
 * results of the query.
 */
private void harvestFromEndpoint() {
    Query query;
    QueryExecution qExec;

    for (String rdfQuery : rdfQueries) {
        if (closed) break;

        logger.info(
            "Harvesting with query: [{}] on index [{}] and type [{}]",
            rdfQuery, indexName, typeName);

        try {
            query = QueryFactory.create(rdfQuery);
        } catch (QueryParseException qpe) {
            logger.error("Could not parse [{}]. Please provide a relevant query. {}", rdfQuery, qpe);
            continue;
        }

        qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);

        try {
            harvest(qExec);
        } catch (Exception e) {
            logger.error("Exception [{}] occurred while harvesting", e.getLocalizedMessage());
        } finally {
            qExec.close();
        }
    }
}
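/*
 * A minimal sketch of the parse/execute/close pattern used above, assuming the same
 * org.apache.jena imports as the surrounding code and a Jena version in which
 * QueryExecution implements AutoCloseable. The endpoint URL and query string are
 * illustrative placeholders, not values from this project.
 */
public static void runSingleQuery() {
    String endpoint = "http://example.org/sparql";             // hypothetical endpoint
    String queryStr = "SELECT ?s WHERE { ?s ?p ?o } LIMIT 10"; // hypothetical query

    Query query = QueryFactory.create(queryStr); // throws QueryParseException on bad syntax
    try (QueryExecution qExec = QueryExecutionFactory.sparqlService(endpoint, query)) {
        ResultSet results = qExec.execSelect();
        while (results.hasNext()) {
            System.out.println(results.next());
        }
    } // try-with-resources closes the execution, like the finally block above
}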
void runTestSelect(Query query, QueryExecution qe) throws Exception {
    // Do the query!
    ResultSetRewindable resultsActual = ResultSetFactory.makeRewindable(qe.execSelect());
    qe.close();

    if (results == null) return;

    // Assumes resultSetCompare can cope with full isomorphism possibilities.
    ResultSetRewindable resultsExpected;
    if (results.isResultSet())
        resultsExpected = ResultSetFactory.makeRewindable(results.getResultSet());
    else if (results.isModel())
        resultsExpected = ResultSetFactory.makeRewindable(results.getModel());
    else {
        fail("Wrong result type for SELECT query");
        resultsExpected = null; // Keep the compiler happy
    }

    if (query.isReduced()) {
        // REDUCED -- the best check we can do is DISTINCT.
        resultsExpected = unique(resultsExpected);
        resultsActual = unique(resultsActual);
    }

    // Hack for CSV: tests involving bNodes need manual checking.
    if (testItem.getResultFile().endsWith(".csv")) {
        resultsActual = convertToStrings(resultsActual);
        resultsActual.reset();

        int nActual = ResultSetFormatter.consume(resultsActual);
        int nExpected = ResultSetFormatter.consume(resultsExpected);
        resultsActual.reset();
        resultsExpected.reset();
        assertEquals("CSV: Different number of rows", nExpected, nActual);

        boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
        if (!b)
            System.out.println("Manual check of CSV results required: " + testItem.getName());
        return;
    }

    boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
    if (!b) {
        // Re-run the comparison (useful as a breakpoint target), then dump both result sets.
        resultsExpected.reset();
        resultsActual.reset();
        boolean b2 = resultSetEquivalent(query, resultsExpected, resultsActual);
        printFailedResultSetTest(query, qe, resultsExpected, resultsActual);
    }
    assertTrue("Results do not match: " + testItem.getName(), b);
}
private void setAnyTimeouts(QueryExecution qexec, HttpAction action) {
    // if ( !(action.getDataService().allowTimeoutOverride) )
    //     return ;
    long desiredTimeout = Long.MAX_VALUE;
    String timeoutHeader = action.request.getHeader("Timeout");
    String timeoutParameter = action.request.getParameter("timeout");

    if (timeoutHeader != null) {
        try {
            // Timeouts arrive in (possibly fractional) seconds; convert to milliseconds.
            // Cast to long, not int, to avoid truncating very large values.
            desiredTimeout = (long) (Float.parseFloat(timeoutHeader) * 1000);
        } catch (NumberFormatException e) {
            throw new FusekiException("Timeout header must be a number", e);
        }
    } else if (timeoutParameter != null) {
        try {
            desiredTimeout = (long) (Float.parseFloat(timeoutParameter) * 1000);
        } catch (NumberFormatException e) {
            throw new FusekiException("timeout parameter must be a number", e);
        }
    }

    // desiredTimeout = Math.min(action.getDataService().maximumTimeoutOverride, desiredTimeout) ;

    if (desiredTimeout != Long.MAX_VALUE)
        qexec.setTimeout(desiredTimeout);
}
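/*
 * An illustrative check of the seconds-to-milliseconds conversion above;
 * the request value is hypothetical.
 */
public static void main(String[] args) {
    String timeoutParameter = "2.5"; // e.g. GET /dataset/sparql?query=...&timeout=2.5
    long desiredTimeout = (long) (Float.parseFloat(timeoutParameter) * 1000);
    System.out.println(desiredTimeout); // prints 2500: the execution is capped at 2.5 seconds
}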
/**
 * Query SPARQL endpoint with a SELECT query
 *
 * @param qExec QueryExecution encapsulating the query
 * @return model retrieved by querying the endpoint
 */
private Model getSelectModel(QueryExecution qExec) {
    Model model = ModelFactory.createDefaultModel();
    Graph graph = model.getGraph();
    ResultSet results = qExec.execSelect();

    while (results.hasNext()) {
        QuerySolution sol = results.next();
        String subject;
        String predicate;
        RDFNode object;

        try {
            subject = sol.getResource("s").toString();
            predicate = sol.getResource("p").toString();
            object = sol.get("o");
        } catch (NoSuchElementException e) {
            logger.error("SELECT query does not return a (?s ?p ?o) Triple");
            continue;
        }

        Node objNode;
        if (object.isLiteral()) {
            Literal obj = object.asLiteral();
            objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
        } else {
            // Non-literal objects are resources; they must become URI nodes,
            // not literals, so the rebuilt graph mirrors the endpoint's triples.
            objNode = NodeFactory.createURI(object.toString());
        }

        graph.add(new Triple(
            NodeFactory.createURI(subject),
            NodeFactory.createURI(predicate),
            objNode));
    }

    return model;
}
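/*
 * getSelectModel expects the SELECT query to bind exactly ?s, ?p and ?o, one
 * row per triple. A minimal sketch of a compatible query; the graph pattern
 * and LIMIT are illustrative placeholders.
 */
private static final String EXAMPLE_SELECT_QUERY =
    "SELECT ?s ?p ?o "
        + "WHERE { ?s ?p ?o } "
        + "LIMIT 1000";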
/**
 * Perform the {@link QueryExecution} once.
 *
 * @param action
 * @param queryExecution
 * @param query
 * @param queryStringLog Informational string created from the initial query.
 * @return
 */
protected SPARQLResult executeQuery(
        HttpAction action, QueryExecution queryExecution, Query query, String queryStringLog) {
    setAnyTimeouts(queryExecution, action);

    if (query.isSelectType()) {
        ResultSet rs = queryExecution.execSelect();

        // Force some query execution now.
        // If the timeout-first-row goes off, the output stream has not
        // been started, so the HTTP error code can still be sent.
        rs.hasNext();

        // If we wanted perfect query-time cancellation, we could consume
        // the result now to see if the timeout-end-of-query goes off.
        // rs = ResultSetFactory.copyResults(rs) ;

        // action.log.info(format("[%d] exec/select", action.id)) ;
        return new SPARQLResult(rs);
    }

    if (query.isConstructType()) {
        Dataset dataset = queryExecution.execConstructDataset();
        // action.log.info(format("[%d] exec/construct", action.id));
        return new SPARQLResult(dataset);
    }

    if (query.isDescribeType()) {
        Model model = queryExecution.execDescribe();
        // action.log.info(format("[%d] exec/describe", action.id)) ;
        return new SPARQLResult(model);
    }

    if (query.isAskType()) {
        boolean b = queryExecution.execAsk();
        // action.log.info(format("[%d] exec/ask", action.id)) ;
        return new SPARQLResult(b);
    }

    ServletOps.errorBadRequest("Unknown query type - " + queryStringLog);
    return null;
}
@Override
protected void runTestForReal() throws Throwable {
    Query query = null;
    try {
        try {
            query = queryFromTestItem(testItem);
        } catch (QueryException qEx) {
            query = null;
            qEx.printStackTrace(System.err);
            fail("Parse failure: " + qEx.getMessage());
            throw qEx;
        }

        Dataset dataset = setUpDataset(query, testItem);
        if (dataset == null && !doesQueryHaveDataset(query))
            fail("No dataset for query");

        QueryExecution qe = null;
        if (dataset == null)
            qe = QueryExecutionFactory.create(query, queryFileManager);
        else
            qe = QueryExecutionFactory.create(query, dataset);

        try {
            if (query.isSelectType())
                runTestSelect(query, qe);
            else if (query.isConstructType())
                runTestConstruct(query, qe);
            else if (query.isDescribeType())
                runTestDescribe(query, qe);
            else if (query.isAskType())
                runTestAsk(query, qe);
        } finally {
            qe.close();
        }
    } catch (IOException ioEx) {
        // log.debug("IOException: ", ioEx) ;
        fail("IOException: " + ioEx.getMessage());
        throw ioEx;
    } catch (NullPointerException ex) {
        throw ex;
    } catch (Exception ex) {
        ex.printStackTrace(System.err);
        fail("Exception: " + ex.getClass().getName() + ": " + ex.getMessage());
    }
}
/**
 * Returns the string value of the first of the properties in the uriDescriptionList for the
 * given resource (identified by its URI). If the resource has none of those properties, its URI
 * is returned. The value is obtained by querying the endpoint, and the endpoint is queried
 * repeatedly until it gives a response (whether a value or the lack of one).
 *
 * <p>It is highly recommended that the list contain properties such as labels or titles, with
 * text values.
 *
 * @param uri the URI for which a label is required
 * @return a String value, either a label for the parameter or its URI if no label is obtained
 *     from the endpoint
 */
private String getLabelForUri(String uri) {
    String result;

    if (uriLabelCache.containsKey(uri)) {
        return uriLabelCache.get(uri);
    }

    for (String prop : uriDescriptionList) {
        String innerQuery = "SELECT ?r WHERE {<" + uri + "> <" + prop + "> ?r } LIMIT 1";

        try {
            Query query = QueryFactory.create(innerQuery);
            boolean keepTrying = true;
            while (keepTrying) {
                keepTrying = false;
                // Create a fresh execution per attempt: a QueryExecution cannot be
                // reused once closed, so closing it in the loop's finally block and
                // then retrying on the same instance would fail forever.
                QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
                try {
                    ResultSet results = qExec.execSelect();

                    if (results.hasNext()) {
                        QuerySolution sol = results.nextSolution();
                        result = EEASettings.parseForJson(sol.getLiteral("r").getLexicalForm());
                        if (!result.isEmpty()) {
                            uriLabelCache.put(uri, result);
                            return result;
                        }
                    }
                } catch (Exception e) {
                    keepTrying = true;
                    logger.warn("Could not get label for uri {}. Retrying.", uri);
                } finally {
                    qExec.close();
                }
            }
        } catch (QueryParseException qpe) {
            logger.error("Exception for query {}. The label cannot be obtained", innerQuery);
        }
    }
    return uri;
}
/**
 * Get a set of unique queryObjName values returned from a select query
 *
 * <p>Used to retrieve sets of modified objects used in sync
 *
 * @param rdfQuery query to execute
 * @param queryObjName name of the object returned
 * @return set of values for queryObjName in the rdfQuery result
 */
HashSet<String> executeSyncQuery(String rdfQuery, String queryObjName) {
    HashSet<String> rdfUrls = new HashSet<String>();

    Query query;
    try {
        query = QueryFactory.create(rdfQuery);
    } catch (QueryParseException qpe) {
        logger.warn(
            "Could not parse [{}]. Please provide a relevant query. {}",
            rdfQuery, qpe.getLocalizedMessage());
        return null;
    }

    QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);

    try {
        ResultSet results = qExec.execSelect();

        while (results.hasNext()) {
            QuerySolution sol = results.nextSolution();
            try {
                String value = sol.getResource(queryObjName).toString();
                rdfUrls.add(value);
            } catch (NoSuchElementException e) {
                logger.error("Encountered a NoSuchElementException: " + e.getLocalizedMessage());
                return null;
            }
        }
    } catch (Exception e) {
        logger.error(
            "Encountered a [{}] while querying the endpoint for sync",
            e.getLocalizedMessage());
        return null;
    } finally {
        qExec.close();
    }

    return rdfUrls;
}
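/*
 * A minimal sketch of calling executeSyncQuery from within the harvester class;
 * the method name sketchSyncQueryUsage and the query string are illustrative
 * placeholders, not part of this project. Note that a null return signals
 * failure, as distinct from an empty (but successful) result set.
 */
private void sketchSyncQueryUsage() {
    HashSet<String> modified =
        executeSyncQuery("SELECT DISTINCT ?resource WHERE { ?resource ?p ?o }", "resource");
    if (modified == null) {
        logger.error("Sync query failed; aborting synchronization");
        return;
    }
    logger.info("Found {} modified resources", modified.size());
}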
void runTestAsk(Query query, QueryExecution qe) throws Exception {
    boolean result = qe.execAsk();

    if (results != null) {
        if (results.isBoolean()) {
            boolean b = results.getBooleanResult();
            assertEquals("ASK test results do not match", b, result);
        } else {
            Model resultsAsModel = results.getModel();
            StmtIterator sIter =
                results.getModel().listStatements(null, RDF.type, ResultSetGraphVocab.ResultSet);
            if (!sIter.hasNext())
                throw new QueryTestException("Can't find the ASK result");
            Statement s = sIter.nextStatement();
            if (sIter.hasNext())
                throw new QueryTestException("Too many result sets in ASK result");
            Resource r = s.getSubject();
            Property p = resultsAsModel.createProperty(ResultSetGraphVocab.getURI() + "boolean");

            boolean x = r.getRequiredProperty(p).getBoolean();
            if (x != result)
                assertEquals("ASK test results do not match", x, result);
        }
    }
}
void runTestDescribe(Query query, QueryExecution qe) throws Exception {
    Model resultsActual = qe.execDescribe();
    compareGraphResults(resultsActual, query);
}
void runTestConstruct(Query query, QueryExecution qe) throws Exception {
    // Do the query!
    Model resultsActual = qe.execConstruct();
    compareGraphResults(resultsActual, query);
}
/**
 * Query SPARQL endpoint with a DESCRIBE query
 *
 * @param qExec QueryExecution encapsulating the query
 * @return model retrieved by querying the endpoint
 */
private Model getDescribeModel(QueryExecution qExec) {
    return qExec.execDescribe(ModelFactory.createDefaultModel());
}
/**
 * Query SPARQL endpoint with a CONSTRUCT query
 *
 * @param qExec QueryExecution encapsulating the query
 * @return model retrieved by querying the endpoint
 */
private Model getConstructModel(QueryExecution qExec) {
    return qExec.execConstruct(ModelFactory.createDefaultModel());
}
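/*
 * A sketch of how the getSelectModel/getConstructModel/getDescribeModel helpers
 * above might be dispatched on the parsed query's type. The method name
 * harvestModel is hypothetical; the dispatch simply mirrors Jena's
 * Query.isSelectType()/isConstructType()/isDescribeType() checks.
 */
private Model harvestModel(Query query, QueryExecution qExec) {
    if (query.isSelectType()) return getSelectModel(qExec);
    if (query.isConstructType()) return getConstructModel(qExec);
    if (query.isDescribeType()) return getDescribeModel(qExec);
    throw new IllegalArgumentException("Harvesting supports only SELECT, CONSTRUCT and DESCRIBE");
}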
/**
 * Starts a harvester with predefined queries to synchronize with the changes from the SPARQL
 * endpoint.
 */
public boolean sync() {
    logger.info("Sync resources newer than {}", startTime);

    String rdfQueryTemplate =
        "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> "
            + "SELECT DISTINCT ?resource WHERE { "
            + " GRAPH ?graph { %s }"
            + " ?graph <%s> ?time . %s "
            + " FILTER (?time > xsd:dateTime(\"%s\")) }";

    String queryStr =
        String.format(rdfQueryTemplate, syncConditions, syncTimeProp, graphSyncConditions, startTime);
    Set<String> syncUris = executeSyncQuery(queryStr, "resource");

    if (syncUris == null) {
        logger.error("Errors occurred during sync procedure. Aborting!");
        return false;
    }

    /*
     * If desired, also query for old data whose sync conditions have changed.
     *
     * This option is useful when the application indexes resources that match
     * certain conditions: if such resources are modified so that they no longer
     * match the initial conditions, they would otherwise never be synchronized.
     * When syncOldData is true, modified resources that no longer match the
     * conditions are deleted.
     */
    int deleted = 0;
    int count = 0;
    if (this.syncOldData) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
        queryStr =
            String.format(
                rdfQueryTemplate, syncConditions, syncTimeProp, graphSyncConditions,
                sdf.format(new Date(0)));

        HashSet<String> allIndexURIs = executeSyncQuery(queryStr, "resource");

        if (allIndexURIs == null) {
            logger.error("Errors occurred during modified content sync query. Aborting!");
            return false;
        }

        deleted = removeMissingUris(allIndexURIs);
    }

    /* Prepare a series of bulks of URIs to be described, so we make a smaller
     * number of calls to the SPARQL endpoint. */
    ArrayList<ArrayList<String>> bulks = new ArrayList<ArrayList<String>>();
    ArrayList<String> currentBulk = new ArrayList<String>();

    for (String uri : syncUris) {
        currentBulk.add(uri);
        if (currentBulk.size() == EEASettings.DEFAULT_BULK_SIZE) {
            bulks.add(currentBulk);
            currentBulk = new ArrayList<String>();
        }
    }
    if (currentBulk.size() > 0) {
        bulks.add(currentBulk);
    }

    /* Execute RDF queries for the resources in each bulk */
    for (ArrayList<String> bulk : bulks) {
        String syncQuery = getSyncQueryStr(bulk);

        try {
            Query query = QueryFactory.create(syncQuery);
            QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
            try {
                Model constructModel = ModelFactory.createDefaultModel();
                qExec.execConstruct(constructModel);
                BulkRequestBuilder bulkRequest = client.prepareBulk();

                /* When adding the model to ES, do not use toDescribeURIs,
                 * as the query has already returned the correct labels. */
                addModelToES(constructModel, bulkRequest, false);
                count += bulk.size();
            } catch (Exception e) {
                logger.error("Error while querying for modified content. {}", e.getLocalizedMessage());
                return false;
            } finally {
                qExec.close();
            }
        } catch (QueryParseException qpe) {
            logger.warn(
                "Could not parse Sync query. Please provide a relevant query. {}",
                qpe.getLocalizedMessage());
            return false;
        }
    }

    logger.info(
        "Finished synchronisation: Deleted {}, Updated {}/{}", deleted, count, syncUris.size());
    return true;
}
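/*
 * A sketch of what the formatted sync query looks like, using hypothetical
 * values for the template parameters (syncConditions, syncTimeProp,
 * graphSyncConditions, startTime); graphSyncConditions is left empty here.
 * Any resource matching the conditions inside a graph modified after
 * startTime is selected for re-indexing.
 */
String exampleSyncQuery = String.format(
    "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> "
        + "SELECT DISTINCT ?resource WHERE { "
        + " GRAPH ?graph { %s }"
        + " ?graph <%s> ?time . %s "
        + " FILTER (?time > xsd:dateTime(\"%s\")) }",
    "?resource a <http://example.org/Document>",   // hypothetical syncConditions
    "http://purl.org/dc/terms/modified",           // hypothetical syncTimeProp
    "",                                            // graphSyncConditions (empty)
    "2015-01-01T00:00:00");                        // hypothetical startTime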