@Override
public int runQuery(Query q, int run) throws Exception {
    // Prepare and evaluate the SPARQL query, then drain the result set,
    // forwarding each binding set to the early-results handler.
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    TupleQueryResult result = (TupleQueryResult) tupleQuery.evaluate();
    int count = 0;
    try {
        while (result.hasNext()) {
            // Abort promptly if the benchmark thread was asked to stop.
            if (isInterrupted()) {
                throw new QueryEvaluationException("Thread has been interrupted.");
            }
            BindingSet bindingSet = result.next();
            count++;
            earlyResults.handleResult(bindingSet, count);
        }
    } finally {
        // Always release the underlying query result cursor.
        result.close();
    }
    return count;
}
@Override
public int runQueryDebug(Query q, int run, boolean showResult) throws Exception {
    // Evaluates the query like runQuery, optionally dumping all solutions to
    // <baseDir>/result/<queryId>_<run>.csv when showResult is true.
    //
    // Fixes vs. the previous version: the TupleQueryResult and the CSV
    // FileOutputStream were never closed, leaking a cursor and a file handle
    // on every debug run (and on any exception mid-iteration).
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    TupleQueryResult res = (TupleQueryResult) query.evaluate();
    int resCounter = 0;
    OutputStream resultStream = null;
    TupleQueryResultWriter writer = null;
    boolean writerStarted = false;
    try {
        if (showResult) {
            resultStream = new FileOutputStream(
                    Config.getConfig().getBaseDir() + "/result/" + q.getIdentifier() + "_" + run + ".csv");
            TupleQueryResultWriterFactory factory = new SPARQLResultsCSVWriterFactory();
            writer = factory.getWriter(resultStream);
        }
        while (res.hasNext()) {
            // Abort promptly if the benchmark thread was asked to stop.
            if (isInterrupted()) {
                throw new QueryEvaluationException("Thread has been interrupted.");
            }
            BindingSet bindings = res.next();
            if (showResult) {
                // Start the writer lazily so the header uses the actual
                // binding names of this result.
                if (!writerStarted) {
                    writer.startQueryResult(res.getBindingNames());
                    writerStarted = true;
                }
                writer.handleSolution(bindings);
            }
            resCounter++;
            earlyResults.handleResult(bindings, resCounter);
        }
        if (writerStarted) {
            writer.endQueryResult();
        }
        return resCounter;
    } finally {
        // Release the result cursor and the CSV stream even on failure.
        res.close();
        if (resultStream != null) {
            resultStream.close();
        }
    }
}