  /** Serializes a TupleQueryResult to a String using the configured tuple result format. */
  private String toString(TupleQueryResult results)
      throws QueryResultHandlerException, TupleQueryResultHandlerException,
          QueryEvaluationException, UnsupportedEncodingException {
    TupleQueryResultFormat format = getTupleFormat();
    ByteArrayOutputStream out = new ByteArrayOutputStream(4096);
    TupleQueryResultWriter writer = QueryResultIO.createTupleWriter(format, out);
    writer.startDocument();
    writer.startHeader();
    writer.handleLinks(Arrays.<String>asList());
    // report(...) starts the query result, streams every binding set, ends it, and closes the TupleQueryResult.
    QueryResults.report(results, writer);

    return out.toString("UTF-8");
  }
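
A minimal usage sketch for the helper above (not part of the original example): it assumes an RDF4J RepositoryConnection field named conn and a hypothetical method name, evaluates a SPARQL query, and serializes the bindings with the format returned by getTupleFormat().

  // Sketch only: "conn" and the method name are assumptions, not part of the original code.
  private String evaluateAndSerialize(String sparql) throws Exception {
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
    try (TupleQueryResult result = query.evaluate()) {
      return toString(result);
    }
  }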
Example #2
  /**
   * Evaluates the given query, optionally dumping the bindings to a CSV file,
   * and returns the number of results.
   */
  @Override
  public int runQueryDebug(Query q, int run, boolean showResult) throws Exception {
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    TupleQueryResult res = query.evaluate();
    int resCounter = 0;

    // The CSV writer is created only when results should be dumped and is started
    // lazily once the binding names are known (see writerStarted below).
    TupleQueryResultWriter writer = null;
    boolean writerStarted = false;

    if (showResult) {
      // Write the bindings to <baseDir>/result/<queryIdentifier>_<run>.csv
      OutputStream results =
          new FileOutputStream(
              Config.getConfig().getBaseDir()
                  + "/result/"
                  + q.getIdentifier()
                  + "_"
                  + run
                  + ".csv");
      TupleQueryResultWriterFactory factory = new SPARQLResultsCSVWriterFactory();
      writer = factory.getWriter(results);
    }

    while (res.hasNext()) {
      // Abort promptly if the executing thread has been interrupted.
      if (isInterrupted()) throw new QueryEvaluationException("Thread has been interrupted.");
      BindingSet bindings = res.next();
      if (showResult) {
        if (!writerStarted) {
          // Start the CSV document once the binding names are known.
          writer.startQueryResult(res.getBindingNames());
          writerStarted = true;
        }

        writer.handleSolution(bindings);
      }

      resCounter++;
      // Report each binding set to the early-results handler as it is consumed.
      earlyResults.handleResult(bindings, resCounter);
    }

    // End the CSV document only if it was actually started (i.e., at least one result arrived).
    if (writerStarted) writer.endQueryResult();

    return resCounter;
  }
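
When only the CSV dump is needed (no result counting, early-result callback, or interruption check), the manual loop above can be collapsed onto QueryResults.report(...), which starts the writer, streams every solution, ends the query result, and closes it. A minimal sketch under that assumption; the method name and the path parameter are hypothetical:

  // Sketch only: not part of the original example.
  private void writeCsvSketch(TupleQuery query, String path) throws Exception {
    try (OutputStream out = new FileOutputStream(path)) {
      TupleQueryResultWriter writer = new SPARQLResultsCSVWriterFactory().getWriter(out);
      // report(...) handles startQueryResult/handleSolution/endQueryResult internally.
      QueryResults.report(query.evaluate(), writer);
    }
  }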