/**
 * Runs a SPARQL SELECT query and renders the result according to {@code queryOutput}.
 *
 * <p>BINARY output yields a {@code List<Map<String, String>>} of variable-name to
 * string-value bindings, one map per solution; XML output yields the SPARQL/XML result
 * document as a String.
 *
 * @param query SPARQL SELECT query text
 * @param auth optional authorization token; bound as {@code CONF_QUERY_AUTH} when non-empty
 * @param infer optional inference flag; bound as {@code CONF_INFER} when non-null
 * @return a {@code List<Map<String, String>>} (BINARY) or an XML {@code String} (XML)
 * @throws IllegalArgumentException if {@code queryOutput} is neither BINARY nor XML
 */
protected Object performSelect(String query, String auth, Boolean infer)
    throws RepositoryException, MalformedQueryException, QueryEvaluationException,
        TupleQueryResultHandlerException {
  TupleQuery tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, query);
  if (auth != null && auth.length() > 0) {
    tupleQuery.setBinding(CONF_QUERY_AUTH, valueFactory.createLiteral(auth));
  }
  if (infer != null) {
    tupleQuery.setBinding(CONF_INFER, valueFactory.createLiteral(infer));
  }
  if (CbSailEndpoint.CbSailOutput.BINARY.equals(queryOutput)) {
    // Fix: properly typed instead of the raw List/ArrayList used previously.
    final List<Map<String, String>> listOutput = new ArrayList<Map<String, String>>();
    TupleQueryResultHandlerBase handler =
        new TupleQueryResultHandlerBase() {
          @Override
          public void handleSolution(BindingSet bindingSet)
              throws TupleQueryResultHandlerException {
            // One map per solution: binding name -> lexical value.
            Map<String, String> map = new HashMap<String, String>();
            for (String s : bindingSet.getBindingNames()) {
              map.put(s, bindingSet.getBinding(s).getValue().stringValue());
            }
            listOutput.add(map);
          }
        };
    tupleQuery.evaluate(handler);
    return listOutput;
  } else if (CbSailEndpoint.CbSailOutput.XML.equals(queryOutput)) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(baos);
    tupleQuery.evaluate(sparqlWriter);
    // NOTE(review): decodes with the platform default charset while the XML writer emits
    // UTF-8; an explicit UTF-8 charset would be safer — left unchanged to preserve behavior.
    return new String(baos.toByteArray());
  } else {
    throw new IllegalArgumentException("Query Output[" + queryOutput + "] is not recognized");
  }
}
Example #2
0
  /**
   * Finds every property declared disjoint with {@code startingProperty} through
   * owl:AllDisjointProperties axioms in the repository.
   *
   * @param startingProperty IRI of the property whose disjoint properties are wanted
   * @return string forms of all distinct disjoint properties, excluding the starting one
   * @throws RepositoryException if obtaining or using the connection fails
   * @throws MalformedQueryException if the generated query text is invalid
   * @throws QueryEvaluationException if query evaluation fails
   */
  public Collection<String> getAllDisjointProperties(String startingProperty)
      throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    Set<String> allDisjointProperties = new HashSet<>();
    RepositoryConnection con = this.repository.getConnection();
    try {
      // NOTE(review): startingProperty is concatenated directly into the query text, so this
      // is only safe for trusted IRIs (SPARQL injection risk otherwise); setBinding would be
      // safer — confirm callers before changing.
      // rdf:rest*/rdf:first walks the RDF collection referenced by owl:members.
      TupleQuery allDisjointPropertiesQuery =
          con.prepareTupleQuery(
              QueryLanguage.SPARQL,
              "SELECT ?items  "
                  + "WHERE { ?y a owl:AllDisjointProperties ."
                  + "?y owl:members ?members ."
                  + "?members rdf:rest*/rdf:first <"
                  + startingProperty
                  + "> ."
                  + "?members rdf:rest*/rdf:first ?items ."
                  + "FILTER (?items != <"
                  + startingProperty
                  + ">) .}");

      TupleQueryResult allDisjointPropertiesResult = allDisjointPropertiesQuery.evaluate();
      try {
        while (allDisjointPropertiesResult.hasNext()) {
          BindingSet bindingSet = allDisjointPropertiesResult.next();
          Value name = bindingSet.getValue("items");
          // Value.toString() is stored, so IRIs keep their plain form here.
          allDisjointProperties.add(name.toString());
        }
      } finally {
        allDisjointPropertiesResult.close();
      }
    } finally {
      con.close();
    }
    return allDisjointProperties;
  }
Example #3
0
 /**
  * Collects every class whose owl:intersectionOf list contains {@code startingClass}.
  *
  * @param startingClass IRI of the class to look for inside intersection lists
  * @return string forms of the matching classes (possibly empty)
  * @throws RepositoryException if obtaining or using the connection fails
  * @throws MalformedQueryException if the generated query text is invalid
  * @throws QueryEvaluationException if query evaluation fails
  */
 public Collection<String> getIntersectionOf(String startingClass)
     throws RepositoryException, MalformedQueryException, QueryEvaluationException {
   Set<String> intersectionOfClasses = new HashSet<>();
   RepositoryConnection connection = this.repository.getConnection();
   try {
     // rdf:rest*/rdf:first walks the RDF list that owl:intersectionOf points to.
     String sparql =
         "SELECT ?c WHERE {  ?c owl:intersectionOf ?l .  ?l rdf:rest*/rdf:first <"
             + startingClass
             + "> .}";
     TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL, sparql);
     TupleQueryResult result = query.evaluate();
     try {
       while (result.hasNext()) {
         BindingSet solution = result.next();
         Value match = solution.getValue("c");
         intersectionOfClasses.add(match.toString());
       }
     } finally {
       result.close();
     }
   } finally {
     connection.close();
   }
   return intersectionOfClasses;
 }
Example #4
0
  /**
   * Execute a SELECT SPARQL query against the graphs.
   *
   * @param qs SELECT SPARQL query
   * @return list of solutions, each containing a hashmap of bindings, or {@code null} if the
   *     query fails for any reason (the original error-swallowing contract is preserved)
   */
  public List<HashMap<String, Value>> runSPARQL(String qs) {
    try {
      RepositoryConnection con = currentRepository.getConnection();
      try {
        TupleQuery query = con.prepareTupleQuery(org.openrdf.query.QueryLanguage.SPARQL, qs);
        TupleQueryResult qres = query.evaluate();
        try {
          ArrayList<HashMap<String, Value>> reslist = new ArrayList<HashMap<String, Value>>();
          while (qres.hasNext()) {
            BindingSet b = qres.next();
            // Copy every binding of this solution into a plain map.
            HashMap<String, Value> hm = new HashMap<String, Value>();
            for (String n : b.getBindingNames()) {
              hm.put(n, b.getValue(n));
            }
            reslist.add(hm);
          }
          return reslist;
        } finally {
          // Fix: the query result was previously never closed (resource leak).
          qres.close();
        }
      } finally {
        con.close();
      }
    } catch (Exception e) {
      // Deliberately swallowed to keep the original contract; callers must handle null.
      e.printStackTrace();
    }
    return null;
  }
  /**
   * Runs a sample SELECT over the shared connection {@code con}, printing the binding names
   * and then each (x, y) solution — where ?y is expected to be a numeric age literal.
   */
  public void tupleQuery()
      throws QueryEvaluationException, RepositoryException, MalformedQueryException {
    String queryString =
        "PREFIX : \n<http://it.unibz.krdb/obda/ontologies/test/translation/onto2.owl#>\n "
            + "SELECT ?x ?y WHERE { ?x a :Person. ?x :age ?y } ";
    TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    TupleQueryResult result = tupleQuery.evaluate();
    try {
      System.out.println(result.getBindingNames());

      while (result.hasNext()) {
        BindingSet bindingSet = result.next();
        Value valueOfX = bindingSet.getValue("x");
        // ?y is assumed to be a Literal with a numeric lexical form.
        Literal valueOfY = (Literal) bindingSet.getValue("y");
        System.out.println(valueOfX.stringValue() + ", " + valueOfY.floatValue());
      }
    } finally {
      // Fix: close the result even when iteration throws (it was only closed on success).
      result.close();
    }
  }
  /**
   * Evaluates a SPARQL SELECT against the endpoint's connection, pre-binding each incoming
   * binding that is a declared variable of the SERVICE clause, and re-inserting the supplied
   * bindings into every solution of the returned iteration.
   */
  @Override
  public CloseableIteration<BindingSet, QueryEvaluationException> evaluate(
      String sparqlQueryString,
      BindingSet bindings,
      String baseUri,
      QueryType type,
      Service service)
      throws QueryEvaluationException {
    RepositoryConnection conn = endpoint.getConn();
    try {
      TupleQuery preparedQuery =
          conn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlQueryString, baseUri);

      // Only bind variables that actually occur in the SERVICE clause.
      for (Binding binding : bindings) {
        if (service.getServiceVars().contains(binding.getName())) {
          preparedQuery.setBinding(binding.getName(), binding.getValue());
        }
      }

      return new InsertBindingsIteration(preparedQuery.evaluate(), bindings);
    } catch (OpenRDFException e) {
      throw new QueryEvaluationException(e);
    }
  }
 /**
  * Runs a SPARQL SELECT query and maps every solution through {@code mapper}.
  *
  * @param queryString SPARQL SELECT query text
  * @param mapper converts each solution row to a result object; a {@code null} mapping is
  *     skipped
  * @return mapped objects in result order (possibly empty, never {@code null})
  * @throws RdfException wrapping repository, parse, or evaluation failures
  */
 public <T> List<T> queryList(String queryString, BindingSetMapper<T> mapper) throws RdfException {
   final List<T> mappedRows = new LinkedList<T>();
   RepositoryConnection connection = null;
   TupleQueryResult rows = null;
   try {
     connection = getConnection();
     rows = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate();
     // The row index advances for every solution, including ones the mapper rejects.
     for (int rowIndex = 0; rows.hasNext(); rowIndex++) {
       final T mapped = mapper.map(rows.next(), rowIndex);
       if (mapped != null) {
         mappedRows.add(mapped);
       }
     }
   } catch (RepositoryException e) {
     throw new RdfException(e);
   } catch (MalformedQueryException e) {
     throw new RdfException(e);
   } catch (QueryEvaluationException e) {
     throw new RdfException(e);
   } finally {
     cleanup(rows);
   }
   return mappedRows;
 }
  /**
   * Populates {@code graphs} and {@code datasets} from a single SPARQL query whose solutions
   * are grouped by graph (?g) and structure (?dsd): a change in ?g starts a new graph (and a
   * new structure); a change in ?dsd alone starts a new structure within the current graph.
   * Errors are logged and may leave the fields partially populated, as before.
   */
  private void calculateGraphsAndDataSets() {
    try {
      RepositoryConnection conn = repository.getConnection();
      try {
        TupleQuery q = conn.prepareTupleQuery(QueryLanguage.SPARQL, SparqlUtils.PREFIXES + QUERY);
        TupleQueryResult results = q.evaluate();
        try {
          graphs = new LinkedList<DataCubeGraph>();
          datasets = new LinkedList<DataSet>();
          String lastG = null;
          String lastDSD = null;
          SparqlDCGraph dcGraph;
          SparqlStructure dcStructure = null;
          Collection<DataSet> graphDataSets = null;
          Collection<Structure> graphStructures = null;
          Collection<DataSet> structDataSets = null;
          while (results.hasNext()) {
            BindingSet set = results.next();
            String g = set.getValue("g").stringValue();
            String ds = set.getValue("ds").stringValue();
            String dsd = set.getValue("dsd").stringValue();

            if (!g.equals(lastG)) {
              // New graph: start fresh dataset/structure collections for it.
              dcGraph = new SparqlDCGraph(repository, g);
              graphDataSets = new LinkedList<DataSet>();
              graphStructures = new LinkedList<Structure>();
              dcGraph.setDatasets(graphDataSets);
              dcGraph.setStructures(graphStructures);
              graphs.add(dcGraph);
              // Every new graph also begins a new structure.
              dcStructure = new SparqlStructure(repository, dsd, g);
              structDataSets = new LinkedList<DataSet>();
              dcStructure.setDatasets(structDataSets);
              graphStructures.add(dcStructure);
            } else if (!dsd.equals(lastDSD)) {
              // Same graph, new structure.
              dcStructure = new SparqlStructure(repository, dsd, g);
              structDataSets = new LinkedList<DataSet>();
              dcStructure.setDatasets(structDataSets);
              graphStructures.add(dcStructure);
            }

            // Each solution contributes one dataset, linked to the current structure.
            SparqlDataSet dcDataSet = new SparqlDataSet(repository, ds, g);
            dcDataSet.setStructure(dcStructure);
            graphDataSets.add(dcDataSet);
            structDataSets.add(dcDataSet);
            datasets.add(dcDataSet);

            lastG = g;
            lastDSD = dsd;
          }
        } finally {
          // Fix: the query result was never closed (resource leak).
          results.close();
        }
      } finally {
        // Fix: the connection was never closed (resource leak).
        conn.close();
      }
    } catch (RepositoryException ex) {
      Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex);
    } catch (MalformedQueryException ex) {
      Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex);
    } catch (QueryEvaluationException ex) {
      Logger.getLogger(SparqlDCRepository.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
Example #9
0
  /**
   * Evaluates the SPARQL SELECT query from the specified query pair on the repository.
   *
   * @param query Query pair.
   * @return Query result set.
   * @throws RepositoryException If no connection could be established, or connection fails.
   * @throws MalformedQueryException On query-related errors.
   * @throws QueryEvaluationException On query-related errors.
   */
  public SparqlResultSet query(QueryPair query)
      throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    RepositoryConnection conn = repo.getConnection();
    try {
      TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query.getSparqlQuery());
      // NOTE(review): SparqlResultSet is assumed to consume tq.evaluate() eagerly, since the
      // original code also closed the connection before returning — confirm.
      return new SparqlResultSet(
          tq.evaluate(), query.getName(), query.getSparqlEntityIdVariables());
    } finally {
      // Fix: close the connection even when prepare/evaluate throws (it was leaked on error).
      conn.close();
    }
  }
  /**
   * Evaluates {@code queryString} as a SPARQL SELECT query on the static repository
   * {@code rep}.
   *
   * <p>Assumes the repository is already initialized and that {@code queryString} is a SPARQL
   * SELECT query.
   *
   * @param queryString SPARQL SELECT query
   * @return the open query result; the caller is responsible for closing it
   * @throws RepositoryException if obtaining the connection fails
   * @throws MalformedQueryException if the query cannot be parsed
   * @throws QueryEvaluationException if evaluation fails
   */
  public static TupleQueryResult evaluateQuery(String queryString)
      throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    /* input: assumes repository is already initialized. assumes queryString is a SPARQL query.
    output: queries rep with queryString. returns answer.*/
    TupleQueryResult result = null;

    // connect to repository
    // NOTE(review): this connection is never closed and cannot be closed here, because the
    // returned TupleQueryResult is still backed by it — each call leaks a connection unless
    // lifetime is managed elsewhere; verify against callers.
    RepositoryConnection conn = rep.getConnection();

    // evaluate given query
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    result = tupleQuery.evaluate();
    return result;
  }
Example #11
0
  /**
   * Runs a SPARQL SELECT query (specified by a query string) on the repository.
   *
   * @param sparqlString SELECT query.
   * @return all solutions, fully materialized.
   * @throws RepositoryException If no connection could be established or connection fails.
   * @throws MalformedQueryException On query-related errors.
   * @throws QueryEvaluationException On query-related errors.
   */
  public List<BindingSet> plainSparqlQuery(String sparqlString)
      throws RepositoryException, MalformedQueryException, QueryEvaluationException {

    RepositoryConnection conn = repo.getConnection();
    try {
      TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, sparqlString);

      List<BindingSet> ret = new ArrayList<BindingSet>();
      TupleQueryResult res = query.evaluate();
      try {
        while (res.hasNext()) {
          ret.add(res.next());
        }
      } finally {
        // Fix: the result was never closed.
        res.close();
      }
      return ret;
    } finally {
      // Fix: the connection leaked whenever prepare/evaluate/iteration threw.
      conn.close();
    }
  }
Example #12
0
 /**
  * Collects the distinct values of {@code predicate} within the named graph {@code context}.
  *
  * @param context named graph to query
  * @param predicate predicate whose object values are collected
  * @return distinct values as strings (possibly empty)
  * @throws RepositoryException on connection failure, or wrapping query parse/evaluation
  *     errors
  */
 public List<String> getValuesFor(URI context, URI predicate) throws RepositoryException {
   RepositoryConnection con = repository.getConnection();
   List<String> values = new ArrayList<String>();
   try {
     TupleQuery query =
         con.prepareTupleQuery(
             QueryLanguage.SPARQL,
             "SELECT distinct ?v WHERE {GRAPH <" + context + "> {?d <" + predicate + "> ?v}}");
     TupleQueryResult statements = query.evaluate();
     try {
       while (statements.hasNext()) {
         values.add(statements.next().getBinding("v").getValue().stringValue());
       }
     } finally {
       // Close the result even when iteration throws.
       statements.close();
     }
   } catch (MalformedQueryException e) {
     // Fix: the original 'return values' inside finally silently discarded every exception
     // raised in the try block and returned a partial list instead. Surface failures as the
     // declared RepositoryException.
     throw new RepositoryException(e);
   } catch (QueryEvaluationException e) {
     throw new RepositoryException(e);
   } finally {
     con.close();
   }
   return values;
 }
Example #13
0
  /**
   * Executes the given query, counting its solutions and streaming each one to
   * {@code earlyResults}.
   *
   * @param q query to run
   * @param run run number (unused here; part of the benchmark interface)
   * @return the number of solutions
   * @throws Exception on evaluation errors, or QueryEvaluationException when interrupted
   */
  @Override
  public int runQuery(Query q, int run) throws Exception {
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    // Fix: dropped the redundant cast — TupleQuery.evaluate() already returns
    // TupleQueryResult.
    TupleQueryResult res = query.evaluate();
    int resCounter = 0;

    try {
      while (res.hasNext()) {
        // Cooperative cancellation check on every solution.
        if (isInterrupted()) {
          throw new QueryEvaluationException("Thread has been interrupted.");
        }
        BindingSet bindings = res.next();
        resCounter++;
        earlyResults.handleResult(bindings, resCounter);
      }
    } finally {
      res.close();
    }
    return resCounter;
  }
Example #14
0
  /**
   * Executes the given query, counting solutions and streaming them to {@code earlyResults};
   * when {@code showResult} is set, additionally writes every solution to
   * {@code <baseDir>/result/<identifier>_<run>.csv}.
   *
   * @param q query to run
   * @param run run number, used in the result file name
   * @param showResult whether to also dump the result to a CSV file
   * @return the number of solutions
   * @throws Exception on I/O or evaluation errors, or QueryEvaluationException when
   *     interrupted
   */
  @Override
  public int runQueryDebug(Query q, int run, boolean showResult) throws Exception {
    TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL, q.getQuery());
    // Redundant cast removed: evaluate() already returns TupleQueryResult.
    TupleQueryResult res = query.evaluate();
    int resCounter = 0;

    TupleQueryResultWriter writer = null;
    boolean writerStarted = false;
    OutputStream results = null;

    try {
      if (showResult) {
        results =
            new FileOutputStream(
                Config.getConfig().getBaseDir()
                    + "/result/"
                    + q.getIdentifier()
                    + "_"
                    + run
                    + ".csv");
        TupleQueryResultWriterFactory factory = new SPARQLResultsCSVWriterFactory();
        writer = factory.getWriter(results);
      }

      while (res.hasNext()) {
        if (isInterrupted()) {
          throw new QueryEvaluationException("Thread has been interrupted.");
        }
        BindingSet bindings = res.next();
        if (showResult) {
          // The writer header is emitted lazily, once, before the first solution.
          if (!writerStarted) {
            writer.startQueryResult(res.getBindingNames());
            writerStarted = true;
          }
          writer.handleSolution(bindings);
        }

        resCounter++;
        earlyResults.handleResult(bindings, resCounter);
      }

      if (writerStarted) {
        writer.endQueryResult();
      }
    } finally {
      // Fix: the result and the CSV output stream were never closed (leaked on every call
      // and on every exception path).
      res.close();
      if (results != null) {
        results.close();
      }
    }

    return resCounter;
  }
  /**
   * Evaluates {@code queryDefinition} as a SPARQL SELECT over the shared connection and
   * converts every solution into a {@link SesameMatch} for the given query.
   *
   * @param query logical query the matches belong to
   * @param queryDefinition SPARQL text to prepare and evaluate
   * @return all matches, in result order
   */
  @Override
  public List<SesameMatch> runQuery(final Query query, final String queryDefinition)
      throws RepositoryException, MalformedQueryException, QueryEvaluationException {
    tupleQuery = connection.prepareTupleQuery(QueryLanguage.SPARQL, queryDefinition);

    final List<SesameMatch> matches = new ArrayList<>();
    final TupleQueryResult resultSet = tupleQuery.evaluate();
    try {
      while (resultSet.hasNext()) {
        matches.add(SesameMatch.createMatch(query, resultSet.next()));
      }
    } finally {
      resultSet.close();
    }

    return matches;
  }
  /**
   * Determines a human-readable suite name for a manifest: its rdfs:label when one exists in
   * the manifest repository, otherwise the last path segment of the manifest URL.
   */
  protected static String getManifestName(
      Repository manifestRep, RepositoryConnection con, String manifestFileURL)
      throws QueryEvaluationException, RepositoryException, MalformedQueryException {
    // First attempt: ask the repository for the manifest's rdfs:label.
    TupleQuery labelQuery =
        con.prepareTupleQuery(
            QueryLanguage.SERQL,
            "SELECT ManifestName FROM {ManifestURL} rdfs:label {ManifestName}");
    labelQuery.setBinding(
        "ManifestURL", manifestRep.getValueFactory().createURI(manifestFileURL));
    TupleQueryResult labels = labelQuery.evaluate();
    try {
      if (labels.hasNext()) {
        return labels.next().getValue("ManifestName").stringValue();
      }
    } finally {
      labels.close();
    }

    // No label found: fall back to the directory name embedded in the manifest URL.
    int lastSlash = manifestFileURL.lastIndexOf('/');
    int previousSlash = manifestFileURL.lastIndexOf('/', lastSlash - 1);
    return manifestFileURL.substring(previousSlash + 1, lastSlash);
  }
 /**
  * Parses the query string to determine its form, evaluates it, and returns the serialized
  * result: SPARQL/XML for SELECT, RDF/XML for CONSTRUCT/DESCRIBE, and a wrapped boolean for
  * ASK.
  *
  * @param queryString SPARQL query of any form
  * @return the serialized query result (empty for unrecognized parsed query types)
  */
 public String query(String queryString)
     throws RepositoryException, MalformedQueryException, QueryEvaluationException,
         TupleQueryResultHandlerException, RDFHandlerException, IOException {
   ByteArrayOutputStream bout = new ByteArrayOutputStream();
   RepositoryConnection con = getRepository().getConnection();
   try {
     SPARQLParser parser = new SPARQLParser();
     ParsedQuery parsedQuery = parser.parseQuery(queryString, null);
     if (parsedQuery instanceof ParsedTupleQuery) {
       SPARQLResultsXMLWriter writer = new SPARQLResultsXMLWriter(bout);
       TupleQuery query = con.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
       query.evaluate(writer);
     } else if (parsedQuery instanceof ParsedGraphQuery) {
       RDFXMLPrettyWriter writer = new RDFXMLPrettyWriter(bout);
       GraphQuery query = con.prepareGraphQuery(QueryLanguage.SPARQL, queryString);
       query.evaluate(writer);
     } else if (parsedQuery instanceof ParsedBooleanQuery) {
       BooleanQuery query = con.prepareBooleanQuery(QueryLanguage.SPARQL, queryString);
       boolean result = query.evaluate();
       String resultString = BOOLEAN_RESULT_RESULT_PREFIX + result + BOOLEAN_RESULT_RESULT_SUFFIX;
       bout.write(resultString.getBytes());
     }
   } finally {
     // Fix: the connection leaked whenever parsing or evaluation threw.
     con.close();
   }
   // NOTE(review): toString()/getBytes() use the platform default charset while the XML
   // writers emit UTF-8 — consider explicit UTF-8. Left unchanged to preserve behavior.
   return bout.toString();
 }
  /**
   * Regression test: a numeric FILTER ({@code ?age < 35}) over inline literal values must
   * match only the subject whose age literal (25) is below the bound, not the one with 45.
   */
  public void testInlineValuesLT() throws Exception {

    final BigdataSail sail = getSail();
    sail.initialize();
    final BigdataSailRepository repo = new BigdataSailRepository(sail);
    final BigdataSailRepositoryConnection cxn =
        (BigdataSailRepositoryConnection) repo.getConnection();
    cxn.setAutoCommit(false);

    try {

      final ValueFactory vf = sail.getValueFactory();

      // NOTE(review): these are URIs whose text merely looks like blank-node labels; they
      // are not actual blank nodes.
      URI A = vf.createURI("_:A");
      URI B = vf.createURI("_:B");
      URI X = vf.createURI("_:X");
      URI AGE = vf.createURI("_:AGE");
      Literal _25 = vf.createLiteral(25);
      Literal _45 = vf.createLiteral(45);

      // Two typed subjects, only one of which has age < 35.
      cxn.add(A, RDF.TYPE, X);
      cxn.add(B, RDF.TYPE, X);
      cxn.add(A, AGE, _25);
      cxn.add(B, AGE, _45);

      /*
       * Note: The either flush() or commit() is required to flush the
       * statement buffers to the database before executing any operations
       * that go around the sail.
       */
      cxn.flush(); // commit();

      if (log.isInfoEnabled()) {
        log.info("\n" + sail.getDatabase().dumpStore());
      }

      {
        // The query is built by splicing the URIs into the SPARQL text.
        String query =
            "select ?s ?age "
                + "WHERE { "
                + "  ?s <"
                + RDF.TYPE
                + "> <"
                + X
                + "> . "
                + "  ?s <"
                + AGE
                + "> ?age . "
                + "  FILTER( ?age < 35 ) . "
                + "}";

        final TupleQuery tupleQuery = cxn.prepareTupleQuery(QueryLanguage.SPARQL, query);
        TupleQueryResult result = tupleQuery.evaluate();

        // Expected: exactly the single solution (s=A, age=25).
        Collection<BindingSet> solution = new LinkedList<BindingSet>();
        solution.add(
            createBindingSet(new Binding[] {new BindingImpl("s", A), new BindingImpl("age", _25)}));

        compare(result, solution);
      }

    } finally {
      cxn.close();
      sail.__tearDownUnitTest();
    }
  }
  // sparql
  /**
   * Evicts expired graphs from the in-memory cache and from the store (via accumulated
   * DROP GRAPH statements), reloads the stream window, then runs an inferencing SPARQL query
   * for Professor instances, timing the evaluation and feeding the results into the
   * F-measure benchmark.
   */
  public void sparql() throws IOException, RepositoryException, QueryEvaluationException {

    // find all expired data to avoid them participating the query:
    this.toDeleteCounter = 0;
    this.dropQueryString = "";
    ArrayList<GraphIdCounterPair> expiredData = new ArrayList<GraphIdCounterPair>();
    LocalTime evictionTime = LocalTime.now();
    for (GraphIdCounterPair x : this.cacheContentOfGraphIds) {
      // One trace line per cached graph: counters plus arrival/expiration times.
      System.out.print(
          this.evictCounter
              + ", "
              + this.size
              + ", "
              + this.evictAmount
              + ", "
              + x.graphId
              + ", "
              + x.arrivalTime
              + ", "
              + x.expirationTime);
      if (x.expirationTime.isBefore(evictionTime)) {
        expiredData.add(x);
        // Accumulate one DROP GRAPH statement per expired graph.
        dropQueryString += "drop graph <" + x.graphId + ">;";
        System.out.println(", expired");
        ++toDeleteCounter;
      } else {
        System.out.println();
      }
    }
    System.out.println("[INFO] " + expiredData.size() + " data expired!");
    if (!expiredData.isEmpty()) {
      // delete expired data from the cache
      for (GraphIdCounterPair x : expiredData) {
        this.cacheContentOfGraphIds.remove(x);
      }
      // delete the expired data from the database
      QueryExecution qe =
          AGQueryExecutionFactory.create(AGQueryFactory.create(dropQueryString), model);
      qe.execAsk();
      qe.close();
    }

    // after deleting expired data, load the cache again
    this.streamEmulation();
    System.out.println(this.reasoner.getEntailmentRegime());
    this.infModel = new AGInfModel(this.reasoner, this.model);
    String queryString =
        "select distinct ?s "
            + "where { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type>"
            + "<http://swat.cse.lehigh.edu/onto/univ-bench.owl#Professor>.}";
    AGRepositoryConnection conn = this.client.getAGConn();
    TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
    tupleQuery.setIncludeInferred(true);
    // Only the evaluate() call is timed; the elapsed time feeds a running total.
    long sparqlStartTime = System.currentTimeMillis();
    TupleQueryResult resultSet = tupleQuery.evaluate();
    long sparqlEndTime = System.currentTimeMillis();
    this.aveSparql += (sparqlEndTime - sparqlStartTime);

    ArrayList<String> results = new ArrayList<String>();
    while (resultSet.hasNext()) {
      String result = resultSet.next().toString();
      // NOTE(review): prints the literal text "result", not the value — likely intended to
      // be System.out.println(result); confirm.
      System.out.println("result");
      int length = result.length();
      // NOTE(review): substring(3, length - 1) presumably strips a "[s=" prefix and a
      // trailing "]" from BindingSet.toString(); fragile — verify against the actual
      // toString format.
      results.add(result.substring(3, length - 1));
    }
    resultSet.close();

    this.fMeasureBench(results);
    this.infModel.close();
  }
Example #20
0
  /**
   * Command-line entry point: creates a Quest virtual repository from an OWL ontology and an
   * OBDA mapping file, evaluates the SPARQL query read from a file, and writes the result as
   * TSV (tuple queries) or Turtle (graph queries) to the optional output file or stdout.
   *
   * @param args owlfile obdafile queryfile [outputfile]
   */
  public static void main(String[] args) {
    Repository repo = null;
    org.openrdf.repository.RepositoryConnection conn = null;
    // check argument correctness
    if (args.length != 3 && args.length != 4) {
      System.out.println("Usage:");
      System.out.println(" QuestSesameCMD owlfile obdafile queryfile [outputfile]");
      System.out.println("");
      System.out.println(" owlfile    The full path to the OWL file");
      System.out.println(" obdafile   The full path to the OBDA file");
      System.out.println(" queryfile  The full path to the file with the SPARQL query");
      System.out.println(" outputfile [OPTIONAL] The full path to output file");
      System.out.println("");
      return;
    }

    // get parameter values
    String owlfile = args[0].trim();
    String obdafile = args[1].trim();
    String qfile = args[2].trim();
    String out = null;
    if (args.length == 4) out = args[3].trim();

    try {
      // create and initialize repo
      repo = new SesameVirtualRepo("test_repo", owlfile, obdafile, false, "TreeWitness");
      repo.initialize();
      conn = repo.getConnection();

      // Read the whole query file.
      // Fix: the original issued a single read() into an available()-sized buffer; neither
      // available() nor a single read() is guaranteed to cover the whole file.
      FileInputStream input = new FileInputStream(new File(qfile));
      byte[] fileData = new byte[input.available()];
      try {
        int off = 0;
        while (off < fileData.length) {
          int n = input.read(fileData, off, fileData.length - off);
          if (n < 0) break; // unexpected EOF: use what was read
          off += n;
        }
      } finally {
        input.close();
      }
      String querystr = new String(fileData, "UTF-8");

      // execute query
      Query query = conn.prepareQuery(QueryLanguage.SPARQL, querystr);

      if (query instanceof TupleQuery) {
        TupleQuery tuplequery = (TupleQuery) query;
        // set handler to output file or printout
        TupleQueryResultHandler handler;
        if (out != null) {
          FileOutputStream output = new FileOutputStream(new File(out));
          handler = new SPARQLResultsTSVWriter(output);
        } else {
          handler = new SPARQLResultsTSVWriter(System.out);
        }
        // evaluate the query
        tuplequery.evaluate(handler);
      } else if (query instanceof GraphQuery) {
        GraphQuery graphquery = (GraphQuery) query;
        // set handler to output file or printout
        Writer writer;
        if (out != null) {
          writer = new BufferedWriter(new FileWriter(new File(out)));
        } else {
          writer = new BufferedWriter(new OutputStreamWriter(System.out));
        }
        // evaluate the query
        RDFHandler handler = Rio.createWriter(RDFFormat.TURTLE, writer);
        graphquery.evaluate(handler);
      } else {
        System.out.println("Boolean queries are not supported in this script yet.");
      }

    } catch (Exception e) {
      System.out.println("Error executing query:");
      e.printStackTrace();
    } finally {
      // Best-effort cleanup; conn/repo may be null if setup failed.
      try {
        conn.close();
      } catch (Exception ignored) {
        // ignore close failures during shutdown
      }

      try {
        repo.shutDown();
      } catch (Exception ignored) {
        // ignore shutdown failures
      }
    }
  }
  /**
   * Loads RDFS class and property terms from the given repository into the supplied lists.
   *
   * <p>Runs two SPARQL queries (built from the CLASSES_QUERY_* and PROPERTIES_QUERY_* parts
   * with {@code uri} spliced in), de-duplicates by resource URI, and picks the first non-null
   * label/description, preferring the English-tagged variants.
   *
   * @param repos repository to query
   * @param name vocabulary name recorded on each created term
   * @param uri vocabulary URI, also used to parameterize the queries
   * @param classes output list receiving the discovered classes
   * @param properties output list receiving the discovered properties
   * @throws VocabularyImportException wrapping any repository or query failure
   */
  protected void getTerms(
      Repository repos,
      String name,
      String uri,
      List<RDFSClass> classes,
      List<RDFSProperty> properties)
      throws VocabularyImportException {
    try {
      RepositoryConnection con = repos.getConnection();
      try {

        TupleQuery query =
            con.prepareTupleQuery(QueryLanguage.SPARQL, CLASSES_QUERY_P1 + uri + CLASSES_QUERY_P2);
        // NOTE(review): neither this result nor the one below is ever closed — confirm
        // whether this leaks with the repository implementation in use.
        TupleQueryResult res = query.evaluate();

        // First pass: classes, de-duplicated by resource URI.
        Set<String> seen = new HashSet<String>();
        while (res.hasNext()) {
          BindingSet solution = res.next();
          String clazzURI = solution.getValue("resource").stringValue();
          if (seen.contains(clazzURI)) {
            continue;
          }
          seen.add(clazzURI);
          // Prefer the English-tagged label/definition; fall back to untagged variants.
          String label =
              getFirstNotNull(
                  new Value[] {solution.getValue("en_label"), solution.getValue("label")});
          String description =
              getFirstNotNull(
                  new Value[] {
                    solution.getValue("en_definition"),
                    solution.getValue("definition"),
                    solution.getValue("en_description"),
                    solution.getValue("description")
                  });
          RDFSClass clazz = new RDFSClass(clazzURI, label, description, name, uri);
          classes.add(clazz);
        }

        // Second pass: properties, same de-duplication and label/description selection.
        query =
            con.prepareTupleQuery(
                QueryLanguage.SPARQL, PROPERTIES_QUERY_P1 + uri + PROPERTIES_QUERY_P2);
        res = query.evaluate();
        seen = new HashSet<String>();
        while (res.hasNext()) {
          BindingSet solution = res.next();
          String propertyUri = solution.getValue("resource").stringValue();
          if (seen.contains(propertyUri)) {
            continue;
          }
          seen.add(propertyUri);
          String label =
              getFirstNotNull(
                  new Value[] {solution.getValue("en_label"), solution.getValue("label")});
          String description =
              getFirstNotNull(
                  new Value[] {
                    solution.getValue("en_definition"),
                    solution.getValue("definition"),
                    solution.getValue("en_description"),
                    solution.getValue("description")
                  });
          RDFSProperty prop = new RDFSProperty(propertyUri, label, description, name, uri);
          properties.add(prop);
        }

      } catch (Exception ex) {
        throw new VocabularyImportException(
            "Error while processing vocabulary retrieved from " + uri, ex);
      } finally {
        con.close();
      }
    } catch (RepositoryException ex) {
      throw new VocabularyImportException(
          "Error while processing vocabulary retrieved from " + uri, ex);
    }
  }
  public static TestSuite suite(String manifestFileURL, Factory factory, boolean approvedOnly)
      throws Exception {
    logger.info("Building test suite for {}", manifestFileURL);

    TestSuite suite = new TestSuite(factory.getClass().getName());

    // Read manifest and create declared test cases
    Repository manifestRep = new SailRepository(new MemoryStore());
    manifestRep.initialize();
    RepositoryConnection con = manifestRep.getConnection();

    ManifestTest.addTurtle(con, new URL(manifestFileURL), manifestFileURL);

    suite.setName(getManifestName(manifestRep, con, manifestFileURL));

    // Extract test case information from the manifest file. Note that we
    // only
    // select those test cases that are mentioned in the list.
    StringBuilder query = new StringBuilder(512);
    query.append(
        " SELECT DISTINCT testURI, testName, resultFile, action, queryFile, defaultGraph, ordered ");
    query.append(" FROM {} rdf:first {testURI} ");
    if (approvedOnly) {
      query.append("                          dawgt:approval {dawgt:Approved}; ");
    }
    query.append("                             mf:name {testName}; ");
    query.append("                             mf:result {resultFile}; ");
    query.append("                             [ mf:checkOrder {ordered} ]; ");
    query.append("                             [ mf:requires {Requirement} ];");
    query.append("                             mf:action {action} qt:query {queryFile}; ");
    query.append("                                               [qt:data {defaultGraph}]; ");
    query.append("                                               [sd:entailmentRegime {Regime} ]");

    // skip tests involving CSV result files, these are not query tests
    query.append(" WHERE NOT resultFile LIKE \"*.csv\" ");
    // skip tests involving JSON, sesame currently does not have a
    // SPARQL/JSON
    // parser.
    query.append(" AND NOT resultFile LIKE \"*.srj\" ");
    // skip tests involving entailment regimes
    query.append(" AND NOT BOUND(Regime) ");
    // skip test involving basic federation, these are tested separately.
    query.append(" AND (NOT BOUND(Requirement) OR (Requirement != mf:BasicFederation)) ");
    query.append(" USING NAMESPACE ");
    query.append("  mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>, ");
    query.append("  dawgt = <http://www.w3.org/2001/sw/DataAccess/tests/test-dawg#>, ");
    query.append("  qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>, ");
    query.append("  sd = <http://www.w3.org/ns/sparql-service-description#>, ");
    query.append("  ent = <http://www.w3.org/ns/entailment/> ");
    TupleQuery testCaseQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    query.setLength(0);
    query.append(" SELECT graph ");
    query.append(" FROM {action} qt:graphData {graph} ");
    query.append(" USING NAMESPACE ");
    query.append(" qt = <http://www.w3.org/2001/sw/DataAccess/tests/test-query#>");
    TupleQuery namedGraphsQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    query.setLength(0);
    query.append("SELECT 1 ");
    query.append(" FROM {testURI} mf:resultCardinality {mf:LaxCardinality}");
    query.append(
        " USING NAMESPACE mf = <http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#>");
    TupleQuery laxCardinalityQuery = con.prepareTupleQuery(QueryLanguage.SERQL, query.toString());

    logger.debug("evaluating query..");
    TupleQueryResult testCases = testCaseQuery.evaluate();
    while (testCases.hasNext()) {
      BindingSet bindingSet = testCases.next();

      URI testURI = (URI) bindingSet.getValue("testURI");
      String testName = bindingSet.getValue("testName").toString();
      String resultFile = bindingSet.getValue("resultFile").toString();
      String queryFile = bindingSet.getValue("queryFile").toString();
      URI defaultGraphURI = (URI) bindingSet.getValue("defaultGraph");
      Value action = bindingSet.getValue("action");
      Value ordered = bindingSet.getValue("ordered");

      logger.debug("found test case : {}", testName);

      // Query named graphs
      namedGraphsQuery.setBinding("action", action);
      TupleQueryResult namedGraphs = namedGraphsQuery.evaluate();

      DatasetImpl dataset = null;

      if (defaultGraphURI != null || namedGraphs.hasNext()) {
        dataset = new DatasetImpl();

        if (defaultGraphURI != null) {
          dataset.addDefaultGraph(defaultGraphURI);
        }

        while (namedGraphs.hasNext()) {
          BindingSet graphBindings = namedGraphs.next();
          URI namedGraphURI = (URI) graphBindings.getValue("graph");
          logger.debug(" adding named graph : {}", namedGraphURI);
          dataset.addNamedGraph(namedGraphURI);
        }
      }

      // Check for lax-cardinality conditions
      boolean laxCardinality = false;
      laxCardinalityQuery.setBinding("testURI", testURI);
      TupleQueryResult laxCardinalityResult = laxCardinalityQuery.evaluate();
      try {
        laxCardinality = laxCardinalityResult.hasNext();
      } finally {
        laxCardinalityResult.close();
      }

      // if this is enabled, Sesame passes all tests, showing that the
      // only
      // difference is the semantics of arbitrary-length
      // paths
      /*
       * if (!laxCardinality) { // property-path tests always with lax
       * cardinality because Sesame filters out duplicates by design if
       * (testURI.stringValue().contains("property-path")) {
       * laxCardinality = true; } }
       */

      // check if we should test for query result ordering
      boolean checkOrder = false;
      if (ordered != null) {
        checkOrder = Boolean.parseBoolean(ordered.stringValue());
      }

      SPARQLQueryTest test =
          factory.createSPARQLQueryTest(
              testURI.toString(),
              testName,
              queryFile,
              resultFile,
              dataset,
              laxCardinality,
              checkOrder);
      if (test != null) {
        suite.addTest(test);
      }
    }

    testCases.close();
    con.close();

    manifestRep.shutDown();
    logger.info("Created test suite with " + suite.countTestCases() + " test cases.");
    return suite;
  }
  /**
   * Verifies SPARQL evaluation over statements loaded with statement identifiers (RDR)
   * enabled. Loads the provenance01.ttlx resource, then evaluates a query that uses the
   * embedded-statement syntax (&lt;&lt;s p o&gt;&gt;) and asserts that exactly the expected
   * bindings for ?Y are produced, each exactly once.
   *
   * <p>The test is a no-op (logging a warning) when statement identifiers are not enabled for
   * the backing database, since the RDR query could not match anything in that configuration.
   *
   * @throws Exception if the data load, query preparation, or query evaluation fails.
   */
  public void test_query() throws Exception {

    final BigdataSail sail = getSail();

    try {

      sail.initialize();

      // [sail] is already declared as BigdataSail, so no cast is needed here.
      if (!sail.database.getStatementIdentifiers()) {

        log.warn("Statement identifiers are not enabled");

        return;
      }

      /*
       * Load data into the sail.
       */
      {
        final DataLoader dataLoader = sail.database.getDataLoader();

        // Classpath resource in the Turtle-RDR interchange format.
        dataLoader.loadData(
            "/com/bigdata/rdf/sail/provenance01.ttlx",
            "" /*baseURL*/,
            ServiceProviderHook.TURTLE_RDR);
      }

      /*
       * Serialize as RDF/XML.
       *
       * Note: This is just for debugging.
       */
      if (log.isInfoEnabled()) {

        final BigdataStatementIterator itr = sail.database.getStatements(null, null, null);
        final String rdfXml;
        try {

          final Writer w = new StringWriter();

          // Look the writer up via the registry rather than hard-coding an implementation.
          final RDFWriterFactory writerFactory =
              RDFWriterRegistry.getInstance().get(RDFFormat.RDFXML);

          assertNotNull(writerFactory);

          final RDFWriter rdfWriter = writerFactory.getWriter(w);

          rdfWriter.startRDF();

          while (itr.hasNext()) {

            final BigdataStatementImpl stmt = (BigdataStatementImpl) itr.next();

            // only write the explicit statements.
            if (!stmt.isExplicit()) continue;

            rdfWriter.handleStatement(stmt);
          }

          rdfWriter.endRDF();

          rdfXml = w.toString();

        } finally {

          // Always release the statement iterator, even if serialization fails.
          itr.close();
        }

        // write the rdf/xml
        log.info(rdfXml);
      }

      final SailConnection conn = sail.getConnection();

      try {

        final URI y = new URIImpl("http://www.foo.org/y");

        final URI B = new URIImpl("http://www.foo.org/B");

        final URI dcCreator = new URIImpl("http://purl.org/dc/terms/creator");

        final Literal bryan = new LiteralImpl("bryan");

        final Literal mike = new LiteralImpl("mike");

        /*
         * This is a hand-coded query.
         *
         * Note: When statement identifiers are enabled, the only way to
         * bind the context position is to already have a statement on hand -
         * there is no index which can be used to look up a statement by its
         * context and the context is always a blank node.
         */
        final String q =
            "select ?Y where { <<<"
                + y
                + "> <"
                + RDF.TYPE
                + "> <"
                + B
                + ">>> <"
                + dcCreator
                + "> ?Y . }";

        // NOTE(review): the read-only repository connection obtained below is never
        // explicitly close()d; it is only released indirectly when the sail is torn down
        // in the outer finally. TODO: hold the connection in a local variable and close
        // it once the query result has been consumed.
        final TupleQuery tq =
            new BigdataSailRepository(sail)
                .getReadOnlyConnection()
                .prepareTupleQuery(QueryLanguage.SPARQL, q);

        final TupleQueryResult itr = tq.evaluate();

        if (log.isInfoEnabled()) log.info("Verifying query.");

        /*
         * These are the expected results for the query (the bindings for Y).
         */

        final Set<Value> expected = new HashSet<Value>();

        expected.add(bryan);

        expected.add(mike);

        /*
         * Verify that the query results is the correct solutions.
         */

        final int nresults = expected.size();

        try {

          int i = 0;

          while (itr.hasNext()) {

            final BindingSet solution = itr.next();

            if (log.isInfoEnabled()) log.info("solution[" + i + "] : " + solution);

            final Value actual = solution.getValue("Y");

            // Each solution must be one of the expected values; remove() guarantees that
            // a duplicate binding fails the assertion.
            assertTrue("Not expecting Y=" + actual, expected.remove(actual));

            i++;
          }

          // Every expected binding must have been observed.
          assertEquals("#results", nresults, i);

        } finally {

          itr.close();
        }

      } finally {

        conn.close();
      }

    } finally {

      // Tears down the sail AND destroys the backing persistence store.
      sail.__tearDownUnitTest();
    }
  }