Code example #1
File: LazyQueryView.java Project: vitlav/siu
 /**
  * Gets the current query or constructs one on demand.
  *
  * @return The current query.
  */
 private Query getQuery() {
   if (query == null) {
     query = queryFactory.constructQuery(sortPropertyIds, ascendingStates);
     queryCount++;
   }
   return query;
 }
Code example #2
 private static Query toQuery(Element pattern) {
   Query query = QueryFactory.make();
   query.setQueryPattern(pattern);
   query.setQuerySelectType();
   query.setQueryResultStar(true);
   return query;
 }
Code example #3
File: MdsExtractor.java Project: stetro/MDS-Viewer
  public List<ResourceName> queryMDS(PackageName packageName) {
    MDSInstance mdsInstance;
    List<ResourceName> resources = new ArrayList<ResourceName>();
    try {
      mdsInstance = storeInitializer.getMDSInstance();
      packageName = (packageName == null) ? PackageName.createPackageName("/") : packageName;
      NameCondition condition =
          ConditionFactory.createNameCondition(packageName.getAbsoluteName(), "%");
      ResourceQuery query = QueryFactory.createResourceQuery(mdsInstance, condition);

      Iterator<QueryResult> contents = query.execute();
      if (contents == null) {
        return resources;
      }
      while (contents.hasNext()) {
        QueryResult result = contents.next();
        if (result.getResultType() == QueryResult.ResultType.PACKAGE_RESULT) {
          PackageResult pack = (PackageResult) result;
          resources.add(pack.getPackageName());
        } else {
          DocumentResult doc = (DocumentResult) result;
          resources.add(doc.getDocumentName());
        }
      }
      return resources;
    } catch (MDSAccessException e) {
      throw new IllegalArgumentException("Could not connect to MDS, check login data", e);
    } catch (InvalidReferenceException e) {
      throw new IllegalArgumentException("Could not find/read " + packageName, e);
    } catch (InvalidReferenceTypeException e) {
      throw new IllegalArgumentException("No correct type!", e);
    }
  }
Code example #4
  /**
   * Queries the {@link #rdfEndpoint(String)} with each of the {@link #rdfQueries} and harvests
   * the results of each query.
   */
  private void harvestFromEndpoint() {

    Query query;
    QueryExecution qExec;

    for (String rdfQuery : rdfQueries) {
      if (closed) break;

      logger.info(
          "Harvesting with query: [{}] on index [{}] and type [{}]", rdfQuery, indexName, typeName);

      try {
        query = QueryFactory.create(rdfQuery);
      } catch (QueryParseException qpe) {
        logger.error("Could not parse [{}]. Please provide a relevant query. {}", rdfQuery, qpe);
        continue;
      }

      qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);

      try {
        harvest(qExec);
      } catch (Exception e) {
        logger.error("Exception [{}] occurred while harvesting", e.getLocalizedMessage());
      } finally {
        qExec.close();
      }
    }
  }
Code example #5
 /**
  * Create a QueryExecution that will access a SPARQL service over HTTP
  *
  * @param service URL of the remote service
  * @param query Query string to execute
  * @param defaultGraphURIs List of URIs to make up the default graph
  * @param namedGraphURIs List of URIs to make up the named graphs
  * @return QueryExecution
  */
 public static QueryExecution sparqlService(
     String service, String query, List<String> defaultGraphURIs, List<String> namedGraphURIs) {
   checkNotNull(service, "URL for service is null");
   // checkNotNull(defaultGraphURIs, "List of default graph URIs is null") ;
   // checkNotNull(namedGraphURIs, "List of named graph URIs is null") ;
   checkArg(query);
   return sparqlService(service, QueryFactory.create(query), defaultGraphURIs, namedGraphURIs);
 }
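A minimal usage sketch for the overload above, assuming the standard Jena ARQ imports (QueryExecution, ResultSetFormatter) plus java.util collections; the method name, endpoint URL, and graph URI are placeholders rather than values from the original project:

 public static void sparqlServiceWithGraphsExample() {
   // Placeholder endpoint and graph URIs, for illustration only.
   String service = "http://example.org/sparql";
   List<String> defaultGraphURIs = Arrays.asList("http://example.org/graph/default");
   List<String> namedGraphURIs = Collections.emptyList();

   QueryExecution qe =
       sparqlService(service, "SELECT * { ?s ?p ?o } LIMIT 10", defaultGraphURIs, namedGraphURIs);
   try {
     // Print the result table to stdout.
     ResultSetFormatter.out(System.out, qe.execSelect());
   } finally {
     qe.close();
   }
 }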
Code example #6
  @BeforeMethod
  public void setup() {
    mockFactory = mock(QueryFactory.class);
    mockQueryFilter = mock(QueryFilter.class);
    given(mockFactory.createFilter()).willReturn(mockQueryFilter);

    mockAndBuilder = mock(QueryFilter.QueryFilterBuilder.class);
    mockOrBuilder = mock(QueryFilter.QueryFilterBuilder.class);
    given(mockQueryFilter.and()).willReturn(mockAndBuilder);
    given(mockQueryFilter.or()).willReturn(mockOrBuilder);
  }
Code example #7
 @Test
 public void testCreatingFilterPatternQuery() {
   Query query = QueryFactory.createQuery();
   query.from(
       QueryFactory.sequenceStream(
           Sequence.next(
               QueryFactory.inputStream("e1", "Stream1"),
               QueryFactory.inputStream("e2", "Stream1")),
           Expression.value(2000)));
   query.insertInto("OutStream");
   query.select(
       QueryFactory.outputSelector()
           .select("symbol", Expression.variable("e1", "symbol"))
           .select("avgPrice", "avg", Expression.variable("e2", 0, "price"))
           .groupBy("e1", "symbol")
           .having(
               Condition.compare(
                   Expression.variable("avgPrice"),
                   Condition.Operator.GREATER_THAN,
                   Expression.value(50))));
 }
Code example #8
  /**
   * Tests whether a query gives the same results when run both with and without a given optimizer
   *
   * @param queryStr Query
   * @param ds Dataset
   * @param opt Optimizer
   * @param expected Expected number of results
   */
  public static void test(String queryStr, Dataset ds, Symbol opt, int expected) {
    Query q = QueryFactory.create(queryStr);

    if (!q.isSelectType()) Assert.fail("Only SELECT queries are testable with this method");

    Op op = Algebra.compile(q);
    // Track current state
    boolean isEnabled = ARQ.isTrue(opt);
    boolean isDisabled = ARQ.isFalse(opt);

    try {
      // Run first without optimization
      ARQ.set(opt, false);
      ResultSetRewindable rs;
      try (QueryExecution qe = QueryExecutionFactory.create(q, ds)) {
        rs = ResultSetFactory.makeRewindable(qe.execSelect());
        if (expected != rs.size()) {
          System.err.println("Non-optimized results not as expected");
          TextOutput output = new TextOutput((SerializationContext) null);
          output.format(System.out, rs);
          rs.reset();
        }
        Assert.assertEquals(expected, rs.size());
      }

      // Run with optimization
      ARQ.set(opt, true);
      ResultSetRewindable rsOpt;
      try (QueryExecution qeOpt = QueryExecutionFactory.create(q, ds)) {
        rsOpt = ResultSetFactory.makeRewindable(qeOpt.execSelect());
        if (expected != rsOpt.size()) {
          System.err.println("Optimized results not as expected");
          TextOutput output = new TextOutput((SerializationContext) null);
          output.format(System.out, rsOpt);
          rsOpt.reset();
        }
        Assert.assertEquals(expected, rsOpt.size());
      }
      Assert.assertTrue(ResultSetCompare.isomorphic(rs, rsOpt));
    } finally {
      // Restore previous state
      if (isEnabled) {
        ARQ.set(opt, true);
      } else if (isDisabled) {
        ARQ.set(opt, false);
      } else {
        ARQ.unset(opt);
      }
    }
  }
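A sketch of how this helper might be invoked, assuming Jena's DatasetFactory, ModelFactory, and ARQ classes; the query string, optimizer symbol, and expected row count are illustrative and not taken from the original test suite:

  public static void exampleOptimizerCheck() {
    // Empty in-memory dataset, so both the optimized and non-optimized runs should return 0 rows.
    Dataset ds = DatasetFactory.create(ModelFactory.createDefaultModel());
    String queryStr = "SELECT * { ?s ?p ?o FILTER(?o = 1) }";
    // ARQ.optFilterPlacement is one of the optimizer symbols ARQ exposes.
    test(queryStr, ds, ARQ.optFilterPlacement, 0);
  }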
Code example #9
  @Test
  public void testSelectToWurcsSparql() throws SparqlException, UnsupportedEncodingException {
    GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct");
    SparqlEntity se = new SparqlEntity();
    se.setValue(
        GlycoSequenceToWurcsSelectSparql.FromSequence,
        "RES\n1b:a-dgal-HEX-1:5\n2s:n-acetyl\n3b:b-dgal-HEX-1:5\n4b:b-dglc-HEX-1:5\n5s:n-acetyl\n6b:b-dgal-HEX-1:5\n7b:a-lgal-HEX-1:5|6:d\n8b:b-dglc-HEX-1:5\n9s:n-acetyl\n10b:b-dglc-HEX-1:5\n11s:n-acetyl\n12b:b-dgal-HEX-1:5\n13b:a-lgal-HEX-1:5|6:d\nLIN\n1:1d(2+1)2n\n2:1o(3+1)3d\n3:3o(3+1)4d\n4:4d(2+1)5n\n5:4o(4+1)6d\n6:6o(2+1)7d\n7:3o(6+1)8d\n8:8d(2+1)9n\n9:1o(6+1)10d\n10:10d(2+1)11n\n11:10o(4+1)12d\n12:12o(2+1)13d"
            .replaceAll("\n", "\\\\n"));
    s.setSparqlEntity(se);
    logger.debug(s.getSparql());
    Query query =
        QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", ""));
    //        QueryExecution qe =
    // QueryExecutionFactory.sparqlService("http://localhost:3030/glycobase/query",query);
    QueryExecution qe =
        QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
    ResultSet rs = qe.execSelect();

    List<SparqlEntity> results = new ArrayList<SparqlEntity>();

    while (rs.hasNext()) {
      QuerySolution row = rs.next();
      Iterator<String> columns = row.varNames();
      SparqlEntity se2 = new SparqlEntity();
      while (columns.hasNext()) {
        String column = columns.next();
        RDFNode cell = row.get(column);

        if (cell.isResource()) {
          Resource resource = cell.asResource();
          // do something maybe with the OntModel???
          if (resource.isLiteral()) se2.setValue(column, resource.asLiteral().getString());
          else se2.setValue(column, resource.toString());
        } else if (cell.isLiteral()) {
          se2.setValue(column, cell.asLiteral().getString());
        } else if (cell.isAnon()) {
          se2.setValue(column, "anon");
        } else {
          se2.setValue(column, cell.toString());
        }
      }
      // Add the per-row entity, not the input entity, so each result row is kept separately.
      results.add(se2);
    }

    for (SparqlEntity entity : results) {
      System.out.println("results: " + entity.getValue("PrimaryId"));
    }
  }
Code example #10
File: SPARQL_Query.java Project: johnulist/jena
  protected void execute(String queryString, HttpAction action) {
    String queryStringLog = ServletOps.formatForLog(queryString);
    if (action.verbose) action.log.info(format("[%d] Query = \n%s", action.id, queryString));
    else action.log.info(format("[%d] Query = %s", action.id, queryStringLog));

    Query query = null;
    try {
      // NB syntax is ARQ (a superset of SPARQL)
      query = QueryFactory.create(queryString, QueryParseBase, Syntax.syntaxARQ);
      queryStringLog = formatForLog(query);
      validateQuery(action, query);
    } catch (ActionErrorException ex) {
      throw ex;
    } catch (QueryParseException ex) {
      ServletOps.errorBadRequest(
          "Parse error: \n" + queryString + "\n\r" + messageForQueryException(ex));
    }
    // Should not happen.
    catch (QueryException ex) {
      ServletOps.errorBadRequest("Error: \n" + queryString + "\n\r" + ex.getMessage());
    }

    // Assumes finished whole thing by end of sendResult.
    try {
      action.beginRead();
      Dataset dataset = decideDataset(action, query, queryStringLog);
      try (QueryExecution qExec = createQueryExecution(query, dataset)) {
        SPARQLResult result = executeQuery(action, qExec, query, queryStringLog);
        // Deals with exceptions itself.
        sendResults(action, result, query.getPrologue());
      }
    } catch (QueryParseException ex) {
      // Late stage static error (e.g. bad fixed Lucene query string).
      ServletOps.errorBadRequest(
          "Query parse error: \n" + queryString + "\n\r" + messageForQueryException(ex));
    } catch (QueryCancelledException ex) {
      // Additional counter information.
      incCounter(action.getEndpoint().getCounters(), QueryTimeouts);
      throw ex;
    } finally {
      action.endRead();
    }
  }
Code example #11
  @Test
  public void testSparql() {

    String queryStr = "select distinct ?Concept where {[] a ?Concept} LIMIT 10";
    Query query = QueryFactory.create(queryStr);

    // Remote execution.
    try (QueryExecution qexec =
        QueryExecutionFactory.sparqlService("http://dbpedia.org/sparql", query)) {
      // Set the DBpedia specific timeout.
      ((QueryEngineHTTP) qexec).addParam("timeout", "10000");

      // Execute.
      ResultSet rs = qexec.execSelect();
      ResultSetFormatter.out(System.out, rs, query);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Code example #12
  /**
   * Returns the string value of the first of the properties in the uriDescriptionList for the
   * given resource (identified by its URI). If the resource has none of the listed properties,
   * its URI is returned instead. The value is obtained by querying the endpoint, and the endpoint
   * is queried repeatedly until it gives a response (either a value or the lack of one).
   *
   * <p>It is highly recommended that the list contains properties such as labels or titles, with
   * text values.
   *
   * @param uri the URI for which a label is required
   * @return a String value, either a label for the given URI or the URI itself if no label is
   *     obtained from the endpoint
   */
  private String getLabelForUri(String uri) {
    String result;

    if (uriLabelCache.containsKey(uri)) {
      return uriLabelCache.get(uri);
    }

    for (String prop : uriDescriptionList) {
      String innerQuery = "SELECT ?r WHERE {<" + uri + "> <" + prop + "> ?r } LIMIT 1";

      try {
        Query query = QueryFactory.create(innerQuery);
        QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
        try {
          boolean keepTrying = true;
          while (keepTrying) {
            keepTrying = false;
            try {
              ResultSet results = qExec.execSelect();

              if (results.hasNext()) {
                QuerySolution sol = results.nextSolution();
                result = EEASettings.parseForJson(sol.getLiteral("r").getLexicalForm());
                if (!result.isEmpty()) {
                  uriLabelCache.put(uri, result);
                  return result;
                }
              }
            } catch (Exception e) {
              keepTrying = true;
              logger.warn("Could not get label for uri {}. Retrying.", uri);
            }
          }
        } finally {
          // Close once, after the retry loop: closing inside the loop would make any retry
          // re-execute an already closed QueryExecution.
          qExec.close();
        }
      } catch (QueryParseException qpe) {
        logger.error("Exception for query {}. The label cannot be obtained", innerQuery);
      }
    }
    return uri;
  }
Code example #13
  @Test
  public void testDOQuery2() {

    String queryString =
        "prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "prefix owl: <http://www.w3.org/2002/07/owl#>\n"
            + "\n"
            + "select *\n"
            + "from <http://purl.obolibrary.org/obo/merged/DOID>\n"
            + "\n"
            + "WHERE {\n"
            + "   <http://purl.obolibrary.org/obo/DOID_1485> <http://www.w3.org/2000/01/rdf-schema#subClassOf> ?o\n"
            + "}";

    Query query = QueryFactory.create(queryString);
    System.out.println("String: " + queryString);
    QueryExecution qExe =
        QueryExecutionFactory.sparqlService("http://sparql.hegroup.org/sparql/", query);
    ResultSet results = qExe.execSelect();
    ResultSetFormatter.out(System.out, results, query);
  }
Code example #14
  /**
   * Gets the set of unique values of queryObjName returned by a SELECT query.
   *
   * <p>Used to retrieve the sets of modified objects used in sync.
   *
   * @param rdfQuery query to execute
   * @param queryObjName name of the object returned
   * @return set of values for queryObjName in the rdfQuery result
   */
  HashSet<String> executeSyncQuery(String rdfQuery, String queryObjName) {
    HashSet<String> rdfUrls = new HashSet<String>();

    Query query;
    try {
      query = QueryFactory.create(rdfQuery);
    } catch (QueryParseException qpe) {
      logger.warn(
          "Could not parse [{}]. Please provide a relevant query. {}",
          rdfQuery,
          qpe.getLocalizedMessage());
      return null;
    }

    QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
    try {
      ResultSet results = qExec.execSelect();

      while (results.hasNext()) {
        QuerySolution sol = results.nextSolution();
        try {
          String value = sol.getResource(queryObjName).toString();
          rdfUrls.add(value);
        } catch (NoSuchElementException e) {
          logger.error("Encountered a NoSuchElementException: " + e.getLocalizedMessage());
          return null;
        }
      }
    } catch (Exception e) {
      logger.error(
          "Encountered a [{}] while querying the endpoint for sync", e.getLocalizedMessage());
      return null;
    } finally {
      qExec.close();
    }

    return rdfUrls;
  }
Code example #15
File: Ex1.java Project: rhasan/query-performance
  public static void main(String... argv) {
    String queryString = "SELECT * { ?s ?p ?o }";
    Query query = QueryFactory.create(queryString);
    Store store = SDBFactory.connectStore("sdb.ttl");

    // Must be a DatasetStore to trigger the SDB query engine.
    // Creating a graph from the Store and adding it to a general-purpose
    // dataset will not necessarily exploit full SQL generation.
    // The right answers will be obtained, but slowly.

    Dataset ds = DatasetStore.create(store);
    QueryExecution qe = QueryExecutionFactory.create(query, ds);
    try {
      ResultSet rs = qe.execSelect();
      ResultSetFormatter.out(rs);
    } finally {
      qe.close();
    }

    // Close the SDB connection, which also closes the underlying JDBC connection.
    store.getConnection().close();
    store.close();
  }
Code example #16
 /**
  * Create a QueryExecution that will access a SPARQL service over HTTP
  *
  * @param service URL of the remote service
  * @param query Query string to execute
  * @param defaultGraph URI of the default graph
  * @return QueryExecution
  */
 public static QueryExecution sparqlService(String service, String query, String defaultGraph) {
   checkNotNull(service, "URL for service is null");
   // checkNotNull(defaultGraph, "IRI for default graph is null") ;
   checkArg(query);
   return sparqlService(service, QueryFactory.create(query), defaultGraph);
 }
Code example #17
 private static Query makeQuery(String queryStr, Syntax syntax) {
   return QueryFactory.create(queryStr, syntax);
 }
Code example #18
  @Override
  public EntityDefinition open(Assembler a, Resource root, Mode mode) {
    String prologue = "PREFIX : <" + NS + ">   PREFIX list: <http://jena.apache.org/ARQ/list#> ";
    Model model = root.getModel();

    String qs1 =
        StrUtils.strjoinNL(
            prologue,
            "SELECT * {",
            "  ?eMap  :entityField  ?entityField ;",
            "         :map ?map ;",
            "         :defaultField ?dftField .",
            "  OPTIONAL {",
            "    ?eMap :graphField ?graphField",
            "  }",
            "  OPTIONAL {",
            "    ?eMap :langField ?langField",
            "  }",
            "  OPTIONAL {",
            "    ?eMap :uidField ?uidField",
            "  }",
            "}");
    ParameterizedSparqlString pss = new ParameterizedSparqlString(qs1);
    pss.setIri("eMap", root.getURI());

    Query query1 = QueryFactory.create(pss.toString());
    QueryExecution qexec1 = QueryExecutionFactory.create(query1, model);
    ResultSet rs1 = qexec1.execSelect();
    List<QuerySolution> results = ResultSetFormatter.toList(rs1);
    if (results.size() == 0) {
      Log.warn(this, "Failed to find a valid EntityMap for : " + root);
      throw new TextIndexException("Failed to find a valid EntityMap for : " + root);
    }

    if (results.size() != 1) {
      Log.warn(this, "Multiple matches for EntityMap for : " + root);
      throw new TextIndexException("Multiple matches for EntityMap for : " + root);
    }

    QuerySolution qsol1 = results.get(0);
    String entityField = qsol1.getLiteral("entityField").getLexicalForm();
    String graphField =
        qsol1.contains("graphField") ? qsol1.getLiteral("graphField").getLexicalForm() : null;
    String langField =
        qsol1.contains("langField") ? qsol1.getLiteral("langField").getLexicalForm() : null;
    String defaultField =
        qsol1.contains("dftField") ? qsol1.getLiteral("dftField").getLexicalForm() : null;
    String uniqueIdField =
        qsol1.contains("uidField") ? qsol1.getLiteral("uidField").getLexicalForm() : null;

    Multimap<String, Node> mapDefs = HashMultimap.create();
    Map<String, Analyzer> analyzerDefs = new HashMap<>();

    Statement listStmt = root.getProperty(TextVocab.pMap);
    while (listStmt != null) {
      RDFNode n = listStmt.getObject();
      if (!n.isResource()) {
        throw new TextIndexException("Text list node is not a resource : " + n);
      }
      Resource listResource = n.asResource();
      if (listResource.equals(RDF.nil)) {
        break; // end of the list
      }

      Statement listEntryStmt = listResource.getProperty(RDF.first);
      if (listEntryStmt == null) {
        throw new TextIndexException("Text map list is not well formed.  No rdf:first property");
      }
      n = listEntryStmt.getObject();
      if (!n.isResource()) {
        throw new TextIndexException("Text map list entry is not a resource : " + n);
      }
      Resource listEntry = n.asResource();

      Statement fieldStatement = listEntry.getProperty(TextVocab.pField);
      if (fieldStatement == null) {
        throw new TextIndexException("Text map entry has no field property");
      }
      n = fieldStatement.getObject();
      if (!n.isLiteral()) {
        throw new TextIndexException("Text map entry field property has no literal value : " + n);
      }
      String field = n.asLiteral().getLexicalForm();

      Statement predicateStatement = listEntry.getProperty(TextVocab.pPredicate);
      if (predicateStatement == null) {
        throw new TextIndexException("Text map entry has no predicate property");
      }
      n = predicateStatement.getObject();
      if (!n.isURIResource()) {
        throw new TextIndexException(
            "Text map entry predicate property has non resource value : " + n);
      }
      mapDefs.put(field, n.asNode());

      Statement analyzerStatement = listEntry.getProperty(TextVocab.pAnalyzer);
      if (analyzerStatement != null) {
        n = analyzerStatement.getObject();
        if (!n.isResource()) {
          throw new TextIndexException("Text map entry analyzer property is not a resource : " + n);
        }
        Resource analyzerResource = n.asResource();
        Analyzer analyzer = (Analyzer) a.open(analyzerResource);
        analyzerDefs.put(field, analyzer);
      }

      // move on to the next element in the list
      listStmt = listResource.getProperty(RDF.rest);
    }

    // Primary field/predicate
    if (defaultField != null) {
      Collection<Node> c = mapDefs.get(defaultField);
      if (c.isEmpty())
        throw new TextIndexException("No definition of primary field '" + defaultField + "'");
    }

    EntityDefinition docDef = new EntityDefinition(entityField, defaultField);
    docDef.setGraphField(graphField);
    docDef.setLangField(langField);
    docDef.setUidField(uniqueIdField);
    for (String f : mapDefs.keys()) {
      for (Node p : mapDefs.get(f)) docDef.set(f, p);
    }
    for (String f : analyzerDefs.keySet()) {
      docDef.setAnalyzer(f, analyzerDefs.get(f));
    }
    return docDef;
  }
Code example #19
 /**
  * Create a QueryExecution that will access a SPARQL service over HTTP
  *
  * @param service URL of the remote service
  * @param query Query string to execute
  * @return QueryExecution
  */
 public static QueryExecution sparqlService(String service, String query) {
   checkNotNull(service, "URL for service is null");
   checkArg(query);
   return sparqlService(service, QueryFactory.create(query));
 }
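A usage sketch for this simplest overload; the method name, endpoint URL, and query are placeholders, and the standard Jena ARQ result classes (ResultSet, QuerySolution) are assumed to be imported:

 public static void sparqlServiceExample() {
   QueryExecution qe =
       sparqlService("http://example.org/sparql", "SELECT ?s WHERE { ?s ?p ?o } LIMIT 5");
   try {
     ResultSet rs = qe.execSelect();
     while (rs.hasNext()) {
       QuerySolution row = rs.nextSolution();
       // Each row binds ?s to an RDFNode; print its string form.
       System.out.println(row.get("s"));
     }
   } finally {
     qe.close();
   }
 }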
Code example #20
File: QueryManager.java Project: NCIP/rembrandt
 public static Query createQuery(QueryType typeOfQuery) {
   return QueryFactory.newQuery(typeOfQuery);
 }
Code example #21
  @Test
  public void testKBtoWurcsSparqlTranslation() throws SparqlException {

    List<Translation> translations = Ebean.find(Translation.class).findList();
    HashSet<String> resultList = new HashSet<>();

    String ct = "";

    for (Translation translation : translations) {
      System.out.println("id check " + translation.id + " ct " + translation.ct);
      if (translation.ct == null) continue;

      if (translation.structure.id > 0) {

        ct = translation.ct;

        GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct");
        SparqlEntity se = new SparqlEntity();
        ct = StringUtils.chomp(ct);
        System.out.println("ct on top: " + ct);
        if (ct != null) {
          se.setValue(
              GlycoSequenceToWurcsSelectSparql.FromSequence,
              ct.replaceAll("\n", "\\\\n")
                  .replaceAll("x\\(", "u\\(")
                  .replaceAll("\\)x", "\\)u")
                  .trim());
          s.setSparqlEntity(se);
          logger.debug(s.getSparql());

          Query query =
              QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", ""));
          System.out.println(
              "Id "
                  + translation.structure.id
                  + " Query: "
                  + s.getSparql().replaceAll("null", "").replace("?Sequence", ""));
          QueryExecution qe =
              QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
          ResultSet rs = qe.execSelect();

          List<SparqlEntity> results = new ArrayList<>();
          HashSet<String> resultsList = new HashSet<>();

          while (rs.hasNext()) {
            QuerySolution row = rs.next();
            Iterator<String> columns = row.varNames();
            SparqlEntity se2 = new SparqlEntity();
            while (columns.hasNext()) {
              String column = columns.next();
              RDFNode cell = row.get(column);

              if (cell.isResource()) {
                Resource resource = cell.asResource();
                // do something maybe with the OntModel???
                if (resource.isLiteral()) se2.setValue(column, resource.asLiteral().getString());
                else se2.setValue(column, resource.toString());
              } else if (cell.isLiteral()) {
                se2.setValue(column, cell.asLiteral().getString());
              } else if (cell.isAnon()) {
                se2.setValue(column, "anon");
              } else {
                se2.setValue(column, cell.toString());
              }
            }
            // Add the per-row entity, not the input entity, so each result row is kept separately.
            results.add(se2);
          }

          for (SparqlEntity entity : results) {
            // System.out.println("results: " + entity.getValue("PrimaryId"));
            resultList.add(
                translation.structure.id + "\t" + entity.getValue("PrimaryId").toString());
          }
        }
      }
    }

    for (String c : resultList) {
      System.out.println(c);
    }
  }
Code example #22
  @Test
  public void testDO() {
    String doid = "1485";
    String queryString =
        "prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
            + "prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "prefix owl: <http://www.w3.org/2002/07/owl#>\n"
            + "\n"
            + "select ?s ?p ?o \n"
            + "from <http://purl.obolibrary.org/obo/merged/DOID>\n"
            + "\n"
            + "WHERE {\n"
            + "   <http://purl.obolibrary.org/obo/DOID_"
            + doid
            + "> ?p ?o\n"
            + "}";

    Query query = QueryFactory.create(queryString);
    QueryExecution qExe =
        QueryExecutionFactory.sparqlService("http://sparql.hegroup.org/sparql/", query);
    ResultSet results = qExe.execSelect();
    ResultSetFormatter.out(System.out, results, query);

    assertNotNull(results);

    /*Model model = ModelFactory.createDefaultModel();
    Selector selector = new SimpleSelector(null, model.getProperty("<http://www.geneontology.org/formats/oboInOwl#hasDbXref>"), (RDFNode) null);  // you need to cast the last null as otherwise the method is ambigious
    */

    List<String> dbXref = new ArrayList<>();
    List<String> iao = new ArrayList<>();
    List<String> exactSynonym = new ArrayList<>();
    List<String> alternativeId = new ArrayList<>();
    String diseaseLabel;

    while (results.hasNext()) {
      QuerySolution querySolution = results.nextSolution();

      if (querySolution.get("p").toString().matches("rdfs:label ")) {
        diseaseLabel = querySolution.get("o").toString();
      }

      if (querySolution
          .get("p")
          .toString()
          .matches("http://www.geneontology.org/formats/oboInOwl#hasDbXref")) {
        System.out.println(
            querySolution.get("p").toString() + "   " + querySolution.get("o").toString());
        dbXref.add(querySolution.get("o").toString());
      }

      if (querySolution.get("p").toString().matches("http://purl.obolibrary.org/obo/IAO_0000115")) {
        System.out.println(
            querySolution.get("p").toString() + "   " + querySolution.get("o").toString());
        iao.add(querySolution.get("o").toString());
      }

      if (querySolution
          .get("p")
          .toString()
          .matches("http://www.geneontology.org/formats/oboInOwl#hasExactSynonym")) {
        System.out.println(
            querySolution.get("p").toString() + "   " + querySolution.get("o").toString());
        exactSynonym.add(querySolution.get("o").toString());
      }

      if (querySolution
          .get("p")
          .toString()
          .matches("http://www.geneontology.org/formats/oboInOwl#hasAlternativeId")) {
        System.out.println(
            querySolution.get("p").toString() + "   " + querySolution.get("o").toString());
        alternativeId.add(querySolution.get("o").toString());
      }
    }

    assertNotNull(dbXref);
    assertNotNull(iao);
  }
Code example #23
  @Test
  public void testKBtoWurcsSparql() throws SparqlException {

    List<Structure> structures = Ebean.find(Structure.class).findList();
    HashSet<String> resultList = new HashSet<>();

    String ct = "";

    for (Structure structure : structures) {
      if (structure.id >= 7400) {

        if (structure.glycanst.startsWith("v--")) {
          structure.glycanst = structure.glycanst.replace("v--", "FreeEnd--");
        }

        if (structure.glycanst.startsWith("FreenEnd")) {
          structure.glycanst = structure.glycanst.replace("FreenEnd", "FreeEnd");
        }

        if (structure.glycanst.startsWith("FreeEnd?")) {
          structure.glycanst = structure.glycanst.replace("FreeEnd?", "FreeEnd--?");
        }

        if (structure.glycanst.startsWith("<Gly") || structure.glycanst.contains("0.0000u")) {
          continue;
        }

        System.out.println(structure.getGlycanst());

        BuilderWorkspace workspace = new BuilderWorkspace(new GlycanRendererAWT());
        workspace.setNotation("cfg"); // cfgbw | uoxf | uoxfcol | text
        GlycanRenderer renderer = workspace.getGlycanRenderer();
        org.eurocarbdb.application.glycanbuilder.Glycan glycan =
            org.eurocarbdb.application.glycanbuilder.Glycan.fromString(structure.glycanst.trim());
        if (glycan != null) {
          ct = glycan.toGlycoCTCondensed();
          System.out.println("this was the ct: " + ct);
          GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct");
          SparqlEntity se = new SparqlEntity();
          ct = StringUtils.chomp(ct);
          se.setValue(
              GlycoSequenceToWurcsSelectSparql.FromSequence,
              ct.replaceAll("\n", "\\\\n")
                  .replaceAll("x\\(", "u\\(")
                  .replaceAll("\\)x", "\\)u")
                  .trim());
          s.setSparqlEntity(se);
          logger.debug(s.getSparql());

          Query query =
              QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", ""));
          System.out.println(
              "Id "
                  + structure.id
                  + " Query: "
                  + s.getSparql().replaceAll("null", "").replace("?Sequence", ""));
          QueryExecution qe =
              QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
          ResultSet rs = qe.execSelect();

          List<SparqlEntity> results = new ArrayList<>();
          HashSet<String> resultsList = new HashSet<>();

          while (rs.hasNext()) {
            QuerySolution row = rs.next();
            Iterator<String> columns = row.varNames();
            SparqlEntity se2 = new SparqlEntity();
            while (columns.hasNext()) {
              String column = columns.next();
              RDFNode cell = row.get(column);

              if (cell.isResource()) {
                Resource resource = cell.asResource();
                // do something maybe with the OntModel???
                if (resource.isLiteral()) se2.setValue(column, resource.asLiteral().getString());
                else se2.setValue(column, resource.toString());
              } else if (cell.isLiteral()) {
                se2.setValue(column, cell.asLiteral().getString());
              } else if (cell.isAnon()) {
                se2.setValue(column, "anon");
              } else {
                se2.setValue(column, cell.toString());
              }
            }
            // Add the per-row entity, not the input entity, so each result row is kept separately.
            results.add(se2);
          }

          for (SparqlEntity entity : results) {
            // System.out.println("results: " + entity.getValue("PrimaryId"));
            resultList.add(structure.id + "\t" + entity.getValue("PrimaryId").toString());
          }
        }
      }
    }
    // Use try-with-resources so the writer is always closed and never dereferenced while null.
    try (PrintWriter writer =
        new PrintWriter(
            new OutputStreamWriter(new FileOutputStream("/tmp/HashSet.txt"), "UTF-8"))) {
      for (String c : resultList) {
        System.out.println(c);
        writer.println(c);
      }
    } catch (UnsupportedEncodingException | FileNotFoundException e) {
      e.printStackTrace();
    }
  }
Code example #24
 private static Query makeQuery(String queryStr) {
   return QueryFactory.create(queryStr);
 }
Code example #25
  /**
   * Starts a harvester with predefined queries to synchronize with the changes from the SPARQL
   * endpoint
   */
  public boolean sync() {
    logger.info("Sync resources newer than {}", startTime);

    String rdfQueryTemplate =
        "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> "
            + "SELECT DISTINCT ?resource WHERE { "
            + " GRAPH ?graph { %s }"
            + " ?graph <%s> ?time .  %s "
            + " FILTER (?time > xsd:dateTime(\"%s\")) }";

    String queryStr =
        String.format(
            rdfQueryTemplate, syncConditions, syncTimeProp, graphSyncConditions, startTime);
    Set<String> syncUris = executeSyncQuery(queryStr, "resource");

    if (syncUris == null) {
      logger.error("Errors occurred during sync procedure. Aborting!");
      return false;
    }

    /**
     * If desired, query for old data whose sync conditions have been modified.
     *
     * <p>This option is useful when the application indexes resources that match certain
     * conditions. If those resources are modified and no longer match the initial conditions,
     * they will not be synchronized. When syncOldData is true, the modified resources that no
     * longer match the conditions are deleted.
     */
    int deleted = 0;
    int count = 0;
    if (this.syncOldData) {
      SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
      queryStr =
          String.format(
              rdfQueryTemplate,
              syncConditions,
              syncTimeProp,
              graphSyncConditions,
              sdf.format(new Date(0)));

      HashSet<String> allIndexURIs = executeSyncQuery(queryStr, "resource");

      if (allIndexURIs == null) {
        logger.error("Errors occurred during modified content sync query. Aborting!");
        return false;
      }

      deleted = removeMissingUris(allIndexURIs);
    }

    /* Prepare a series of bulk uris to be described so we can make
     * a smaller number of calls to the SPARQL endpoint. */
    ArrayList<ArrayList<String>> bulks = new ArrayList<ArrayList<String>>();
    ArrayList<String> currentBulk = new ArrayList<String>();

    for (String uri : syncUris) {
      currentBulk.add(uri);

      if (currentBulk.size() == EEASettings.DEFAULT_BULK_SIZE) {
        bulks.add(currentBulk);
        currentBulk = new ArrayList<String>();
      }
    }

    if (currentBulk.size() > 0) {
      bulks.add(currentBulk);
    }

    /* Execute RDF queries for the resources in each bulk */
    for (ArrayList<String> bulk : bulks) {
      String syncQuery = getSyncQueryStr(bulk);

      try {
        Query query = QueryFactory.create(syncQuery);
        QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
        try {
          Model constructModel = ModelFactory.createDefaultModel();
          qExec.execConstruct(constructModel);
          BulkRequestBuilder bulkRequest = client.prepareBulk();

          /**
           * When adding the model to ES do not use toDescribeURIs as the query already returned the
           * correct labels.
           */
          addModelToES(constructModel, bulkRequest, false);
          count += bulk.size();
        } catch (Exception e) {
          logger.error("Error while querying for modified content. {}", e.getLocalizedMessage());
          return false;
        } finally {
          qExec.close();
        }
      } catch (QueryParseException qpe) {
        logger.warn(
            "Could not parse Sync query. Please provide a relevant query. {}",
            qpe.getLocalizedMessage());
        return false;
      }
    }
    logger.info(
        "Finished synchronisation: Deleted {}, Updated {}/{}", deleted, count, syncUris.size());
    return true;
  }
Code example #26
  @Test
  public void testWURCS() {

    String ct =
        "RES\n"
            + "1b:b-dglc-HEX-1:5\n"
            + "2s:n-acetyl\n"
            + "3b:b-dglc-HEX-1:5\n"
            + "4s:n-acetyl\n"
            + "5b:b-dman-HEX-1:5\n"
            + "6b:a-dman-HEX-1:5\n"
            + "7b:a-dman-HEX-1:5\n"
            + "8b:a-dman-HEX-1:5\n"
            + "9b:a-dman-HEX-1:5\n"
            + "10b:a-dman-HEX-1:5\n"
            + "LIN\n"
            + "1:1d(2+1)2n\n"
            + "2:1o(4+1)3d\n"
            + "3:3d(2+1)4n\n"
            + "4:3o(4+1)5d\n"
            + "5:5o(3+1)6d\n"
            + "6:6o(2+1)7d\n"
            + "7:5o(6+1)8d\n"
            + "8:8o(3+1)9d\n"
            + "9:8o(6+1)10d\n"
            + "UND\n"
            + "UND1:100.0:100.0\n"
            + "ParentIDs:7|9|10\n"
            + "SubtreeLinkageID1:o(2+1)d\n"
            + "RES\n"
            + "11b:a-dman-HEX-1:5";

    ct = ct.replaceAll("\n", "\\\\n");

    String queryString =
        "PREFIX glycan: <http://purl.jp/bio/12/glyco/glycan#>\n"
            + "PREFIX glytoucan:  <http://www.glytoucan.org/glyco/owl/glytoucan#>\n"
            + "PREFIX  xsd:  <http://www.w3.org/2001/XMLSchema#>\n"
            + "\n"
            + "SELECT DISTINCT ?Sequence\n"
            + "FROM <http://rdf.glytoucan.org>\n"
            + "FROM <http://rdf.glytoucan.org/sequence/wurcs>\n"
            + "\n"
            + "WHERE {\n"
            + "?SaccharideURI a glycan:saccharide .\n"
            + "?SaccharideURI glycan:has_glycosequence ?GlycanSequenceURI .\n"
            + "?GlycanSequenceURI glycan:has_sequence ?Sequence .\n"
            + "?GlycanSequenceURI glycan:in_carbohydrate_format glycan:carbohydrate_format_wurcs .\n"
            + "?SaccharideURI glycan:has_glycosequence ?FormatGlycoSequenceURI .\n"
            + "?FormatGlycoSequenceURI glycan:in_carbohydrate_format glycan:carbohydrate_format_glycoct .\n"
            + "?FormatGlycoSequenceURI glycan:has_sequence"
            + " \""
            + ct
            + "\""
            + "^^xsd:string ."
            + "}";

    System.out.println("String: " + queryString);

    Query query = QueryFactory.create(queryString);

    QueryExecution qExe =
        QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
    ResultSet results = qExe.execSelect();
    ResultSetFormatter.out(System.out, results, query);
  }
Code example #27
  @Test
  public void testGlycotoucanCTSearch() {

    List<Structure> structures = Ebean.find(Structure.class).findList();
    String ct = "";

    for (Structure structure : structures) {
      if (structure.id >= 7400) {

        if (structure.glycanst.startsWith("v--")) {
          structure.glycanst = structure.glycanst.replace("v--", "FreeEnd--");
        }

        if (structure.glycanst.startsWith("FreenEnd")) {
          structure.glycanst = structure.glycanst.replace("FreenEnd", "FreeEnd");
        }

        if (structure.glycanst.startsWith("FreeEnd?")) {
          structure.glycanst = structure.glycanst.replace("FreeEnd?", "FreeEnd--?");
        }

        if (structure.glycanst.startsWith("<Gly") || structure.glycanst.contains("0.0000u")) {
          continue;
        }

        System.out.println(structure.getGlycanst());

        BuilderWorkspace workspace = new BuilderWorkspace(new GlycanRendererAWT());
        workspace.setNotation("cfg"); // cfgbw | uoxf | uoxfcol | text

        GlycanRenderer renderer = workspace.getGlycanRenderer();

        org.eurocarbdb.application.glycanbuilder.Glycan glycan =
            org.eurocarbdb.application.glycanbuilder.Glycan.fromString(structure.glycanst);
        ct = glycan.toGlycoCTCondensed();

        // System.out.println(ct);
        //  }
        // }

        /*String ct = "RES\\n" +
                "1b:a-dgal-HEX-1:5\\n" +
                "2s:n-acetyl\\n" +
                "3b:b-dgal-HEX-1:5\\n" +
                "4b:a-lgal-HEX-1:5|6:d\\n" +
                "5b:a-dgal-HEX-1:5\\n" +
                "6s:n-acetyl\\n" +
                "7b:b-dglc-HEX-1:5\\n" +
                "8s:n-acetyl\\n" +
                "LIN\\n" +
                "1:1d(2+1)2n\\n" +
                "2:1o(3+1)3d\\n" +
                "3:3o(2+1)4d\\n" +
                "4:3o(3+1)5d\\n" +
                "5:5d(2+1)6n\\n" +
                "6:1o(6+1)7d\\n" +
                "7:7d(2+1)8n";

        ct = "RES\n" +
                "1b:b-dglc-HEX-1:5\n" +
                "2s:n-acetyl\n" +
                "3b:b-dglc-HEX-1:5\n" +
                "4s:n-acetyl\n" +
                "5b:b-dman-HEX-1:5\n" +
                "6b:a-dman-HEX-1:5\n" +
                "7b:a-dman-HEX-1:5\n" +
                "8b:a-lgal-HEX-1:5|6:d\n" +
                "LIN\n" +
                "1:1d(2+1)2n\n" +
                "2:1o(4+1)3d\n" +
                "3:3d(2+1)4n\n" +
                "4:3o(4+1)5d\n" +
                "5:5o(3+1)6d\n" +
                "6:5o(6+1)7d\n" +
                "7:1o(6+1)8d\n" +
                "UND\n" +
                "UND1:100.0:100.0\n" +
                "ParentIDs:1|3|5|6|7|8\n" +
                "SubtreeLinkageID1:x(-1+1)x\n" +
                "RES\n" +
                "9b:b-dglc-HEX-1:5\n" +
                "10s:n-acetyl\n" +
                "11b:a-lgal-HEX-1:5|6:d\n" +
                "12b:b-dgal-HEX-1:5\n" +
                "13b:a-dgro-dgal-NON-2:6|1:a|2:keto|3:d\n" +
                "14s:n-acetyl\n" +
                "LIN\n" +
                "8:9d(2+1)10n\n" +
                "9:9o(3+1)11d\n" +
                "10:9o(4+1)12d\n" +
                "11:12o(-1+2)13d\n" +
                "12:13d(5+1)14n\n" +
                "UND2:100.0:100.0\n" +
                "ParentIDs:1|3|5|6|7|8\n" +
                "SubtreeLinkageID1:x(-1+1)x\n" +
                "RES\n" +
                "15b:b-dglc-HEX-1:5\n" +
                "16s:n-acetyl\n" +
                "17b:a-lgal-HEX-1:5|6:d\n" +
                "18b:b-dgal-HEX-1:5\n" +
                "LIN\n" +
                "13:15d(2+1)16n\n" +
                "14:15o(3+1)17d\n" +
                "15:15o(4+1)18d\n" +
                "UND3:100.0:100.0\n" +
                "ParentIDs:1|3|5|6|7|8\n" +
                "SubtreeLinkageID1:x(-1+1)x\n" +
                "RES\n" +
                "19b:b-dglc-HEX-1:5\n" +
                "20s:n-acetyl\n" +
                "21b:b-dgal-HEX-1:5\n" +
                "LIN\n" +
                "16:19d(2+1)20n\n" +
                "17:19o(4+1)21d";
                */

        ct = ct.replaceAll("\n", "\\\\n").replaceAll("x\\(", "u\\(").replaceAll("\\)x", "\\)u");
        System.out.println("new ct: " + ct);

        String queryString =
            "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"
                + "PREFIX glycan: <http://purl.jp/bio/12/glyco/glycan#>\n"
                + "PREFIX wurcs: <http://www.glycoinfo.org/glyco/owl/wurcs#>\n"
                + "SELECT DISTINCT ?glycan ?c\n"
                + "# FROM <http://rdf.glycoinfo.org/wurcs/0.5.0>\n"
                + "# FROM <http://rdf.glycoinfo.org/wurcs/0.5.0/ms>\n"
                + "WHERE {\n"
                + "  ?glycan a \tglycan:glycosequence ;\n"
                + "\tglycan:in_carbohydrate_format  glycan:carbohydrate_format_glycoct ;\n"
                + "\tglycan:has_sequence\n"
                + "\t\t?c filter(contains(?c, \"RES\\n1b:b-dglc-HEX-1\")) .\n"
                +
                // "\t\t?c filter(contains(?c, \"" + ct + "\" )) .\n" +
                "\n"
                + "  }\n"
                + "  ORDER BY ?glycan\n"
                + "limit 10";

        System.out.println("String: " + queryString + "\t\tID: " + structure.id);

        Query query = QueryFactory.create(queryString);

        QueryExecution qExe =
            QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query);
        ResultSet results = qExe.execSelect();
        ResultSetFormatter.out(System.out, results, query);
      }
    }
  }