Example #1
  void printFailedResultSetTest(
      Query query,
      QueryExecution qe,
      ResultSetRewindable qrExpected,
      ResultSetRewindable qrActual) {
    PrintStream out = System.out;
    out.println();
    out.println("=======================================");
    out.println("Failure: " + description());
    out.println("Query: \n" + query);
    //       if ( qe != null && qe.getDataset() != null )
    //       {
    //           out.println("Data: \n"+qe.getDataset().asDatasetGraph()) ;
    //       }
    out.println("Got: " + qrActual.size() + " --------------------------------");
    qrActual.reset();
    ResultSetFormatter.out(out, qrActual, query.getPrefixMapping());
    qrActual.reset();
    out.flush();

    out.println("Expected: " + qrExpected.size() + " -----------------------------");
    qrExpected.reset();
    ResultSetFormatter.out(out, qrExpected, query.getPrefixMapping());
    qrExpected.reset();

    out.println();
    out.flush();
  }
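This helper prints the actual and expected bindings as text tables; the reset() calls matter because a ResultSetRewindable can be replayed after it has been consumed. A minimal standalone sketch of the same pattern, assuming Jena 3.x package names (most snippets here use the older com.hp.hpl.jena packages) and a made-up triple:

  // A minimal sketch, assuming Jena 3.x package names and a single made-up triple.
  import org.apache.jena.query.*;
  import org.apache.jena.rdf.model.Model;
  import org.apache.jena.rdf.model.ModelFactory;
  import org.apache.jena.vocabulary.RDFS;

  public class RewindableExample {
    public static void main(String[] args) {
      Model model = ModelFactory.createDefaultModel();
      model.add(model.createResource("http://example.org/thing"), RDFS.label, "thing");

      Query query = QueryFactory.create("SELECT * WHERE { ?s ?p ?o }");
      try (QueryExecution qe = QueryExecutionFactory.create(query, model)) {
        // makeRewindable buffers the bindings so the result set can be replayed.
        ResultSetRewindable rs = ResultSetFactory.makeRewindable(qe.execSelect());
        System.out.println("Rows: " + rs.size());
        ResultSetFormatter.out(System.out, rs, query.getPrefixMapping());
        rs.reset(); // rewind and print the same rows again
        ResultSetFormatter.out(System.out, rs);
      }
    }
  }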
Example #2
  void runTestSelect(Query query, QueryExecution qe) throws Exception {
    // Do the query!
    ResultSetRewindable resultsActual = ResultSetFactory.makeRewindable(qe.execSelect());

    qe.close();

    if (results == null) return;

    // Assumes resultSetCompare can cope with full isomorphism possibilities.
    ResultSetRewindable resultsExpected;
    if (results.isResultSet())
      resultsExpected = ResultSetFactory.makeRewindable(results.getResultSet());
    else if (results.isModel())
      resultsExpected = ResultSetFactory.makeRewindable(results.getModel());
    else {
      fail("Wrong result type for SELECT query");
      resultsExpected = null; // Keep the compiler happy
    }

    if (query.isReduced()) {
      // Reduced - best we can do is DISTINCT
      resultsExpected = unique(resultsExpected);
      resultsActual = unique(resultsActual);
    }

    // Hack for CSV : tests involving bNodes need manual checking.
    if (testItem.getResultFile().endsWith(".csv")) {
      resultsActual = convertToStrings(resultsActual);
      resultsActual.reset();

      int nActual = ResultSetFormatter.consume(resultsActual);
      int nExpected = ResultSetFormatter.consume(resultsExpected);
      resultsActual.reset();
      resultsExpected.reset();
      assertEquals("CSV: Different number of rows", nExpected, nActual);
      boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);
      if (!b) System.out.println("Manual check of CSV results required: " + testItem.getName());
      return;
    }

    boolean b = resultSetEquivalent(query, resultsExpected, resultsActual);

    if (!b) {
      resultsExpected.reset();
      resultsActual.reset();
      boolean b2 = resultSetEquivalent(query, resultsExpected, resultsActual);
      printFailedResultSetTest(query, qe, resultsExpected, resultsActual);
    }
    assertTrue("Results do not match: " + testItem.getName(), b);

    return;
  }
Example #3
 @Override
 public void writeTo(
     ResultSet results,
     Class<?> type,
     Type genericType,
     Annotation[] annotations,
     MediaType mediaType,
     MultivaluedMap<String, Object> httpHeaders,
     OutputStream entityStream)
     throws IOException, WebApplicationException {
   if (mediaType.equals(org.graphity.server.MediaType.APPLICATION_SPARQL_RESULTS_JSON_TYPE))
     ResultSetFormatter.outputAsJSON(entityStream, results);
   else ResultSetFormatter.outputAsXML(entityStream, results);
 }
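This writer picks the serialization from the negotiated media type. A minimal sketch of the same two calls outside a JAX-RS container, assuming Jena 3.x package names; the rewind between them matters because each output call consumes the iterator:

  // A minimal sketch (assumed Jena 3.x package names); an empty in-memory model is
  // used just so the code is self-contained.
  import java.io.ByteArrayOutputStream;
  import org.apache.jena.query.*;
  import org.apache.jena.rdf.model.ModelFactory;

  public class SerializeResults {
    public static void main(String[] args) {
      Query query = QueryFactory.create("SELECT * WHERE { ?s ?p ?o }");
      try (QueryExecution qe =
          QueryExecutionFactory.create(query, ModelFactory.createDefaultModel())) {
        ResultSetRewindable rs = ResultSetFactory.makeRewindable(qe.execSelect());

        ByteArrayOutputStream json = new ByteArrayOutputStream();
        ResultSetFormatter.outputAsJSON(json, rs); // application/sparql-results+json

        rs.reset(); // each output call consumes the iterator, so rewind first
        ByteArrayOutputStream xml = new ByteArrayOutputStream();
        ResultSetFormatter.outputAsXML(xml, rs); // application/sparql-results+xml

        System.out.println(json);
        System.out.println(xml);
      }
    }
  }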
Example #4
  /** Run the test */
  public void run() {

    // optionally load properties file
    try {
      logger.info("Loading properties file");
      props.load(getClass().getClassLoader().getResourceAsStream("easyjena.properties"));
    } catch (IOException e) {
      logger.error("Error loading properties file", e);
    }

    // create ontology manager
    JenaOntologyManager jom = new JenaOntologyManager(props);

    try {
      // add mappings
      jom.addImportLocationMapping(
          "http://example.com/ontologies/example#",
          "/path/to/ontology.rdf",
          JenaOntologyManager.LoadingLocation.ALL);
      jom.loadImportLocationConfigFile(
          new File("/path/to/ontology/ImportLocationConfig.xml").getPath());

      // load model
      Model myModel =
          jom.loadOntology("http://xmlns.com/foaf/0.1/", JenaOntologyManager.LoadingLocation.ALL);

      // inference
      myModel.add(jom.runSPINInferences(myModel));
      myModel.add(jom.runClassLevelInferences(myModel, JenaOntologyManager.ReasonerType.RDFS));
      myModel.add(jom.runTemplate("http://example.com#myTemplate", myModel));
      myModel.add(jom.runTemplatesOfClass("http://example.com#myTemplateClass", myModel));

      // run a template with spin arguments
      Map<String, String> arguments = new HashMap<>();
      arguments.put("foo", "http://example.com#bar");
      myModel.add(jom.runTemplate("http://example.com#myTemplateWithArgs", arguments, myModel));

      // SPARQL
      myModel.add(jom.update("INSERT DATA { foaf:Test rdf:comment \"TestComment\" }", myModel));
      myModel.add(
          jom.queryConstruct(
              "CONSTRUCT { ?s rdf:comment \"Comment\" } WHERE { ?s ?p ?o }", myModel));

      String query =
          "SELECT * WHERE {\n"
              + "?s rdfs:subClassOf* owl:Class .\n"
              + "FILTER(!isBlank(?s)) .\n"
              + "}";
      logger.info(
          "\n"
              + ResultSetFormatter.asText(
                  jom.querySelect(query, myModel, OntModelSpec.OWL_MEM_RDFS_INF)));

      // save to file
      jom.saveModel(myModel, "/path/to/dir/test.ttl", JenaOntologyManager.ModelFormat.N3);

    } catch (FileNotFoundException e) {
      logger.error("Ontology not found", e);
    }
  }
Example #5
  // TEMPORARY
  private boolean checkResultsByModel(
      Query query, Model expectedModel, ResultSetRewindable results) {
    // Fudge - can't cope with ordered results properly.  The output writer for ResultSets does not
    // add rs:index.

    results.reset();
    Model actualModel = ResultSetFormatter.toModel(results);
    // Tidy the models.
    // Very regrettable.

    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet);
    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution);
    expectedModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding);
    expectedModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null);
    expectedModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null);

    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSet);
    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultSolution);
    actualModel.removeAll(null, RDF.type, ResultSetGraphVocab.ResultBinding);
    actualModel.removeAll(null, ResultSetGraphVocab.size, (RDFNode) null);
    actualModel.removeAll(null, ResultSetGraphVocab.index, (RDFNode) null);

    boolean b = expectedModel.isIsomorphicWith(actualModel);
    if (!b) {
      System.out.println("---- Expected");
      expectedModel.write(System.out, "TTL");
      System.out.println("---- Actual");
      actualModel.write(System.out, "TTL");
      System.out.println("----");
    }
    return b;
  }
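ResultSetFormatter.toModel encodes the bindings with the rs: result-set vocabulary that this test strips out before comparing models. A minimal sketch, under the assumption that the ARQ version in use still provides toModel (as the test above does), showing what that encoding looks like:

  // A minimal sketch of the rs: encoding produced by ResultSetFormatter.toModel.
  import org.apache.jena.query.*;
  import org.apache.jena.rdf.model.Model;
  import org.apache.jena.rdf.model.ModelFactory;
  import org.apache.jena.vocabulary.RDFS;

  public class ResultSetAsModel {
    public static void main(String[] args) {
      Model data = ModelFactory.createDefaultModel();
      data.add(data.createResource("http://example.org/a"), RDFS.label, "a");

      Query query = QueryFactory.create("SELECT ?s ?o WHERE { ?s ?p ?o }");
      try (QueryExecution qe = QueryExecutionFactory.create(query, data)) {
        // The bindings come back as RDF using the rs: vocabulary
        // (rs:ResultSet, rs:solution, rs:binding, rs:value, ...).
        Model encoded = ResultSetFormatter.toModel(qe.execSelect());
        encoded.write(System.out, "TTL");
      }
    }
  }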
Example #6
  public static void main(String[] args) {
    Store store = HBaseRdfFactory.connectStore("Store/hbaserdf-simple.ttl");
    store.getTableFormatter().format();

    Model model = HBaseRdfFactory.connectDefaultModel(store);

    model.add(
        model.createResource("http://example.org/person#John"),
        VCARD.FN,
        model.asRDFNode(Node.createLiteral("John Smith")));
    model.add(
        model.createResource("http://example.org/person#John"),
        VCARD.EMAIL,
        model.asRDFNode(Node.createLiteral("*****@*****.**")));
    model.add(
        model.createResource("http://example.org/person#Jim"),
        VCARD.FN,
        model.asRDFNode(Node.createLiteral("Jim Mason")));
    model.add(
        model.createResource("http://example.org/person#Jim"),
        VCARD.EMAIL,
        model.asRDFNode(Node.createLiteral("*****@*****.**")));
    model.add(
        model.createResource("http://example.org/person#Bob"),
        VCARD.FN,
        model.asRDFNode(Node.createLiteral("Bob Brown")));
    model.add(
        model.createResource("http://example.org/person#Bob"),
        VCARD.EMAIL,
        model.asRDFNode(Node.createLiteral("*****@*****.**")));

    StmtIterator iter = model.listStatements();
    while (iter.hasNext()) {
      System.out.println(iter.next().toString());
    }

    iter = model.getResource("http://example.org/person#John").listProperties();
    while (iter.hasNext()) {
      System.out.println(iter.next().toString());
    }

    ResIterator resIter = model.listSubjects();
    while (resIter.hasNext()) {
      System.out.println(resIter.next().toString());
    }

    String query =
        " PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#> "
            + " SELECT ?x "
            + " WHERE "
            + " { "
            + " 		<http://example.org/person#John> vcard:FN ?x "
            + " } ";

    QueryExecution qe = QueryExecutionFactory.create(query, model);
    ResultSet rs = qe.execSelect();
    ResultSetFormatter.out(rs);
  }
Example #7
 public static void logResults(String name, ResultSetRewindable results) {
   if (log.isLoggable(Level.WARNING)) {
     log.warning(name + " (" + results.size() + ")");
     results.reset();
     ByteArrayOutputStream out = new ByteArrayOutputStream();
     ResultSetFormatter.output(out, results, ResultsFormat.FMT_RDF_TTL);
     log.warning("\n" + out.toString());
   }
 }
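The same capture-to-string idea works with the plain text table as well. A minimal sketch with assumed names, using java.util.logging as in the snippet above:

  // A minimal sketch capturing the text table instead of Turtle output.
  import java.io.ByteArrayOutputStream;
  import java.util.logging.Logger;
  import org.apache.jena.query.*;
  import org.apache.jena.rdf.model.ModelFactory;

  public class LogResultsExample {
    private static final Logger log = Logger.getLogger("results");

    public static void main(String[] args) {
      Query query = QueryFactory.create("SELECT * WHERE { ?s ?p ?o }");
      try (QueryExecution qe =
          QueryExecutionFactory.create(query, ModelFactory.createDefaultModel())) {
        ResultSetRewindable rs = ResultSetFactory.makeRewindable(qe.execSelect());
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        ResultSetFormatter.out(out, rs); // same text table that out(ResultSet) prints
        log.warning("query results (" + rs.size() + ")\n" + out);
      }
    }
  }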
Example #8
  /** Rigorous Test :-) */
  public void testApp() {
    final List resultvars = QueryFactory.create("select ?x ?y WHERE {?x a ?Y}").getResultVars();
    System.out.println(
        ResultSetFormatter.asXMLString(
            new ResultSet() {

              public Model getResourceModel() {
                // TODO Auto-generated method stub
                return null;
              }

              public List getResultVars() {
                // TODO Auto-generated method stub
                return resultvars;
              }

              public int getRowNumber() {
                // TODO Auto-generated method stub
                return 0;
              }

              public boolean hasNext() {
                // TODO Auto-generated method stub
                return false;
              }

              public boolean isOrdered() {
                // TODO Auto-generated method stub
                return false;
              }

              public Object next() {
                // TODO Auto-generated method stub
                return null;
              }

              public Binding nextBinding() {
                // TODO Auto-generated method stub
                return null;
              }

              public QuerySolution nextSolution() {
                // TODO Auto-generated method stub
                return null;
              }

              public void remove() {
                // TODO Auto-generated method stub

              }
            }));
  }
Example #9
  public String executeQuery(String queryString) {
    /*
    //System.out.println(queryString);
    Query query = QueryFactory.create(queryString);

    QueryExecution qe = QueryExecutionFactory.create(query, model);
    ResultSet results = qe.execSelect();

    ByteArrayOutputStream ostream = new ByteArrayOutputStream();

    ResultSetFormatter.out(ostream, results, query);
    //ResultSetFormatter.out(System.out, results, query);
    String r = "";
    try{
        r = new String(ostream.toByteArray(), "UTF-8");
        //System.out.println(r);
    }
    catch(Exception e){
        System.out.println(e.getMessage());
    }
    qe.close();
    return r;
    */

    // newly added to test JSON output
    try {
      Query query = QueryFactory.create(queryString);

      QueryExecution qe = QueryExecutionFactory.create(query, model);
      ResultSet results = qe.execSelect();

      ByteArrayOutputStream ostream = new ByteArrayOutputStream();
      ResultSetFormatter.outputAsJSON(ostream, results);
      // ResultSetFormatter.out(ostream, results, query);
      // ResultSetFormatter.out(System.out, results, query);
      String r = "";
      try {
        r = new String(ostream.toByteArray(), "UTF-8");
        // System.out.println(r);
      } catch (Exception e) {
        System.out.println(e.getMessage());
      }
      qe.close();
      return r;
    } catch (Exception e) {
      System.out.println(e.toString());
      return "";
    }
  }
Example #10
  private static void query(Dataset dataset) {
    Query q =
        QueryFactory.create(
            "PREFIX pf: <http://jena.hpl.hp.com/ARQ/property#>"
                + "SELECT * WHERE {"
                + "  GRAPH ?g { "
                + "    ?doc ?p ?lit ."
                + "    (?lit ?score ) pf:textMatch '+london' ."
                + "  } "
                + "}");

    QueryExecution qe = QueryExecutionFactory.create(q, dataset);
    ResultSet res = qe.execSelect();
    ResultSetFormatter.out(res);
    qe.close();
  }
Example #11
  public static void main2(String[] args) {
    try {
      String request = FileUtils.readFileToString(new File("src/test/requete.rq"));

      Model trace = ModelFactory.createDefaultModel();
      trace.read(new FileInputStream("src/test/obsels.rdf"), "", KtbsConstants.JENA_RDF_XML);

      Query query = QueryFactory.create(request, Syntax.syntaxARQ);

      // Execute the query and obtain results
      QueryExecution qExec = QueryExecutionFactory.create(query, trace);
      ResultSetFormatter.out(System.out, qExec.execSelect(), query);

    } catch (IOException e) {
      e.printStackTrace();
    }
  }
Example #12
  public ArrayList<QuerySolution> executeQueryRaw(String queryString) {
    // System.out.println(queryString);
    try {
      Query query = QueryFactory.create(queryString);

      QueryExecution qe = QueryExecutionFactory.create(query, model);
      ResultSet results = qe.execSelect();
      /*
      ByteArrayOutputStream ostream = new ByteArrayOutputStream();
      ResultSetFormatter.out(ostream, results, query);
      //ResultSetFormatter.out(System.out, results, query);
      String r = "";
      try{
          r = new String(ostream.toByteArray(), "UTF-8");
          System.out.println(r);
      }
      catch(Exception e){
          System.out.println(e.getMessage());
      }
      */

      /*
      ArrayList<QuerySolution> resList = new ArrayList<QuerySolution>();
      if(results.hasNext()) {

          QuerySolution qs = results.next();
          resList.add(qs);
          //double x = qs.getLiteral("x").getFloat();
          //Literal y = qs.getLiteral("y");
          //Literal theta = qs.getLiteral("theta");
      }
      */
      ArrayList<QuerySolution> resList = (ArrayList) ResultSetFormatter.toList(results);
      qe.close();
      return resList; // results;
    } catch (Exception e) {
      System.out.println(e.toString());
      return new ArrayList<QuerySolution>();
    }
  }
Example #13
  public static void main(String... argv) {
    String queryString = "SELECT * { ?s ?p ?o }";
    Query query = QueryFactory.create(queryString);
    Store store = SDBFactory.connectStore("sdb.ttl");

    // Must be a DatasetStore to trigger the SDB query engine.
    // Creating a graph from the Store, and adding it to a general
    // purpose dataset will not necessarily exploit full SQL generation.
    // The right answers will be obtained but slowly.

    Dataset ds = DatasetStore.create(store);
    QueryExecution qe = QueryExecutionFactory.create(query, ds);
    try {
      ResultSet rs = qe.execSelect();
      ResultSetFormatter.out(rs);
    } finally {
      qe.close();
    }

    // Close the SDB connection, which also closes the underlying JDBC connection.
    store.getConnection().close();
    store.close();
  }
Example #14
  private IRI executeRule(final Rule r, final IRI inputIRI) {
    try {
      PelletOptions.USE_ANNOTATION_SUPPORT = true;

      PelletOptions.TREAT_ALL_VARS_DISTINGUISHED = controller.isTreatAllVariablesDistinguished();

      QueryEngineType type = (QueryEngineType) controller.getQueryEngineType();

      final QueryExecution qe;
      final ByteArrayOutputStream w = new ByteArrayOutputStream();

      final Query qSelect = getSelectExampleQuery(r.getQuery());

      if (type.toPellet() != null) {
        final OWLOntology queryOntology = getInputOntologyForRule(inputIRI);

        final PelletReasoner reasoner =
            PelletReasonerFactory.getInstance().createReasoner(queryOntology);

        log.info("Ontology size: " + reasoner.getKB().getInfo());

        final Dataset ds = kb2ds(reasoner.getKB());

        final QueryExecution qeSelect =
            SparqlDLExecutionFactory.create(qSelect, ds, null, type.toPellet());

        final ResultSet rs = qeSelect.execSelect();
        controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

        qe =
            SparqlDLExecutionFactory.create(
                r.getQuery(), kb2ds(reasoner.getKB()), null, type.toPellet());
        qe.execConstruct().write(w);
      } else {
        // Assumption: queryOntology is loaded here the same way as in the branch above;
        // otherwise it is not in scope for saveOntology() below.
        final OWLOntology queryOntology = getInputOntologyForRule(inputIRI);
        final ByteArrayOutputStream w2 = new ByteArrayOutputStream();
        final Model model = ModelFactory.createDefaultModel();
        try {
          controller
              .getOWLOntologyManager()
              .saveOntology(queryOntology, new TurtleOntologyFormat(), w2);
          model.read(new ByteArrayInputStream(w2.toByteArray()), "", "TURTLE");

          final QueryExecution qeSelect = QueryExecutionFactory.create(qSelect, model);

          final ResultSet rs = qeSelect.execSelect();
          controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

          qe = QueryExecutionFactory.create(r.getQuery(), model);
          qe.execConstruct().write(w);
        } catch (OWLOntologyStorageException e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }

      final IRI outputIRI = getOntologyIRIForRuleName(r.getName());

      // load the generated ontology
      final OWLOntology generatedOntology =
          controller
              .getOWLOntologyManager()
              .loadOntologyFromOntologyDocument(new ByteArrayInputStream(w.toByteArray()));
      controller.updateOntology(
          generatedOntology,
          outputIRI,
          inputIRI,
          controller.getRuleSpec().getResultFile(r).toURI());
      controller.setStatus("Rule " + r.getName() + " successfully executed");
      return outputIRI;
    } catch (OWLOntologyCreationException e1) {
      controller.setStatus(e1.getMessage());
      return null;
    }
  }
Example #15
  public static void main(String args[]) {
    String SOURCE = "http://spitfire-project.eu/ontology.rdf",
        SOURCE1 = "http://spitfire-project.eu/sn.rdf",
        NS = "http://spitfire-project.eu/ontology/ns/",
        NS1 = "http://spitfire-project.eu/ontology/ns/sn/";

    // create a model using reasoner
    OntModel model1_reasoner =
        ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF);
    OntModel model_instances =
        ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF);
    // create a model which doesn't use a reasoner
    OntModel model2_noreasoner = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);

    /* =========== 1. STORE IN A NON-OwlFull MODEL ============== */
    // read the RDF/XML file
    model1_reasoner.read(SOURCE, "RDF/XML");
    model1_reasoner.read(SOURCE1, "RDF/XML");

    model2_noreasoner.read(SOURCE, "RDF/XML");

    model2_noreasoner.read(SOURCE1, "RDF/XML");
    model1_reasoner.add(
        model1_reasoner.createResource(NS + "containedIn"), RDF.type, OWL.TransitiveProperty);

    // add the instances
    //        model1_reasoner.add(model1_reasoner.createResource(NS+"fan123"), RDF.type,
    //        		model1_reasoner.createResource(NS1+"Fan"));
    //		model_instances.add(model_instances.createResource(NS+"fan123"), RDF.type,
    //				model_instances.createResource(NS1+"Fan"));

    model_instances.add(
        model_instances.getProperty(NS + "containedIn"),
        OWL.equivalentProperty,
        model_instances.createProperty(
            "http://www.ontologydesignpatterns.org/ont/dul/DUL.owl#hasLocation"));
    model_instances.add(
        model_instances.createResource(NS + "desk_a"),
        model_instances.getProperty(NS + "containedIn"),
        model_instances.createResource(NS + "floor3"));
    model_instances.add(
        model2_noreasoner.createResource(NS + "floor3"),
        model_instances.getProperty(NS + "containedIn"),
        model_instances.createResource(NS + "cti"));
    //		model1_reasoner.add(model2_noreasoner);

    model1_reasoner.add(model2_noreasoner);

    // prints out the RDF/XML structure
    printModel(model1_reasoner, null);
    printModel(model1_reasoner, model1_reasoner.getProperty(NS + "containedIn"));
    //		printModel(model2_noreasoner);

    /* =========== 2. STORE IN THE TDB ============== */
    // Direct way: Make a TDB-backed dataset
    String directory =
        System.getProperty("user.dir")
            + ".ld4s/tdb"
            + LD4SConstants.SYSTEM_SEPARATOR
            + "LD4SDataset1";
    File dirf = new File(directory);
    if (!dirf.exists()) {
      dirf.mkdirs();
    }
    Dataset dataset = TDBFactory.createDataset(directory);
    TDB.sync(dataset);

    Resource subj = model1_reasoner.listSubjects().next();
    dataset.begin(ReadWrite.WRITE);
    try {
      dataset.addNamedModel(subj.getURI(), model1_reasoner);
      dataset.addNamedModel(NS + "desk_a", model_instances);
      dataset.commit();

      // Or call .abort()
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      dataset.end();
      dataset.close();
    }

    /* =========== 3. QUERY ============== */
    // Create a new query
    String queryString =
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>  "
            + "PREFIX dul: <http://www.ontologydesignpatterns.org/ont/dul/DUL.owl#>  "
            + "PREFIX spt: <"
            + NS
            + ">  "
            + "select ?uri "
            + "where { "
            + "?uri dul:hasLocation spt:cti  "
            + "} ";
    Query query = QueryFactory.create(queryString);

    System.out.println("----------------------");

    System.out.println("Query Result Sheet");

    System.out.println("----------------------");

    System.out.println("Direct&Indirect Descendants (model1)");

    System.out.println("-------------------");

    // Execute the query and obtain results
    QueryExecution qe = QueryExecutionFactory.create(query, model1_reasoner);
    com.hp.hpl.jena.query.ResultSet results = qe.execSelect();

    // Output query results
    ResultSetFormatter.out(System.out, results, query);

    qe.close();

    System.out.println("----------------------");
    System.out.println("Only Direct Descendants (model2)");
    System.out.println("----------------------");

    // Execute the query and obtain results
    qe = QueryExecutionFactory.create(query, model2_noreasoner);
    results = qe.execSelect();

    // Output query results
    ResultSetFormatter.out(System.out, results, query);
    qe.close();
  }
Example #16
  // Using Stardog with the [Jena](http://jena.apache.org) API
  // -------------------
  // In this example we'll show how to use the Stardog Jena API bindings.
  public static void main(String[] args) throws Exception {
    // Creating a Server
    // -----------------
    // You'll need a server to connect to, obviously.  For the example, let's create an embedded
    // server.
    Server aServer = Stardog.buildServer().bind(SNARLProtocolConstants.EMBEDDED_ADDRESS).start();

    try {
      // Next we'll establish an admin connection to Stardog so we can create a database to use for
      // the example
      AdminConnection aAdminConnection =
          AdminConnectionConfiguration.toEmbeddedServer().credentials("admin", "admin").connect();

      try {
        // If the database already exists, we'll drop it and create a fresh copy
        if (aAdminConnection.list().contains("testJena")) {
          aAdminConnection.drop("testJena");
        }

        aAdminConnection.createMemory("testJena");
      } finally {
        aAdminConnection.close();
      }

      // Now we open a Connection to our new database
      Connection aConn =
          ConnectionConfiguration.to("testJena").credentials("admin", "admin").connect();

      // Then we obtain a Jena `Model` for the specified stardog database which is backed by our
      // `Connection`
      Model aModel = SDJenaFactory.createModel(aConn);
      try {

        // Start a transaction before adding the data.  This is not required, but it is faster to
        // group the entire add into a single transaction rather
        // than rely on the auto commit of the underlying stardog connection.
        aModel.begin();

        // Read data into the model.  Note: this will add one statement at a time.  Bulk loading needs
        // to be performed directly with the BulkUpdateHandler provided
        // by the underlying graph, or read in files in RDF/XML format, which uses the bulk loader
        // natively.  Alternatively, you can load data into the stardog
        // database using its native API via the command line client.
        aModel.getReader("N3").read(aModel, new FileInputStream("data/sp2b_10k.n3"), "");

        // When you're done adding, you need to commit the changes
        aModel.commit();

        // Query that we will run against the data we just loaded
        String aQueryString =
            "select * where { ?s ?p ?o. filter(?s = <http://localhost/publications/articles/Journal1/1940/Article1>).}";

        // Create a query...
        Query aQuery = QueryFactory.create(aQueryString);

        // ... and run it
        QueryExecution aExec = QueryExecutionFactory.create(aQuery, aModel);

        try {
          // Now print the results
          ResultSetFormatter.out(aExec.execSelect(), aModel);
        } finally {
          // Always close the execution
          aExec.close();
        }
      } finally {
        // close the model to free up the connection to the stardog database
        aModel.close();
      }
    } finally {
      // You must stop the server when you're done
      aServer.stop();
    }
  }
Example #17
  public String executeQuery(String query, String t) {
    String res = "";
    Query query1 = QueryFactory.create(query);
    QueryExecution qexec = QueryExecutionFactory.create(query1, ontologie);
    Pattern p = Pattern.compile("([0-9]+\\.[0-9])");

    try {
      if (query.toLowerCase().contains("ask")) {
        boolean resultsASK = qexec.execAsk();
        if (resultsASK) {
          return "true";
        } else {
          return "false";
        }
      } else if (query.toLowerCase().contains("select")) {
        ResultSet results = qexec.execSelect();
        res = "<table border=\"1\"><tr><th>Variable</th><th>Value</th></tr>";
        while (results.hasNext()) {
          QuerySolution next = results.next();
          Iterator<String> varNames = next.varNames();

          while (varNames.hasNext()) {
            String next1 = varNames.next(); // name of the variable in the SELECT
            System.out.println("next1= " + next1);
            String e = next.get(next1).toString(); // value bound to the variable
            // ignore rdf:type values such as resource, owl:prop....
            if (e.equals("http://www.w3.org/2000/01/rdf-schema#Class")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Property")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#Datatype")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#List")
                || e.equals("http://www.w3.org/2004/03/trix/rdfg-1Graph")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#subPropertyOf")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#range")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#Resource")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#Literal")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#label")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Statement")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#subClassOf")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#subject")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#object")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#nil")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#range")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#rest")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#first")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#XMLLiteral")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#comment")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#ContainerMembershipProperty")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#predicate")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Seq")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#seeAlso")
                || e.equals("http://www.w3.org/1999/02/22-rdf-syntax-ns#Bag")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#seeAlso")
                || e.equals("http://www.w3.org/2000/01/rdf-schema#domain")) {
              continue;
            }
            if (t != null && !t.isEmpty()) { // apply the trust factor
              //                            String[] split_res = next.get(next1).toString().split(" ");
              //                            for(String s:split_res){
              Matcher m = p.matcher(next.get(next1).toString());
              if (m.find()) {
                float parseInt = Float.parseFloat(next.get(next1).toString());
                float parseInt1 = Float.parseFloat(t);
                e = Float.toString(parseInt * parseInt1);
              }
              //                            res = res + "<tr> <td><b>" + next1 + ": </b></td><td>" + e + "</td></tr>";
              //                            }
            } // else
            {
              res = res + " <tr> <td><b>" + next1 + ": </b></td><td>" + e + "</td></tr>";
            }
          }
          res = res + " <tr> <td>New line</td></tr>";
        }

        res = res + "</table>";
        ResultSetFormatter.out(System.out, results, query1);
      } else { // DESCRIBE case
        Model describeModel = qexec.execDescribe();

        res = describeModel.toString();
      }
    } finally {
      qexec.close();
    }

    return res;
  }
Example #18
 //	public static String fetch(boolean forceUpdate, File cacheDir) {
 public static String fetch() {
   ResultSet resultSet = SparqlService.query("http://eculture2.cs.vu.nl:5020/sparql/", QUERY);
   return ResultSetFormatter.asText(resultSet);
 }
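fetch() goes through a SparqlService helper; a roughly equivalent sketch using Jena's own remote query support is below. The endpoint URL is taken from the snippet, while the query string is a placeholder because the QUERY constant is not shown:

  // A rough equivalent using Jena's own remote query support (assumed Jena 3.x names).
  import org.apache.jena.query.*;

  public class RemoteFetch {
    private static final String ENDPOINT = "http://eculture2.cs.vu.nl:5020/sparql/";
    private static final String QUERY = "SELECT * WHERE { ?s ?p ?o } LIMIT 10"; // placeholder

    public static String fetch() {
      try (QueryExecution qe = QueryExecutionFactory.sparqlService(ENDPOINT, QUERY)) {
        return ResultSetFormatter.asText(qe.execSelect());
      }
    }

    public static void main(String[] args) {
      System.out.println(fetch()); // requires the endpoint to be reachable
    }
  }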