Example #1
  public static Model exec(Model model, final Table table, Query query) throws IOException {
    OntModel inferencedModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    // Wrap the table as an inline data block (ARQ's syntax element for VALUES).
    ElementData tableElementData =
        new ElementData() {
          @Override
          public Table getTable() {
            return table;
          }
        };
    for (Var var : table.getVars()) {
      tableElementData.add(var);
    }
    // Prepend the data block to the query's original WHERE pattern.
    ElementGroup elementGroup = new ElementGroup();
    elementGroup.addElement(tableElementData);
    if (query.getQueryPattern() instanceof ElementGroup) {
      for (Element element : ((ElementGroup) query.getQueryPattern()).getElements()) {
        elementGroup.addElement(element);
      }
    } else {
      elementGroup.addElement(query.getQueryPattern());
    }
    query.setQueryPattern(elementGroup);

    //        QueryExecution ex = QueryExecutionFactory.create(query, model);
    QueryExecution ex = ARQFactory.get().createQueryExecution(query, model);
    try {
      // Fill the inference model from either CONSTRUCT or SELECT results.
      if (query.isConstructType()) {
        ex.execConstruct(inferencedModel);
      } else {
        inferencedModel.add(ex.execSelect().getResourceModel());
      }
    } finally {
      // Release the execution's resources in all cases.
      ex.close();
    }
    return inferencedModel;
  }
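For context, ElementData is ARQ's syntax element for an inline VALUES block, so the code above effectively prepends the table's rows to the query's WHERE clause. A minimal caller might look like the following sketch; it assumes ARQ's TableData as the Table implementation, and the URI is illustrative (imports omitted, as in the examples on this page):

  // Build a one-row table binding ?s, i.e. VALUES ?s { <http://example.org/a> }.
  Var s = Var.alloc("s");
  Binding row = BindingFactory.binding(s, NodeFactory.createURI("http://example.org/a"));
  Table bindings = new TableData(
      Collections.singletonList(s), Collections.singletonList(row));

  Model base = ModelFactory.createDefaultModel();
  Query construct = QueryFactory.create("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }");
  Model result = exec(base, bindings, construct);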
Example #2
  private void generateSample() {
    logger.info("Generating sample...");
    sample = ModelFactory.createDefaultModel();

    // we have to set up a new query execution factory working on our local model
    qef = new QueryExecutionFactoryModel(sample);
    reasoner = new SPARQLReasoner(qef);

    // get the page size
    // TODO put to base class
    long pageSize = 10000; // PaginationUtils.adjustPageSize(globalQef, 10000);

    ParameterizedSparqlString sampleQueryTemplate = getSampleQuery();
    sampleQueryTemplate.setIri("p", entityToDescribe.toStringID());
    Query query = sampleQueryTemplate.asQuery();
    query.setLimit(pageSize);

    boolean isEmpty = false;
    int i = 0;
    while (!isTimeout() && !isEmpty) {
      // get next sample
      logger.debug("Extending sample...");
      query.setOffset(i++ * pageSize);
      QueryExecution qe = ksQef.createQueryExecution(query);
      Model tmp = qe.execConstruct();
      qe.close();
      sample.add(tmp);

      // if last call returned empty model, we can leave loop
      isEmpty = tmp.isEmpty();
    }
    logger.info("...done. Sample size: " + sample.size() + " triples");
  }
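One caveat with the paging loop above: LIMIT/OFFSET windows are only guaranteed to be disjoint when the endpoint returns solutions in a stable order. A defensive variant (a sketch, not part of the original code) adds an explicit ORDER BY to the paged query:

  // An explicit ORDER BY makes OFFSET-based pages deterministic; without it
  // an endpoint may legally return overlapping or gapped windows.
  Query pagedQuery = QueryFactory.create(
      "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } ORDER BY ?s ?p ?o");
  pagedQuery.setLimit(10000);
  pagedQuery.setOffset(0); // advance by the page size on each iteration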
Example #3
 private long doExecuteSparql(VitroRequest vreq) {
   OntModel jenaOntModel = ModelAccess.on(getServletContext()).getOntModel();
   OntModel source = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);
   String[] sourceModel = vreq.getParameterValues("sourceModelName");
   for (int i = 0; i < sourceModel.length; i++) {
     Model m = getModel(sourceModel[i], vreq);
     source.addSubModel(m);
   }
   Model destination = getModel(vreq.getParameter("destinationModelName"), vreq);
   String sparqlQueryStr = vreq.getParameter("sparqlQueryStr");
   String savedQueryURIStr = vreq.getParameter("savedQuery");
   String queryStr;
   if (savedQueryURIStr == null || savedQueryURIStr.length() == 0) {
     log.debug("Using entered query");
     queryStr = sparqlQueryStr;
   } else {
     Property queryStrProp = ResourceFactory.createProperty(SPARQL_QUERYSTR_PROP);
     jenaOntModel.enterCriticalSection(Lock.READ);
     try {
       Individual ind = jenaOntModel.getIndividual(savedQueryURIStr);
       log.debug("Using query " + savedQueryURIStr);
       queryStr = ((Literal) ind.getPropertyValue(queryStrProp)).getLexicalForm();
       // !!! Workaround: data property editing HTML-escapes the stored query
       // string, so it must be unescaped before use.
       queryStr = StringEscapeUtils.unescapeHtml(queryStr);
     } finally {
       jenaOntModel.leaveCriticalSection();
     }
   }
   Model tempModel = ModelFactory.createDefaultModel();
   Query query = SparqlQueryUtils.create(queryStr);
   QueryExecution qexec = QueryExecutionFactory.create(query, source);
   try {
     qexec.execConstruct(tempModel);
   } catch (QueryExecException qee) {
     // Not a CONSTRUCT query; fall back to DESCRIBE.
     qexec.execDescribe(tempModel);
   } finally {
     qexec.close();
   }
   destination.enterCriticalSection(Lock.WRITE);
   try {
     if (destination instanceof OntModel) {
       ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, true));
     } else {
       destination.notifyEvent(new EditEvent(null, true));
     }
     destination.add(tempModel);
   } finally {
     if (destination instanceof OntModel) {
       ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, false));
     } else {
       destination.notifyEvent(new EditEvent(null, false));
     }
     destination.leaveCriticalSection();
   }
   return tempModel.size();
 }
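The construct-then-describe fallback above re-executes a QueryExecution after a failed execConstruct, which Jena does not guarantee to support. Since the parsed Query knows its own form, a variant (sketch) can dispatch up front instead:

   // Branch on the parsed query type rather than catching QueryExecException.
   if (query.isConstructType()) {
     qexec.execConstruct(tempModel);
   } else if (query.isDescribeType()) {
     qexec.execDescribe(tempModel);
   }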
Example #4
  /** {@inheritDoc} */
  public Graph graphQuery(final String theQuery) throws QueryException {
    assertConnected();

    QueryExecution aQueryExec = query(theQuery);

    try {
      return JenaSesameUtils.asSesameGraph(aQueryExec.execConstruct());
    } finally {
      aQueryExec.close();
    }
  }
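A hypothetical call site for this wrapper (the connection instance and query are illustrative):

  Graph resultGraph = connection.graphQuery(
      "CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o } LIMIT 100");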
Example #5
 /**
  * @see org.caboto.jena.db.Database#executeConstructQuery(com.hp.hpl.jena.query.Query,
  *     com.hp.hpl.jena.query.QuerySolution)
  */
 public Model executeConstructQuery(Query query, QuerySolution initialBindings) {
   try {
     Data data = getData();
     QueryExecution queryExec = getQueryExecution(query, initialBindings, data);
     Model model = queryExec.execConstruct();
     queryExec.close();
     data.close();
     return model;
   } catch (DataException e) {
     e.printStackTrace();
     return null;
   }
 }
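Callers pass the initial bindings as a Jena QuerySolutionMap; a minimal, hypothetical invocation (the database instance and IRI are illustrative):

  QuerySolutionMap initialBindings = new QuerySolutionMap();
  initialBindings.add("s", ResourceFactory.createResource("http://example.org/subject"));
  Query q = QueryFactory.create("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }");
  Model result = database.executeConstructQuery(q, initialBindings);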
Example #6
  @Override
  public void createSubModel() {
    ConnectTDB.dataset.begin(ReadWrite.WRITE);
    String queryBegin =
        "PREFIX rdf:  <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n"
            + "PREFIX mstr: <http://methodo-stat-tutor.com#>\n"
            + "\n"
            + "CONSTRUCT { "
            + "?EXO a mstr:Exercice .\n"
            + "?Q a mstr:Question .\n"
            + "?PZ a mstr:PubliZone .\n"
            + "?T a mstr:Tag .\n"
            + "?CZ a mstr:ChoiceZone .\n"
            + "?EXO mstr:hasQuestion ?Q .\n"
            + "?Q mstr:hasChoiceZone ?CZ .\n"
            + "?CZ mstr:hasPubliZone ?PZ .\n"
            + "?PZ mstr:hasTag ?T .\n"
            + "?T mstr:uri ?URI .\n"
            + "?EXO mstr:needNotion ?NN .\n"
            + "?EXO mstr:giveNotion ?GN .\n"
            + "} "
            + "WHERE {";
    String queryEnd = "}";

    String queryStudent =
        queryBegin
            + "?EXO a mstr:Exercice .\n"
            + "OPTIONAL {?EXO mstr:needNotion ?NN} .\n"
            + "OPTIONAL {?EXO mstr:giveNotion ?GN} .\n"
            + "?Q a mstr:Question .\n"
            + "?PZ a mstr:PubliZone .\n"
            + "?T a mstr:Tag .\n"
            + "?CZ a mstr:ChoiceZone .\n"
            + "?EXO mstr:hasQuestion ?Q .\n"
            + "?Q mstr:hasChoiceZone ?CZ .\n"
            + "?CZ mstr:hasPubliZone ?PZ .\n"
            + "?PZ mstr:hasTag ?T .\n"
            + "?T mstr:uri ?URI .\n"
            + "FILTER (?EXO = mstr:"
            + this.exercise
            + ")\n"
            + queryEnd;
    QueryExecution q1 = QueryExecutionFactory.create(queryStudent, ConnectTDB.dataset);
    Model modelStudent = q1.execConstruct();
    q1.close();

    mergeModelsInSubModel(modelStudent);
    ConnectTDB.dataset.end();
  }
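Building the FILTER by concatenating this.exercise into the query text is fragile if the exercise name contains characters that are illegal in a prefixed name. A safer variant (sketch; the IRI value is illustrative) binds the value through ParameterizedSparqlString:

  ParameterizedSparqlString pss = new ParameterizedSparqlString(
      "CONSTRUCT { ?EXO ?p ?o } WHERE { ?EXO ?p ?o }");
  pss.setIri("EXO", "http://methodo-stat-tutor.com#exercise1");
  Query safeQuery = pss.asQuery();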
Example #7
  public static void main(String[] args) {
    try {
      String request = FileUtils.readFileToString(new File("src/test/requete.rq"));

      Model trace = ModelFactory.createDefaultModel();
      trace.read(new FileInputStream("src/test/@obsels.rdf"), "", KtbsConstants.JENA_RDF_XML);

      Query query = QueryFactory.create(request, Syntax.syntaxARQ);

      // Execute the query and obtain results
      QueryExecution qe = QueryExecutionFactory.create(query, trace);
      Model resultModel = qe.execConstruct();
      qe.close();
      resultModel.write(System.out, KtbsConstants.JENA_TURTLE, null);

    } catch (IOException e) {
      e.printStackTrace();
    }
  }
Example #8
 protected Model executeConstructQuery(String query) {
   logger.trace("Sending query\n{} ...", query);
   QueryExecution qe = qef.createQueryExecution(query);
   try {
     Model model = qe.execConstruct();
     timeout = false;
     if (model.isEmpty()) {
       fullDataLoaded = true;
     }
     logger.debug("Got " + model.size() + " triples.");
     return model;
   } catch (QueryExceptionHTTP e) {
     if (e.getCause() instanceof SocketTimeoutException) {
       logger.warn("Got timeout");
     } else {
       logger.error("Exception executing query", e);
     }
     return ModelFactory.createDefaultModel();
   } finally {
     qe.close();
   }
 }
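Because this method treats SocketTimeoutException specially, it usually pairs with an explicit execution timeout on the QueryExecution; a sketch (the 30-second budget is an arbitrary choice; TimeUnit is java.util.concurrent.TimeUnit):

   // Bound the overall query time so slow endpoints surface as timeouts
   // rather than hanging the loader.
   QueryExecution qe = qef.createQueryExecution(query);
   qe.setTimeout(30, TimeUnit.SECONDS);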
Example #9
  private IRI executeRule(final Rule r, final IRI inputIRI) {
    try {
      PelletOptions.USE_ANNOTATION_SUPPORT = true;

      PelletOptions.TREAT_ALL_VARS_DISTINGUISHED = controller.isTreatAllVariablesDistinguished();

      QueryEngineType type = (QueryEngineType) controller.getQueryEngineType();

      final QueryExecution qe;
      final ByteArrayOutputStream w = new ByteArrayOutputStream();

      final Query qSelect = getSelectExampleQuery(r.getQuery());

      // Load the rule's input ontology up front; both branches below use it.
      final OWLOntology queryOntology = getInputOntologyForRule(inputIRI);

      if (type.toPellet() != null) {

        final PelletReasoner reasoner =
            PelletReasonerFactory.getInstance().createReasoner(queryOntology);

        log.info("Ontology size: " + reasoner.getKB().getInfo());

        final Dataset ds = kb2ds(reasoner.getKB());

        final QueryExecution qeSelect =
            SparqlDLExecutionFactory.create(qSelect, ds, null, type.toPellet());

        final ResultSet rs = qeSelect.execSelect();
        controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

        qe =
            SparqlDLExecutionFactory.create(
                r.getQuery(), kb2ds(reasoner.getKB()), null, type.toPellet());
        qe.execConstruct().write(w);
      } else {
        final ByteArrayOutputStream w2 = new ByteArrayOutputStream();
        final Model model = ModelFactory.createDefaultModel();
        try {
          controller
              .getOWLOntologyManager()
              .saveOntology(queryOntology, new TurtleOntologyFormat(), w2);
          model.read(new ByteArrayInputStream(w2.toByteArray()), "", "TURTLE");

          final QueryExecution qeSelect = QueryExecutionFactory.create(qSelect, model);

          final ResultSet rs = qeSelect.execSelect();
          controller.setSelect(r, rs.getResultVars(), ResultSetFormatter.toList(rs));

          qe = QueryExecutionFactory.create(r.getQuery(), model);
          qe.execConstruct().write(w);
        } catch (OWLOntologyStorageException e) {
          e.printStackTrace();
        }
      }

      final IRI outputIRI = getOntologyIRIForRuleName(r.getName());

      // load the generated ontology
      final OWLOntology generatedOntology =
          controller
              .getOWLOntologyManager()
              .loadOntologyFromOntologyDocument(new ByteArrayInputStream(w.toByteArray()));
      controller.updateOntology(
          generatedOntology,
          outputIRI,
          inputIRI,
          controller.getRuleSpec().getResultFile(r).toURI());
      controller.setStatus("Rule " + r.getName() + " successfully executed");
      return outputIRI;
    } catch (OWLOntologyCreationException e1) {
      controller.setStatus(e1.getMessage());
      return null;
    }
  }
Example #10
 void runTestConstruct(Query query, QueryExecution qe) throws Exception {
   // Do the query!
   Model resultsActual = qe.execConstruct();
   compareGraphResults(resultsActual, query);
 }
Example #11
 /**
  * Query SPARQL endpoint with a CONSTRUCT query
  *
  * @param qExec QueryExecution encapsulating the query
  * @return model retrieved by querying the endpoint
  */
 private Model getConstructModel(QueryExecution qExec) {
   return qExec.execConstruct(ModelFactory.createDefaultModel());
 }
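A minimal end-to-end use of this helper against a local model (the data triple and query are illustrative):

  Model data = ModelFactory.createDefaultModel();
  data.add(data.createResource("http://example.org/a"),
      RDF.type,
      data.createResource("http://example.org/Thing"));
  QueryExecution qExec = QueryExecutionFactory.create(
      "CONSTRUCT { ?s a ?t } WHERE { ?s a ?t }", data);
  Model copy = getConstructModel(qExec);
  qExec.close();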
Example #12
  /**
   * Starts a harvester with predefined queries to synchronize with changes from the SPARQL
   * endpoint.
   */
  public boolean sync() {
    logger.info("Sync resources newer than {}", startTime);

    String rdfQueryTemplate =
        "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> "
            + "SELECT DISTINCT ?resource WHERE { "
            + " GRAPH ?graph { %s }"
            + " ?graph <%s> ?time .  %s "
            + " FILTER (?time > xsd:dateTime(\"%s\")) }";

    String queryStr =
        String.format(
            rdfQueryTemplate, syncConditions, syncTimeProp, graphSyncConditions, startTime);
    Set<String> syncUris = executeSyncQuery(queryStr, "resource");

    if (syncUris == null) {
      logger.error("Errors occurred during sync procedure. Aborting!");
      return false;
    }

    /*
     * If desired, query for old data whose sync conditions have been modified.
     *
     * This option is useful when the application indexes resources that match
     * certain conditions: if a resource is modified and no longer matches the
     * initial conditions, it would not be picked up by the sync query. When
     * syncOldData is true, such resources are removed from the index.
     */
    int deleted = 0;
    int count = 0;
    if (this.syncOldData) {
      SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
      queryStr =
          String.format(
              rdfQueryTemplate,
              syncConditions,
              syncTimeProp,
              graphSyncConditions,
              sdf.format(new Date(0)));

      HashSet<String> allIndexURIs = executeSyncQuery(queryStr, "resource");

      if (allIndexURIs == null) {
        logger.error("Errors occurred during modified content sync query. Aborting!");
        return false;
      }

      deleted = removeMissingUris(allIndexURIs);
    }

    /* Prepare a series of bulk uris to be described so we can make
     * a smaller number of calls to the SPARQL endpoint. */
    ArrayList<ArrayList<String>> bulks = new ArrayList<ArrayList<String>>();
    ArrayList<String> currentBulk = new ArrayList<String>();

    for (String uri : syncUris) {
      currentBulk.add(uri);

      if (currentBulk.size() == EEASettings.DEFAULT_BULK_SIZE) {
        bulks.add(currentBulk);
        currentBulk = new ArrayList<String>();
      }
    }

    if (currentBulk.size() > 0) {
      bulks.add(currentBulk);
    }

    /* Execute RDF queries for the resources in each bulk */
    for (ArrayList<String> bulk : bulks) {
      String syncQuery = getSyncQueryStr(bulk);

      try {
        Query query = QueryFactory.create(syncQuery);
        QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
        try {
          Model constructModel = ModelFactory.createDefaultModel();
          qExec.execConstruct(constructModel);
          BulkRequestBuilder bulkRequest = client.prepareBulk();

          /*
           * When adding the model to ES, do not use toDescribeURIs: the query
           * already returned the correct labels.
           */
          addModelToES(constructModel, bulkRequest, false);
          count += bulk.size();
        } catch (Exception e) {
          logger.error("Error while querying for modified content. {}", e.getLocalizedMessage());
          return false;
        } finally {
          qExec.close();
        }
      } catch (QueryParseException qpe) {
        logger.warn(
            "Could not parse Sync query. Please provide a relevant query. {}",
            qpe.getLocalizedMessage());
        return false;
      }
    }
    logger.info(
        "Finished synchronisation: Deleted {}, Updated {}/{}", deleted, count, syncUris.size());
    return true;
  }