/**
  * Leo Bard spotted a problem whereby removing a reified statement from a model with style
  * Standard didn't leave the model empty. Here's a test for it.
  */
 public void testLeosBug() {
   Model A = getModel();
   Statement st = statement(A, "pigs fly south");
   ReifiedStatement rst = st.createReifiedStatement("eh:pointer");
   A.removeReification(rst);
   assertIsoModels(ModelFactory.createDefaultModel(), A);
 }
  /**
   * Query SPARQL endpoint with a SELECT query
   *
   * @param qExec QueryExecution encapsulating the query
   * @return model retrieved by querying the endpoint
   */
  private Model getSelectModel(QueryExecution qExec) {
    Model model = ModelFactory.createDefaultModel();
    Graph graph = model.getGraph();
    ResultSet results = qExec.execSelect();

    while (results.hasNext()) {
      QuerySolution sol = results.next();
      // Missing bindings come back as null rather than throwing
      // NoSuchElementException, so check for them explicitly.
      if (!sol.contains("s") || !sol.contains("p") || !sol.contains("o")) {
        logger.error("SELECT query does not return a (?s ?p ?o) Triple");
        continue;
      }
      String subject = sol.getResource("s").toString();
      String predicate = sol.getResource("p").toString();
      RDFNode object = sol.get("o");

      Node objNode;
      if (object.isLiteral()) {
        Literal obj = object.asLiteral();
        objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
      } else {
        // A resource or blank node: keep the original node rather than
        // flattening it into a literal, which would corrupt the model.
        objNode = object.asNode();
      }

      graph.add(
          new Triple(NodeFactory.createURI(subject), NodeFactory.createURI(predicate), objNode));
    }

    return model;
  }
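  /*
   * A minimal usage sketch for getSelectModel, assuming a hypothetical endpoint
   * URL and query text (neither is part of the original code). The SELECT query
   * must bind ?s, ?p and ?o, since those are the variables getSelectModel reads.
   */
  private Model selectExample() {
    Query query = QueryFactory.create("SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10");
    // "http://example.org/sparql" is an illustrative placeholder endpoint
    QueryExecution qExec = QueryExecutionFactory.sparqlService("http://example.org/sparql", query);
    try {
      return getSelectModel(qExec);
    } finally {
      qExec.close();
    }
  }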
 @Test
 public void testCalendarLiterals() {
   Calendar now = Calendar.getInstance();
   Literal A = ModelFactory.createDefaultModel().createTypedLiteral(now);
   Literal B = ResourceFactory.createTypedLiteral(now);
   assertEquals(A, B);
 }
 /** Load the premises or conclusions for the test. */
 public Model getDoc(Resource test, Property docType) throws IOException {
   Model result = ModelFactory.createDefaultModel();
   StmtIterator si = test.listProperties(docType);
   while (si.hasNext()) {
     String fname = si.nextStatement().getObject().toString() + ".rdf";
     loadFile(fname, result);
   }
   return result;
 }
 /**
  * Answer a new model which is the aggregation of
  *
  * <ul>
  *   <li>the statements of <code>model</code>
  *   <li>the non-bnode subclass statements of <code>schema</code>
  *   <li>the subclass closure of those statements
   *   <li>the rdf:type statements implied by the rdfs:domain statements of <code>schema</code> and
   *       the <code>model</code> statements using that statement's property
  *   <li>similarly for rdfs:range
  *   <li>the rdf:type statements implied by the subclass closure
  * </ul>
  */
 public static Model withSchema(Model model, Model schema) {
   Model result = ModelFactory.createDefaultModel().add(model);
   addSubclassesFrom(result, schema);
   addSubClassClosure(result);
   addDomainTypes(result, schema);
   addRangeTypes(result, schema);
   addIntersections(result, schema);
   addSupertypes(result);
   return result;
 }
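  /*
   * A small worked example for withSchema; the eg: URIs are illustrative only.
   * Given a schema stating eg:Dog rdfs:subClassOf eg:Animal and a model stating
   * eg:rex rdf:type eg:Dog, the returned model should also contain
   * eg:rex rdf:type eg:Animal via the subclass closure and supertype rules.
   */
  public static Model withSchemaExample() {
    Model schema = ModelFactory.createDefaultModel();
    schema.add(
        schema.createResource("eg:Dog"), RDFS.subClassOf, schema.createResource("eg:Animal"));
    Model model = ModelFactory.createDefaultModel();
    model.add(model.createResource("eg:rex"), RDF.type, model.createResource("eg:Dog"));
    return withSchema(model, schema);
  }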
 /** Run a single test of any sort, return true if the test succeeds. */
 public boolean doRunTest(Resource test) throws IOException {
   if (test.hasProperty(RDF.type, OWLTest.PositiveEntailmentTest)
       || test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)
       || test.hasProperty(RDF.type, OWLTest.OWLforOWLTest)
       || test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest)
       || test.hasProperty(RDF.type, OWLTest.TrueTest)) {
     // Entailment tests
     boolean processImports = test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest);
     Model premises = getDoc(test, RDFTest.premiseDocument, processImports);
     Model conclusions = getDoc(test, RDFTest.conclusionDocument);
     comprehensionAxioms(premises, conclusions);
     long t1 = System.currentTimeMillis();
     InfGraph graph = reasoner.bind(premises.getGraph());
     if (printProfile) {
       ((FBRuleInfGraph) graph).resetLPProfile(true);
     }
     Model result = ModelFactory.createModelForGraph(graph);
     boolean correct = WGReasonerTester.testConclusions(conclusions.getGraph(), result.getGraph());
     long t2 = System.currentTimeMillis();
     lastTestDuration = t2 - t1;
     if (printProfile) {
       ((FBRuleInfGraph) graph).printLPProfile();
     }
     if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)) {
       correct = !correct;
     }
     return correct;
   } else if (test.hasProperty(RDF.type, OWLTest.InconsistencyTest)) {
     //            System.out.println("Starting: " + test);
     Model input = getDoc(test, RDFTest.inputDocument);
     long t1 = System.currentTimeMillis();
     InfGraph graph = reasoner.bind(input.getGraph());
     boolean correct = !graph.validate().isValid();
     long t2 = System.currentTimeMillis();
     lastTestDuration = t2 - t1;
     return correct;
   } else if (test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) {
      // Not normally used, because we are not complete enough to prove consistency
     //            System.out.println("Starting: " + test);
     Model input = getDoc(test, RDFTest.inputDocument);
     long t1 = System.currentTimeMillis();
     InfGraph graph = reasoner.bind(input.getGraph());
     boolean correct = graph.validate().isValid();
     long t2 = System.currentTimeMillis();
     lastTestDuration = t2 - t1;
     return correct;
   } else {
     for (StmtIterator i = test.listProperties(RDF.type); i.hasNext(); ) {
       System.out.println("Test type = " + i.nextStatement().getObject());
     }
     throw new ReasonerException("Unknown test type");
   }
 }
 protected static void addRangeTypes(Model result, Model schema) {
   Model toAdd = ModelFactory.createDefaultModel();
   for (StmtIterator it = schema.listStatements(ANY, RDFS.range, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     RDFNode type = s.getObject();
     Property property = s.getSubject().as(Property.class);
     for (StmtIterator x = result.listStatements(ANY, property, ANY); x.hasNext(); ) {
       RDFNode ob = x.nextStatement().getObject();
       if (ob.isResource()) toAdd.add((Resource) ob, RDF.type, type);
     }
   }
   result.add(toAdd);
 }
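  /*
   * addDomainTypes is called from withSchema above but is not shown in this
   * listing. The sketch below is an assumed mirror image of addRangeTypes: for
   * each (p rdfs:domain T) statement in the schema, every subject of a
   * p-statement in the result is typed as T, where addRangeTypes types the
   * objects instead.
   */
  protected static void addDomainTypesSketch(Model result, Model schema) {
    Model toAdd = ModelFactory.createDefaultModel();
    for (StmtIterator it = schema.listStatements(ANY, RDFS.domain, ANY); it.hasNext(); ) {
      Statement s = it.nextStatement();
      RDFNode type = s.getObject();
      Property property = s.getSubject().as(Property.class);
      for (StmtIterator x = result.listStatements(ANY, property, ANY); x.hasNext(); ) {
        // subjects are always resources, so no isResource() guard is needed
        toAdd.add(x.nextStatement().getSubject(), RDF.type, type);
      }
    }
    result.add(toAdd);
  }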
 /** Load the premises or conclusions for the test, optionally performing import processing. */
 public Model getDoc(Resource test, Property docType, boolean processImports) throws IOException {
   if (processImports) {
     Model result = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null);
     StmtIterator si = test.listProperties(docType);
     while (si.hasNext()) {
       String fname = si.nextStatement().getObject().toString() + ".rdf";
       loadFile(fname, result);
     }
     return result;
   } else {
     return getDoc(test, docType);
   }
 }
 /** Initialize the result model. */
 public void initResults() {
   testResults = ModelFactory.createDefaultModel();
   jena2 = testResults.createResource(BASE_RESULTS_URI + "#jena2");
   jena2.addProperty(
       RDFS.comment,
       testResults.createLiteral(
           "<a xmlns=\"http://www.w3.org/1999/xhtml\" href=\"http://jena.sourceforce.net/\">Jena2</a> includes a rule-based inference engine for RDF processing, "
               + "supporting both forward and backward chaining rules. Its OWL rule set is designed to provide sound "
               + "but not complete instance resasoning for that fragment of OWL/Full limited to the OWL/lite vocabulary. In"
               + "particular it does not support unionOf/complementOf.",
           true));
   jena2.addProperty(RDFS.label, "Jena2");
   testResults.setNsPrefix("results", OWLResults.NS);
 }
 protected static void addSupertypes(Model result) {
   Model temp = ModelFactory.createDefaultModel();
   for (StmtIterator it = result.listStatements(ANY, RDF.type, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     Resource c = AssemblerHelp.getResource(s);
     for (StmtIterator subclasses = result.listStatements(c, RDFS.subClassOf, ANY);
         subclasses.hasNext(); ) {
       RDFNode type = subclasses.nextStatement().getObject();
       // System.err.println( ">> adding super type: subject " + s.getSubject() + ", type " + type
       // );
       temp.add(s.getSubject(), RDF.type, type);
     }
   }
   result.add(temp);
 }
  private static ResultSetRewindable unique(ResultSetRewindable results) {
    // VERY crude.  Utilises the fact that bindings have value equality.
    List<Binding> x = new ArrayList<Binding>();
    Set<Binding> seen = new HashSet<Binding>();

    while (results.hasNext()) {
      Binding b = results.nextBinding();
      if (seen.contains(b)) continue;
      seen.add(b);
      x.add(b);
    }
    QueryIterator qIter = new QueryIterPlainWrapper(x.iterator());
    ResultSet rs =
        new ResultSetStream(results.getResultVars(), ModelFactory.createDefaultModel(), qIter);
    return ResultSetFactory.makeRewindable(rs);
  }
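  /*
   * Illustrative use of unique() (the helper name below is hypothetical):
   * execSelect() output is wrapped via ResultSetFactory.makeRewindable first,
   * since unique() consumes a ResultSetRewindable.
   */
  private static ResultSetRewindable uniqueExample(QueryExecution qExec) {
    ResultSetRewindable all = ResultSetFactory.makeRewindable(qExec.execSelect());
    return unique(all);
  }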
  /**
   * Check that a predicate for which no shortnames are defined in the name map still gets a term
   * binding in the metadata.
   */
  @Test
  public void testTermBindingsCoverAllPredicates() throws URISyntaxException {
    Resource thisPage = ResourceFactory.createResource("elda:thisPage");
    String pageNumber = "1";
    Bindings cc = new Bindings();
    URI reqURI = new URI("");
    //
    EndpointDetails spec =
        new EndpointDetails() {

          @Override
          public boolean isListEndpoint() {
            return true;
          }

          @Override
          public boolean hasParameterBasedContentNegotiation() {
            return false;
          }
        };
    EndpointMetadata em = new EndpointMetadata(spec, thisPage, pageNumber, cc, reqURI);
    //
    PrefixMapping pm =
        PrefixMapping.Factory.create().setNsPrefix("this", "http://example.com/root#");
    Model toScan = ModelIOUtils.modelFromTurtle(":a <http://example.com/root#predicate> :b.");
    toScan.setNsPrefixes(pm);
    Resource predicate = toScan.createProperty("http://example.com/root#predicate");
    Model meta = ModelFactory.createDefaultModel();
    Resource exec = meta.createResource("fake:exec");
    ShortnameService sns = new StandardShortnameService();
    //		APIEndpoint.Request r = new APIEndpoint.Request( new Controls(), reqURI, cc );

    CompleteContext c =
        new CompleteContext(CompleteContext.Mode.PreferPrefixes, sns.asContext(), pm)
            .include(toScan);

    em.addTermBindings(toScan, meta, exec, c);

    @SuppressWarnings("unused")
    Map<String, String> termBindings = c.Do();
    Resource tb = meta.listStatements(null, API.termBinding, Any).nextStatement().getResource();
    assertTrue(meta.contains(tb, API.label, "this_predicate"));
    assertTrue(meta.contains(tb, API.property, predicate));
  }
  /** Harvests all the triples from each URI in the <code>rdfUris</code> list. */
  private void harvestFromDumps() {
    for (String uri : rdfUris) {
      if (uri.isEmpty()) continue;

      logger.info("Harvesting uri [{}]", uri);

      Model model = ModelFactory.createDefaultModel();
      try {
        RDFDataMgr.read(model, uri.trim(), RDFLanguages.RDFXML);
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        addModelToES(model, bulkRequest, true);
      } catch (RiotException re) {
        logger.error("Illegal xml character [{}]", re.getLocalizedMessage());
      } catch (Exception e) {
        logger.error(
            "Exception when harvesting url: {}. Details: {}", uri, e.getLocalizedMessage());
      }
    }
  }
 /** Load all of the known manifest files into a single model */
 public static Model loadAllTestDefinitions() {
   System.out.print("Loading manifests ");
   System.out.flush();
   Model testDefs = ModelFactory.createDefaultModel();
   int count = 0;
   for (String TEST_DIR : TEST_DIRS) {
     File dir = new File(BASE_TESTDIR + TEST_DIR);
     String[] manifests =
         dir.list(
             new FilenameFilter() {
               @Override
               public boolean accept(File df, String name) {
                 if (name.startsWith("Manifest") && name.endsWith(".rdf")) {
                   return includeModified || !name.endsWith("-mod.rdf");
                 } else {
                   return false;
                 }
               }
              });
      if (manifests == null) continue; // dir.list() returns null if the directory is missing
      for (String manifest : manifests) {
       File mf = new File(dir, manifest);
       try {
         testDefs.read(new FileInputStream(mf), "file:" + mf);
         count++;
         if (count % 8 == 0) {
           System.out.print(".");
           System.out.flush();
         }
       } catch (FileNotFoundException e) {
         System.out.println("File not readable - " + e);
       }
     }
   }
   System.out.println("loaded");
   return testDefs;
 }
 @Test
 public void testInfoStamp() {
   String versionString = "E3.14159", commentString = "gloopSmurfale";
   String resourceString =
       "_x eye:assumed 'ABC'; _x eye:checked 'DEF'; _x eye:version '%v'; _x eye:comment '%c'"
           .replaceAll("%v", versionString)
           .replaceAll("%c", commentString);
   InfoStamp i = new InfoStamp(resourceInModel(resourceString));
   Calendar now = Calendar.getInstance();
   Resource root = i.stamp(now);
   Model stamp = root.getModel();
   Literal dateLiteral = ModelFactory.createDefaultModel().createTypedLiteral(now);
   String dateString = "'" + dateLiteral.getLexicalForm() + "'" + dateLiteral.getDatatypeURI();
   String expectedFormat =
       "[eye:assumed 'ABC' & eye:checked 'DEF' & eye:dated <date>"
           + " & eye:comment '<comment>' & eye:version '<version>']";
   String expectedString =
       expectedFormat
           .replaceAll("<date>", dateString)
           .replaceAll("<version>", versionString)
           .replaceAll("<comment>", commentString);
   Model expected = model(expectedString);
   assertIsoModels(expected, stamp);
 }
   /**
    * Handles the inheritance between the model interfaces. The model supports multiple
    * inheritance.
    *
    * @param type The given {@link Model} interface.
    * @param imp The {@link ModelProxy} proxy implementation for <code>type</code>. It will be
    *     modified by the execution of <code>initSupers</code>.
    */
  protected void initSupers(Class type, ModelProxy imp) {
    List<Class> sC = getSupers(type);

    /* Creates the superclass instances */
    for (Class i : sC) {
      if (!imp.supers.containsKey(i)) {
        imp.supers.put(i, owner.create(i));
      }
    }
  }
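   /*
    * getSupers is referenced above but not shown. A minimal sketch, assuming it
    * simply collects the direct superinterfaces of the given type; the real
    * implementation may filter or order them differently.
    */
   protected List<Class> getSupersSketch(Class type) {
     return new ArrayList<Class>(Arrays.asList(type.getInterfaces()));
   }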
  private Resource createMetadata(final boolean isListEndpoint, Integer totalResults)
      throws URISyntaxException {
    Model objectModel = ModelFactory.createDefaultModel();
    MergedModels mergedModels = new MergedModels(objectModel);
    //
    Model meta = mergedModels.getMetaModel();
    //
    Resource thisMetaPage = meta.createResource("eh:/thisMetaPage");
    Resource SEP = meta.createResource("eh:/sparqlEndpoint");
    thisMetaPage.addProperty(API.sparqlEndpoint, SEP);

    Bindings bindings = new Bindings();
    URI ru = new URI(thisMetaPage.getURI());
    Resource uriForDefinition = objectModel.createResource(thisMetaPage.getURI());
    boolean suppressIPTO = true;
    int page = 1, perPage = 10;
    boolean hasMorePages = true;
    Context context = new Context();
    CompleteContext cc = new CompleteContext(Mode.PreferLocalnames, context, objectModel);
    //
    SetsMetadata setsMeta =
        new SetsMetadata() {

          @Override
          public void setMetadata(String type, Model meta) {}
        };
    WantsMetadata wantsMeta =
        new WantsMetadata() {

          @Override
          public boolean wantsMetadata(String name) {
            return true;
          }
        };
    //
    Map<String, View> views = new HashMap<String, View>();
    Set<FormatNameAndType> formats = new HashSet<FormatNameAndType>();
    //
    EndpointDetails details =
        new EndpointDetails() {

          @Override
          public boolean isListEndpoint() {
            return isListEndpoint;
          }

          @Override
          public boolean hasParameterBasedContentNegotiation() {
            return false;
          }
        };
    //
    EndpointMetadata.addAllMetadata(
        mergedModels,
        ru,
        uriForDefinition,
        bindings,
        cc,
        suppressIPTO,
        thisMetaPage,
        page,
        perPage,
        totalResults,
        hasMorePages,
        CollectionUtils.list(objectModel.createResource("eh:/item/_1")),
        setsMeta,
        wantsMeta,
        "SELECT",
        "VIEW",
        new TestCaches.FakeSource("Nemos"),
        views,
        formats,
        details);
    return thisMetaPage;
  }
  /**
   * Starts a harvester with predefined queries to synchronize with the changes from the SPARQL
   * endpoint
   */
  public boolean sync() {
    logger.info("Sync resources newer than {}", startTime);

    String rdfQueryTemplate =
        "PREFIX xsd:<http://www.w3.org/2001/XMLSchema#> "
            + "SELECT DISTINCT ?resource WHERE { "
            + " GRAPH ?graph { %s }"
            + " ?graph <%s> ?time .  %s "
            + " FILTER (?time > xsd:dateTime(\"%s\")) }";

    String queryStr =
        String.format(
            rdfQueryTemplate, syncConditions, syncTimeProp, graphSyncConditions, startTime);
    Set<String> syncUris = executeSyncQuery(queryStr, "resource");

    if (syncUris == null) {
      logger.error("Errors occurred during sync procedure. Aborting!");
      return false;
    }

    /*
     * If desired, also query for old data whose sync conditions have changed.
     *
     * This option is useful when the application indexes resources that match
     * certain conditions. If those resources are later modified and no longer
     * match the initial conditions, they would not be synchronized. When
     * syncOldData is true, the modified resources that no longer match the
     * conditions are deleted from the index.
     */
    int deleted = 0;
    int count = 0;
    if (this.syncOldData) {
      SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
      queryStr =
          String.format(
              rdfQueryTemplate,
              syncConditions,
              syncTimeProp,
              graphSyncConditions,
              sdf.format(new Date(0)));

      HashSet<String> allIndexURIs = executeSyncQuery(queryStr, "resource");

      if (allIndexURIs == null) {
        logger.error("Errors occurred during modified content sync query. Aborting!");
        return false;
      }

      deleted = removeMissingUris(allIndexURIs);
    }

    /* Prepare a series of bulk uris to be described so we can make
     * a smaller number of calls to the SPARQL endpoint. */
    ArrayList<ArrayList<String>> bulks = new ArrayList<ArrayList<String>>();
    ArrayList<String> currentBulk = new ArrayList<String>();

    for (String uri : syncUris) {
      currentBulk.add(uri);

      if (currentBulk.size() == EEASettings.DEFAULT_BULK_SIZE) {
        bulks.add(currentBulk);
        currentBulk = new ArrayList<String>();
      }
    }

    if (currentBulk.size() > 0) {
      bulks.add(currentBulk);
    }

    /* Execute RDF queries for the resources in each bulk */
    for (ArrayList<String> bulk : bulks) {
      String syncQuery = getSyncQueryStr(bulk);

      try {
        Query query = QueryFactory.create(syncQuery);
        QueryExecution qExec = QueryExecutionFactory.sparqlService(rdfEndpoint, query);
        try {
          Model constructModel = ModelFactory.createDefaultModel();
          qExec.execConstruct(constructModel);
          BulkRequestBuilder bulkRequest = client.prepareBulk();

          /**
           * When adding the model to ES do not use toDescribeURIs as the query already returned the
           * correct labels.
           */
          addModelToES(constructModel, bulkRequest, false);
          count += bulk.size();
        } catch (Exception e) {
          logger.error("Error while querying for modified content. {}", e.getLocalizedMessage());
          return false;
        } finally {
          qExec.close();
        }
      } catch (QueryParseException qpe) {
        logger.warn(
            "Could not parse Sync query. Please provide a relevant query. {}",
            qpe.getLocalizedMessage());
        return false;
      }
    }
    logger.info(
        "Finished synchronisation: Deleted {}, Updated {}/{}", deleted, count, syncUris.size());
    return true;
  }
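  /*
   * getSyncQueryStr is used by sync() but is not shown here. A minimal sketch,
   * assuming it builds a CONSTRUCT query over the bulk of URIs; the query shape
   * in the original code may differ (e.g. it may also fetch labels).
   */
  private String getSyncQueryStrSketch(List<String> bulk) {
    StringBuilder sb = new StringBuilder("CONSTRUCT { ?s ?p ?o } WHERE { VALUES ?s { ");
    for (String uri : bulk) {
      sb.append('<').append(uri).append("> ");
    }
    sb.append("} ?s ?p ?o }");
    return sb.toString();
  }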
 /**
  * Query SPARQL endpoint with a CONSTRUCT query
  *
  * @param qExec QueryExecution encapsulating the query
  * @return model retrieved by querying the endpoint
  */
 private Model getConstructModel(QueryExecution qExec) {
   return qExec.execConstruct(ModelFactory.createDefaultModel());
 }
 /**
  * Query SPARQL endpoint with a DESCRIBE query
  *
  * @param qExec QueryExecution encapsulating the query
  * @return model retrieved by querying the endpoint
  */
 private Model getDescribeModel(QueryExecution qExec) {
   return qExec.execDescribe(ModelFactory.createDefaultModel());
 }
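 /*
  * A hedged sketch of how the three helpers above might be dispatched from a
  * single entry point, keyed off the query type; the method name and signature
  * are illustrative, not from the original source.
  */
 private Model getModelForQuery(Query query, QueryExecution qExec) {
   if (query.isSelectType()) return getSelectModel(qExec);
   if (query.isConstructType()) return getConstructModel(qExec);
   if (query.isDescribeType()) return getDescribeModel(qExec);
   throw new IllegalArgumentException("Unsupported query type");
 }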
  public static void main(String[] args) {

    List<String> obj = new ArrayList<String>();

    Scanner input = new Scanner(System.in);

    System.out.print("Enter URI: ");

    String userIn = input.nextLine();

    // create an empty Model
    Model model = ModelFactory.createDefaultModel();

    // read the RDF/XML file
    model.read(userIn);

    // write it to standard out
    // model.write(System.out);

    // list the statements in the Model
    StmtIterator iter = model.listStatements();

    System.out.println();

    // print out the predicate, subject and object of each statement
    while (iter.hasNext()) {
      Statement stmt = iter.nextStatement(); // get next statement
      Resource subject = stmt.getSubject(); // get the subject
      Property predicate = stmt.getPredicate(); // get the predicate
      RDFNode object = stmt.getObject(); // get the object

      System.out.print(subject.toString());
      System.out.print(" -> " + predicate.toString() + " -> ");
      if (object instanceof Resource) {
        System.out.print(object.toString() + "\n");
      } else {
        // object is a literal
        System.out.print(" \"" + object.toString() + "\"\n");
      }
    }

    /*
    for (int i = 0; i < obj.size(); i++) {
      String sparqlQueryString1 =
          "SELECT ?s ?o "
              + "WHERE {"
              + "?s ?p ?o ."
              + "?o <bif:contains> \"" + obj.get(i) + "\" ."
              + "}"
              + "limit 10";

      Query query = QueryFactory.create(sparqlQueryString1);
      QueryExecution qexec =
          QueryExecutionFactory.sparqlService("http://pubmed.bio2rdf.org/sparql", query);

      ResultSet results = qexec.execSelect();
      System.out.println("Query: " + obj.get(i));
      ResultSetFormatter.out(System.out, results, query);

      qexec.close();
    }
    */

  }
 public void testBulkByModel() {
   testBulkByModel(ModelFactory.createDefaultModel());
 }
 public void testMBU() {
   testMBU(ModelFactory.createDefaultModel());
 }
 /**
  * To each subclass X of <code>parents.item</code>, add as superclasses all the classes between X
  * and that item, together with all the items in the rest of <code>parents</code>.
  */
 private static void addSuperClasses(Model m, LinkedSeq parents) {
   Model toAdd = ModelFactory.createDefaultModel();
   addSuperClasses(m, parents, toAdd);
   m.add(toAdd);
 }