public void testBulkByModel(Model m) {
   assertEquals("precondition: model must be empty", 0, m.size());
   Model A = modelWithStatements("clouds offer rain; trees offer shelter");
   Model B = modelWithStatements("x R y; y Q z; z P x");
   m.add(A);
   assertIsoModels(A, m);
   m.add(B);
   m.remove(A);
   assertIsoModels(B, m);
   m.remove(B);
   assertEquals("", 0, m.size());
 }
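The bulk tests above rely on helpers defined elsewhere in the test base. A minimal sketch of modelWithStatements, assuming the common Jena test convention of semicolon-separated "S P O" clauses expanded under a fixed URI scheme; this is an illustration, not Jena's actual ModelTestBase code:

  static Model modelWithStatements(String facts) {
    Model m = ModelFactory.createDefaultModel();
    for (String clause : facts.split(";")) {
      // each clause is "subject predicate object", whitespace-separated
      String[] spo = clause.trim().split("\\s+");
      m.add(
          m.createResource("eh:/" + spo[0]),
          m.createProperty("eh:/" + spo[1]),
          m.createResource("eh:/" + spo[2]));
    }
    return m;
  }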
 private boolean is200AnRDF(CachedHTTPResource resource) {
   // a null resource cannot contain RDF; check before dereferencing
   if (resource == null) return false;
   if (resource.isContainsRDF() != null) return resource.isContainsRDF();
   if (resource.getResponses() != null) {
     for (SerialisableHttpResponse response : resource.getResponses()) {
       if (response != null && response.getHeaders("Content-Type") != null) {
         if (LinkedDataContent.contentTypes.contains(response.getHeaders("Content-Type"))) {
           if (response.getHeaders("Content-Type").equals(WebContent.contentTypeTextPlain)) {
             Model m = this.tryRead(resource.getUri());
             if (m != null && m.size() == 0) {
               this.createProblemQuad(resource.getUri(), DQM.SC200WithoutRDF);
               resource.setContainsRDF(false);
               return false;
             }
           }
           this.createProblemQuad(resource.getUri(), DQM.SC200WithRDF);
           resource.setContainsRDF(true);
           return true;
         }
       }
     }
   }
   this.createProblemQuad(resource.getUri(), DQM.SC200WithoutRDF);
   resource.setContainsRDF(false);
   return false;
 }
  private void generateSample() {
    logger.info("Generating sample...");
    sample = ModelFactory.createDefaultModel();

    // we have to set up a new query execution factory working on our local model
    qef = new QueryExecutionFactoryModel(sample);
    reasoner = new SPARQLReasoner(qef);

    // get the page size
    // TODO put to base class
    long pageSize = 10000; // PaginationUtils.adjustPageSize(globalQef, 10000);

    ParameterizedSparqlString sampleQueryTemplate = getSampleQuery();
    sampleQueryTemplate.setIri("p", entityToDescribe.toStringID());
    Query query = sampleQueryTemplate.asQuery();
    query.setLimit(pageSize);

    boolean isEmpty = false;
    int i = 0;
    while (!isTimeout() && !isEmpty) {
      // get next sample
      logger.debug("Extending sample...");
      query.setOffset(i++ * pageSize);
      QueryExecution qe = ksQef.createQueryExecution(query);
      Model tmp = qe.execConstruct();
      sample.add(tmp);

      // if last call returned empty model, we can leave loop
      isEmpty = tmp.isEmpty();
    }
    logger.info("...done. Sample size: " + sample.size() + " triples");
  }
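The loop above pages CONSTRUCT results from the remote endpoint (ksQef) into the local sample model. For illustration only, a plausible shape for the template returned by getSampleQuery(), with ?p later bound to the entity to describe; the real template is defined elsewhere in the class hierarchy:

  ParameterizedSparqlString sampleQuery =
      new ParameterizedSparqlString("CONSTRUCT { ?s ?p ?o . } WHERE { ?s ?p ?o . }");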
 public void notifyEvent(Model arg0, Object arg1) {
   if (arg1 instanceof EditEvent) {
     EditEvent ee = (EditEvent) arg1;
     if (!ee.getBegin()) {
       if ((additionModel.size() > 0) || (removalModel.size() > 0)) {
         if (!isSynchronizing) {
           if (foreground) {
             (new PelletSynchronizer()).run();
           } else {
             new Thread(new PelletSynchronizer(), "PelletListener.PelletSynchronizer").start();
           }
         }
       }
     }
   }
 }
 private long doExecuteSparql(VitroRequest vreq) {
   OntModel jenaOntModel = ModelAccess.on(getServletContext()).getOntModel();
   OntModel source = ModelFactory.createOntologyModel(OntModelSpec.OWL_DL_MEM);
   String[] sourceModel = vreq.getParameterValues("sourceModelName");
   for (int i = 0; i < sourceModel.length; i++) {
     Model m = getModel(sourceModel[i], vreq);
     source.addSubModel(m);
   }
   Model destination = getModel(vreq.getParameter("destinationModelName"), vreq);
   String sparqlQueryStr = vreq.getParameter("sparqlQueryStr");
   String savedQueryURIStr = vreq.getParameter("savedQuery");
   String queryStr;
    if (savedQueryURIStr == null || savedQueryURIStr.length() == 0) {
     log.debug("Using entered query");
     queryStr = sparqlQueryStr;
   } else {
     Property queryStrProp = ResourceFactory.createProperty(SPARQL_QUERYSTR_PROP);
     jenaOntModel.enterCriticalSection(Lock.READ);
     try {
       Individual ind = jenaOntModel.getIndividual(savedQueryURIStr);
       log.debug("Using query " + savedQueryURIStr);
       queryStr = ((Literal) ind.getPropertyValue(queryStrProp)).getLexicalForm();
        // !!! We need to turn off automatic HTML-escaping for data property editing.
        queryStr = StringEscapeUtils.unescapeHtml(queryStr);
     } finally {
       jenaOntModel.leaveCriticalSection();
     }
   }
   Model tempModel = ModelFactory.createDefaultModel();
   Query query = SparqlQueryUtils.create(queryStr);
    QueryExecution qexec = QueryExecutionFactory.create(query, source);
    try {
      qexec.execConstruct(tempModel);
    } catch (QueryExecException qee) {
      // not a CONSTRUCT query; fall back to DESCRIBE
      qexec.execDescribe(tempModel);
    } finally {
      qexec.close();
    }
   destination.enterCriticalSection(Lock.WRITE);
   try {
     if (destination instanceof OntModel) {
       ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, true));
     } else {
       destination.notifyEvent(new EditEvent(null, true));
     }
     destination.add(tempModel);
   } finally {
     if (destination instanceof OntModel) {
       ((OntModel) destination).getBaseModel().notifyEvent(new EditEvent(null, false));
     } else {
       destination.notifyEvent(new EditEvent(null, false));
     }
     destination.leaveCriticalSection();
   }
   return tempModel.size();
 }
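The savedQuery branch above reads the query text from an individual in jenaOntModel. A hypothetical sketch of such an individual, assuming SPARQL_QUERYSTR_PROP is the URI of the property holding the query string (the URI below is illustrative, not Vitro's actual data):

  Model m = ModelFactory.createDefaultModel();
  Resource savedQuery = m.createResource("http://example.org/query/allLabels"); // hypothetical
  m.add(
      savedQuery,
      ResourceFactory.createProperty(SPARQL_QUERYSTR_PROP),
      "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
          + "CONSTRUCT { ?s rdfs:label ?o } WHERE { ?s rdfs:label ?o }");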
  public void printResults(DataSet datastore) {
    cleanResults(datastore);
    System.out.println("============================================================");
    System.out.println("RESULTS");
    System.out.println("============================================================");
    System.out.println("Gold Standard:");
    goldStandard.printStats();
    System.out.println();
    System.out.println("============================================================");
    System.out.println("Result of the Triple Equality Compariosn: ");
    tripleTEC.printStats();
    System.out.println();
    System.out.println("============================================================");
    System.out.println("Result of the Predicate Neutrality Comparison: ");
    triplePNC.printStats();
    System.out.println();
    System.out.println("============================================================");
    System.out.println("Result of the Object Similarity Comparison: ");
    tripleOSC.printStats();
    System.out.println();
    System.out.println("------------------------------");
    System.out.println("Wrong extracted triples: ");
    tripleOSCwrong.printStats();
    System.out.println();
    System.out.println("============================================================");
    System.out.println("Snippet results:");
    System.out.println("============================================================");
    System.out.println("Result of the Snippet Triple Equality Comparison: ");
    snippetTEC.printStats();
    System.out.println("============================================================");
    System.out.println("Result of the Snippet Predicate Neutrality Comparison: ");
    snippetPNC.printStats();
    System.out.println("============================================================");
    System.out.println("Result of the Snippet Object Similarity comparison: ");
    snippetOSC.printStats();

    //        List<Snippet> snippets = snippetTEC.getSnippets(PatternCategory.PlainProperty);
    //        System.out.println("snippetTEC.noClass: " + snippets.size());
    //        for (Snippet snippet : snippets)
    //        {
    //            //System.out.println(snippet.getSource());
    //            System.out.println("TRIPLES:" + snippet.getTriple());
    //        }

    Model triples = tripleOSCwrong.present.internalTemplates.getTriples();
    System.out.println("tripleOSCwrong.present.internalTemplates: " + triples.size());

    for (StmtIterator iterator = triples.listStatements(); iterator.hasNext(); ) {
      Statement statement = iterator.nextStatement();
      Evaluator.printStatement(statement, null);
      System.out.println();
    }

    // System.out.println(tripleTEC.getTripleCSV());
  }
 protected Model executeConstructQuery(String query) {
   logger.trace("Sending query\n{} ...", query);
   QueryExecution qe = qef.createQueryExecution(query);
   try {
     Model model = qe.execConstruct();
     timeout = false;
     if (model.size() == 0) {
       fullDataLoaded = true;
     }
     logger.debug("Got " + model.size() + " triples.");
     return model;
   } catch (QueryExceptionHTTP e) {
     if (e.getCause() instanceof SocketTimeoutException) {
       // remember that the endpoint timed out so callers can back off
       timeout = true;
       logger.warn("Got timeout");
     } else {
       logger.error("Exception executing query", e);
     }
     return ModelFactory.createDefaultModel();
   } finally {
     qe.close();
   }
 }
  public boolean doGet(
      MappedResource resource,
      Property property,
      boolean isInverse,
      HttpServletRequest request,
      HttpServletResponse response,
      Configuration config)
      throws IOException {
    Model descriptions = getAnonymousPropertyValues(resource, property, isInverse);
    if (descriptions.size() == 0) {
      return false;
    }

    Resource r = descriptions.getResource(resource.getWebURI());
     List<ResourceDescription> resourceDescriptions = new ArrayList<ResourceDescription>();
    StmtIterator it =
        isInverse ? descriptions.listStatements(null, property, r) : r.listProperties(property);
    while (it.hasNext()) {
      Statement stmt = it.nextStatement();
      RDFNode value = isInverse ? stmt.getSubject() : stmt.getObject();
      if (!value.isAnon()) {
        continue;
      }
       resourceDescriptions.add(
           new ResourceDescription(value.as(Resource.class), descriptions, config));
    }

    Model description = getResourceDescription(resource);
    ResourceDescription resourceDescription =
        new ResourceDescription(resource, description, config);

     String title =
         resourceDescription.getLabel()
             + (isInverse ? " ← " : " → ")
             + config.getPrefixes().getNsURIPrefix(property.getNameSpace())
             + ":"
             + property.getLocalName();
    VelocityHelper template = new VelocityHelper(getServletContext(), response);
    Context context = template.getVelocityContext();
    context.put("project_name", config.getProjectName());
    context.put("project_link", config.getProjectLink());
    context.put("title", title);
    context.put("server_base", config.getWebApplicationBaseURI());
    context.put("sparql_endpoint", resource.getDataset().getDataSource().getEndpointURL());
    context.put("back_uri", resource.getWebURI());
    context.put("back_label", resourceDescription.getLabel());
    context.put(
        "rdf_link",
        isInverse ? resource.getInversePathDataURL(property) : resource.getPathDataURL(property));
    context.put("resources", resourceDescriptions);
    template.renderXHTML("pathpage.vm");
    return true;
  }
 public void doCleanLiterals(Model model) {
   Model retractionsModel = ModelFactory.createDefaultModel();
   Model additionsModel = ModelFactory.createDefaultModel();
   model.enterCriticalSection(Lock.WRITE);
   try {
     ClosableIterator<Statement> closeIt = model.listStatements();
     try {
       for (Iterator<Statement> stmtIt = closeIt; stmtIt.hasNext(); ) {
         Statement stmt = stmtIt.next();
         if (stmt.getObject().isLiteral()) {
           Literal lit = (Literal) stmt.getObject();
           String lex = lit.getLexicalForm();
           char[] chars = lex.toCharArray();
           char[] cleanChars = new char[chars.length];
           int cleanPos = 0;
           boolean badChar = false;
            for (int i = 0; i < chars.length; i++) {
              // keep defined, non-control characters; strip the rest
              if (chars[i] > 31 && java.lang.Character.isDefined(chars[i])) {
                cleanChars[cleanPos] = chars[i];
                cleanPos++;
              } else {
                log.error("Bad char in " + lex);
                log.error("Code point " + (int) chars[i]);
                badChar = true;
              }
            }
            String cleanLex = new String(cleanChars, 0, cleanPos);
           if (badChar) {
             retractionsModel.add(stmt);
             Literal newLit = null;
             if (lit.getLanguage() != null && lit.getLanguage().length() > 0) {
               newLit = additionsModel.createLiteral(cleanLex, lit.getLanguage());
             } else if (lit.getDatatype() != null) {
               newLit = additionsModel.createTypedLiteral(cleanLex, lit.getDatatype());
             } else {
               newLit = additionsModel.createLiteral(cleanLex);
             }
             additionsModel.add(stmt.getSubject(), stmt.getPredicate(), newLit);
           }
         }
       }
     } finally {
       closeIt.close();
     }
     model.remove(retractionsModel);
     model.add(additionsModel);
     log.debug("Cleaned " + additionsModel.size() + " literals");
   } finally {
     model.leaveCriticalSection();
   }
 }
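A quick usage sketch of doCleanLiterals with hypothetical URIs: a literal containing the control character U+0001 is retracted and re-added in cleaned form.

  Model m = ModelFactory.createDefaultModel();
  m.add(
      m.createResource("http://example.org/a"), // hypothetical subject
      m.createProperty("http://example.org/p"), // hypothetical predicate
      m.createLiteral("bad\u0001value"));
  doCleanLiterals(m); // afterwards the only literal in m reads "badvalue"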
  private static long benchmarkSemarglJena(File path)
      throws FileNotFoundException, SAXException, ParseException {
    System.out.println("Semargl-Jena benchmark");
    Model model = ModelFactory.createDefaultModel();

    StreamProcessor streamProcessor =
        new StreamProcessor(RdfaParser.connect(JenaSink.connect(model)));

    List<File> files = listFiles(path);
    long time = System.nanoTime();
    for (File file : files) {
      streamProcessor.process(new FileReader(file), HTTP_EXAMPLE_COM);
    }
    System.out.println("Model size = " + model.size());
    return System.nanoTime() - time;
  }
 private int coalesce() {
   int infCnt = 0;
   for (String tf : mwMap.keySet()) {
     int cnt = coalesceParmsMap.get(tf);
     CircularFifoQueue<ModelWrapper> lmw = mwMap.get(tf);
     int size = lmw.size();
     if (size >= cnt) {
       // describe the newest cnt models in the queue and feed them to the rule engine
       for (int j = size - 1; j > size - 1 - cnt; j--) {
         Model m = Sparql.queryDescribe(lmw.get(j).get(), syphonQuery);
         ruleEngine.getModel().add(m);
         infCnt += m.size();
       }
     }
   }
   return infCnt;
 }
  public static void main(String[] args) throws IOException {
    Model model = ModelFactory.createDefaultModel();
    try (InputStream is =
        new BZip2CompressorInputStream(new URL(DBPEDIA_SCHEMA_DOWNLOAD_URL).openStream())) {
      //			model = FileManager.get().loadModel("input/dbpedia_3.9.owl");
      model.read(is, null, "RDF/XML");
    }

    System.out.println(model.size() + " triples loaded.");

    Set<Resource> classes = subjects(model, RDF.type, OWL.Class);
    Set<Resource> objectProperties = subjects(model, RDF.type, OWL.ObjectProperty);
    Set<Resource> dataProperties = subjects(model, RDF.type, OWL.DatatypeProperty);

    Map<Set<Resource>, String> setToName = new HashMap<>();
    setToName.put(classes, "classes");
    setToName.put(objectProperties, "objectproperties");
    setToName.put(dataProperties, "dataproperties");

    FieldType stringType = new FieldType(StringField.TYPE_STORED);
    stringType.setStoreTermVectors(false);
    FieldType textType = new FieldType(TextField.TYPE_STORED);
    textType.setStoreTermVectors(false);

    for (Set<Resource> set : setToName.keySet()) {
      IndexWriter writer = createWriter(setToName.get(set));

      for (Resource resource : set) {
        for (RDFNode object : model.listObjectsOfProperty(resource, RDFS.label).toSet()) {
          String label = object.asLiteral().getLexicalForm();

          Document luceneDocument = new Document();
          luceneDocument.add(new Field("uri", resource.getURI(), stringType));
          //					luceneDocument.add(new Field("dbpediaUri", indexDocument.getCanonicalDBpediaUri(),
          // stringType));
          luceneDocument.add(new Field("label", label, textType));
          //					documents.add(luceneDocument);
          writer.addDocument(luceneDocument);
        }
      }
      writer.commit();
      writer.close();
    }
  }
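The createWriter helper is not shown above; a minimal sketch assuming a Lucene 5.x-style API (FSDirectory, StandardAnalyzer, IndexWriterConfig) and an illustrative on-disk location:

  private static IndexWriter createWriter(String name) throws IOException {
    // hypothetical index location: one directory per resource set
    Directory dir = FSDirectory.open(Paths.get("index", name));
    return new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
  }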
 /**
  * Returns the number of statements in the graph.
  *
  * <p>XXX AT: this size may not be equal to the number of statements retrieved via getStatements()
  * because it counts each statement property.
  *
  * @return the number of statements in the graph
  */
 @Override
 public Long size() {
   Model graph = null;
   GraphConnection graphConnection = null;
   try {
     graphConnection = openGraph();
     graph = graphConnection.getGraph();
     graph.enterCriticalSection(Lock.READ);
     return graph.size();
   } finally {
     if (graph != null) {
       graph.leaveCriticalSection();
     }
     if (graphConnection != null) {
       graphConnection.close();
     }
   }
 }
  private void runSPARQL1_0_Mode() {
    Model model = ModelFactory.createDefaultModel();
    int limit = 1000;
    int offset = 0;
    String baseQuery = "CONSTRUCT {?s ?p ?o.} WHERE {?s <%s> ?o. ?s ?p ?o.} LIMIT %d OFFSET %d";
    String query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset);
    Model newModel = executeConstructQuery(query);
    Map<DatatypeProperty, Integer> result = new HashMap<DatatypeProperty, Integer>();
    while (!terminationCriteriaSatisfied() && newModel.size() != 0) {
      model.add(newModel);
      query = "SELECT ?p (COUNT(?s) AS ?count) WHERE {?s ?p ?o.} GROUP BY ?p";

      ResultSet rs = executeSelectQuery(query, model);
      QuerySolution qs;
      while (rs.hasNext()) {
        qs = rs.next();
        DatatypeProperty prop = new DatatypeProperty(qs.getResource("p").getURI());
        int newCnt = qs.getLiteral("count").getInt();
        // the SELECT runs over the whole accumulated model, so the latest
        // count supersedes any value from a previous iteration
        result.put(prop, newCnt);
      }
      if (!result.isEmpty()) {
        currentlyBestAxioms = buildAxioms(result);
      }

      offset += limit;
      query = String.format(baseQuery, propertyToDescribe.getName(), limit, offset);
      newModel = executeConstructQuery(query);
    }
  }
 public void explainScore(EvaluatedAxiom<T> evAxiom) {
   AxiomScore score = evAxiom.getScore();
   int posExampleCnt = score.getNrOfPositiveExamples();
   int negExampleCnt = score.getNrOfNegativeExamples();
   int total = posExampleCnt + negExampleCnt;
   StringBuilder sb = new StringBuilder();
   String lb = "\n";
   sb.append("######################################").append(lb);
   sb.append("Explanation:").append(lb);
   sb.append("Score(")
       .append(evAxiom.getAxiom())
       .append(") = ")
       .append(evAxiom.getScore().getAccuracy())
       .append(lb);
   sb.append("Total number of resources:\t").append(total).append(lb);
   sb.append("Number of positive examples:\t").append(posExampleCnt).append(lb);
   sb.append("Number of negative examples:\t").append(negExampleCnt).append(lb);
   sb.append("Based on sample:            \t").append(score.isSampleBased()).append(lb);
   if (sample != null) {
     sb.append("Sample size(#triples):      \t").append(sample.size()).append(lb);
   }
   sb.append("######################################");
   System.out.println(sb.toString());
 }
  public PelletListener(
      OntModel fullModel,
      OntModel model,
      Model inferenceModel,
      ReasonerConfiguration reasonerConfiguration,
      boolean foreground,
      boolean skipReasoningUponInitialization) {
    this.pelletModel = ModelFactory.createOntologyModel(reasonerConfiguration.getOntModelSpec());
    this.fullModel = fullModel;
    this.mainModel = model;
    this.inferenceModel = inferenceModel;
    if (this.inferenceModel == null) {
      log.trace("Inference model is null");
    }
    this.reasonerConfiguration = reasonerConfiguration;
    this.inferenceDrivingPatternAllowSet =
        reasonerConfiguration.getInferenceDrivingPatternAllowSet();
    this.inferenceDrivingPatternDenySet = reasonerConfiguration.getInferenceDrivingPatternDenySet();
    this.inferenceReceivingPatternAllowSet =
        reasonerConfiguration.getInferenceReceivingPatternAllowSet();

    if (this.inferenceDrivingPatternAllowSet != null) {
      this.inferenceDrivingPatternMap =
          new HashMap<Property, List<ObjectPropertyStatementPattern>>();
      for (Iterator<ObjectPropertyStatementPattern> i = inferenceDrivingPatternAllowSet.iterator();
          i.hasNext(); ) {
        ObjectPropertyStatementPattern pat = i.next();
        Property p = pat.getPredicate();
        List<ObjectPropertyStatementPattern> patList = inferenceDrivingPatternMap.get(p);
        if (patList == null) {
          patList = new LinkedList<ObjectPropertyStatementPattern>();
          patList.add(pat);
          inferenceDrivingPatternMap.put(p, patList);
        } else {
          patList.add(pat);
        }
      }
    }
    this.pipeOpen = true;
    this.additionModel = ModelFactory.createDefaultModel();
    this.removalModel = ModelFactory.createDefaultModel();
    this.deletedObjectProperties = ModelFactory.createDefaultModel();
    this.deletedDataProperties = ModelFactory.createDefaultModel();
    this.mainModel.enterCriticalSection(Lock.READ);
    try {
       if (this.inferenceDrivingPatternAllowSet != null) {
         for (ObjectPropertyStatementPattern pat : this.inferenceDrivingPatternAllowSet) {
           addedStatements(
               mainModel.listStatements((Resource) null, pat.getPredicate(), (RDFNode) null));
         }
       }
      if (!skipReasoningUponInitialization) {
        this.foreground = foreground;
        notifyEvent(null, new EditEvent(null, false));
       } else if (inferenceModel != null && inferenceModel.size() == 0) {
        foreground = true;
        notifyEvent(null, new EditEvent(null, false));
        this.foreground = foreground;
      }
    } finally {
      this.mainModel.leaveCriticalSection();
    }

    this.fullModel.getBaseModel().register(this);
    this.mainModel.getBaseModel().register(this);
  }
  /**
   * See OWLRLExample in spin-examples-1.2.0.jar
   *
   * <p>Currently limited to adding the resulting triples from the spin reasoning to the repository
   *
   * <p>TODO: add more modes, such as delete matching, add matching, only return matching triples
   * etc.
   *
    * @param inputRepository The OpenRDF repository to use for the input triples
    * @param contexts Optional OpenRDF contexts restricting which input statements are read
    * @return The repository with the newly inferred triples added
    * @throws QueryAllException
   */
  public Repository processSpinRules(
      final Repository inputRepository, final org.openrdf.model.Resource... contexts)
      throws QueryAllException {
    // Load domain model with imports
    // System.out.println("Loading domain ontology...");
    // OntModel queryModel =
    // loadModelWithImports("http://www.co-ode.org/ontologies/pizza/2007/02/12/pizza.owl");
    SpinInferencingRuleImpl.log.info("Loading jena model from sesame repository");
    final OntModel queryModel =
        SpinUtils.addSesameRepositoryToJenaModel(
            inputRepository,
            ModelFactory.createDefaultModel(ReificationStyle.Minimal),
            "http://spin.example.org/",
            contexts);

    // Create and add Model for inferred triples
    final Model newTriples = ModelFactory.createDefaultModel(ReificationStyle.Minimal);
    queryModel.addSubModel(newTriples);

    SpinInferencingRuleImpl.log.info("Loading ontologies...");

    // Register any new functions defined in OWL RL
    // NOTE: The source for these rules is given as "this" so that they can be retrieved in
    // future based on this object

    // Build one big union Model of everything
    final Graph[] graphs = new Graph[this.ontologyModels.size() + 1];

    graphs[0] = queryModel.getGraph();

    int i = 1;

    for (final OntModel nextModel : this.ontologyModels) {
      SpinInferencingRuleImpl.log.info("i=" + i + " nextModel.size()=" + nextModel.size());
      graphs[i++] = nextModel.getGraph();
    }

    final MultiUnion multiUnion = new MultiUnion(graphs);

    final Model unionModel = ModelFactory.createModelForGraph(multiUnion);

    final Set<Object> allowedRuleSources = new HashSet<Object>();

    allowedRuleSources.addAll(this.localImports);

    // Collect rules (and template calls) defined in OWL RL
    final Map<CommandWrapper, Map<String, RDFNode>> initialTemplateBindings =
        new HashMap<CommandWrapper, Map<String, RDFNode>>();
    final Map<Resource, List<CommandWrapper>> cls2Query =
        SPINQueryFinder.getClass2QueryMap(
            unionModel,
            queryModel,
            SPIN.rule,
            true,
            initialTemplateBindings,
            false,
            allowedRuleSources);
    final Map<Resource, List<CommandWrapper>> cls2Constructor =
        SPINQueryFinder.getClass2QueryMap(
            queryModel,
            queryModel,
            SPIN.constructor,
            true,
            initialTemplateBindings,
            false,
            allowedRuleSources);
    final SPINRuleComparator comparator = new DefaultSPINRuleComparator(queryModel);

    // Run all inferences
    SpinInferencingRuleImpl.log.info("Running SPIN inferences...");
    SPINInferences.run(
        queryModel,
        newTriples,
        cls2Query,
        cls2Constructor,
        initialTemplateBindings,
        null,
        null,
        false,
        SPIN.rule,
        comparator,
        null,
        allowedRuleSources);
    SpinInferencingRuleImpl.log.info("Inferred triples: " + newTriples.size());
    SpinInferencingRuleImpl.log.info("Query triples: " + queryModel.size());

    final StmtIterator listStatements = newTriples.listStatements();

    while (listStatements.hasNext()) {
      SpinInferencingRuleImpl.log.info(listStatements.next().toString());
    }

    // Note: To optimise the process, we only add the new triples back into the original
    // repository
    return SpinUtils.addJenaModelToSesameRepository(newTriples, inputRepository);
  }
 public void testBulkRemoveSelf() {
   Model m = modelWithStatements("they sing together; he sings alone");
   m.remove(m);
   assertEquals("", 0, m.size());
 }
 public long getEvaluatedFragmentSize() {
   return sample.size();
 }
 public String toString() {
   return "RDFStore (Pellet): " + model.size() + " triples";
 }