// simulate the streaming data
 public boolean streamEmulation() throws IOException {
   String line = null;
   while (this.cacheContentOfGraphIds.size() < this.size) {
     AGGraph graph =
         this.graphMaker.createGraph("http://fefocacheeviction.org/graph" + this.graphID);
     for (int i = 0; i < this.numberOfTriples; ++i) {
       if ((line = this.br.readLine()) != null) {
         line = line.replaceAll("<", "");
         line = line.replaceAll(">", "");
         String[] parts = line.split(" ");
         Node s = NodeFactory.createURI(parts[0]);
         Node p = NodeFactory.createURI(parts[1]);
         if (parts[2].contains("http")) {
            Node o = NodeFactory.createURI(parts[2]);
           graph.add(new Triple(s, p, o));
         } else {
           Node o = NodeFactory.createLiteral(parts[2].substring(1, parts[2].length() - 1));
           graph.add(new Triple(s, p, o));
         }
       } else {
         return false;
       }
     }
     this.cacheContentOfGraphIds.add(
         new GraphIdCounterPair(
             "http://fefocacheeviction.org/graph" + (this.graphID++), LocalTime.now()));
     this.modelGraph = this.graphMaker.createUnion(this.modelGraph, graph);
   }
   return true;
 }
  /**
   * Query SPARQL endpoint with a SELECT query
   *
   * @param qExec QueryExecution encapsulating the query
   * @return model retrieved by querying the endpoint
   */
  private Model getSelectModel(QueryExecution qExec) {
    Model model = ModelFactory.createDefaultModel();
    Graph graph = model.getGraph();
    ResultSet results = qExec.execSelect();

    while (results.hasNext()) {
      QuerySolution sol = results.next();
      String subject;
      String predicate;
      RDFNode object;

      try {
        subject = sol.getResource("s").toString();
        predicate = sol.getResource("p").toString();
        object = sol.get("o");
      } catch (NoSuchElementException e) {
        logger.error("SELECT query does not return a (?s ?p ?o) Triple");
        continue;
      }

      Node objNode;
      if (object.isLiteral()) {
        Literal obj = object.asLiteral();
        objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
      } else {
        // Non-literal results are resources; rebuild them as URI nodes rather than literals
        // (blank nodes returned by the endpoint would need separate handling).
        objNode = NodeFactory.createURI(object.toString());
      }

      graph.add(
          new Triple(NodeFactory.createURI(subject), NodeFactory.createURI(predicate), objNode));
    }

    return model;
  }
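A hedged usage sketch for the helper above: it shows how the QueryExecution consumed by getSelectModel(...) would typically be produced with Jena's QueryFactory and QueryExecutionFactory (packages omitted, matching the other snippets). The endpoint URL is a placeholder and the wrapper method name is invented for illustration.

  private Model fetchSpoModel() {
    // Placeholder endpoint and query; any SELECT returning ?s ?p ?o bindings works here.
    Query query = QueryFactory.create("SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 100");
    QueryExecution qExec =
        QueryExecutionFactory.sparqlService("http://example.org/sparql", query);
    try {
      return getSelectModel(qExec);
    } finally {
      qExec.close();
    }
  }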
 // See RDFParser
 private Node createNode(Map<String, Object> map) {
   String type = (String) map.get("type");
   String lex = (String) map.get("value");
   if (type.equals(IRI)) return NodeFactory.createURI(lex);
   else if (type.equals(BLANK_NODE)) return labels.get(null, lex);
   else if (type.equals(LITERAL)) {
     String lang = (String) map.get("language");
     String datatype = (String) map.get("datatype");
     if (lang == null && datatype == null) return NodeFactory.createLiteral(lex);
     if (lang != null) return NodeFactory.createLiteral(lex, lang, null);
     RDFDatatype dt = NodeFactory.getType(datatype);
     return NodeFactory.createLiteral(lex, dt);
    } else throw new InternalErrorException("Node is not an IRI, bNode or a literal: " + type);
    /*
     * Expected keys in the map:
     *   "value"    : the value of the node.
     *                "subject" can be an IRI or blank node id.
     *                "predicate" should only ever be an IRI.
     *                "object" can be an IRI or blank node id, or a literal value
     *                (represented as a string).
     *   "type"     : "IRI" if the value is an IRI, or "blank node" if the value is a blank node.
     *                "object" can also be "literal" in the case of literals.
     * The value of "object" can also contain the following optional key-value pairs:
     *   "language" : the language tag of a string literal.
     *   "datatype" : the datatype of the literal (if not set, it defaults to XSD:string;
     *                if set to null, null is used).
     */
 }
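For orientation, a hedged sketch of the kind of map createNode(...) expects, following the key descriptions in the comment above; the example IRI is a placeholder, and the constants IRI, BLANK_NODE and LITERAL plus the blank-node label registry come from the enclosing parser class.

  // Hedged sketch of an input map for createNode(...), per the key descriptions above.
  Map<String, Object> iriEntry = new HashMap<String, Object>();
  iriEntry.put("type", "IRI"); // or "blank node" / "literal"
  iriEntry.put("value", "http://example.org/resource/1");
  // For literal entries, the optional "language" and "datatype" keys may also be present.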
public abstract class AbstractExplanationQueryBuilder<T extends AbstractExplanationQueryBuilder<T>>
    extends AbstractQueryBuilder<T> {

  protected static final Node PREDICATE = NodeFactory.createVariable("p"); // $NON-NLS-1$
  protected static final Node OBJECT = NodeFactory.createVariable("o"); // $NON-NLS-1$
  protected static final Node SOURCE = NodeFactory.createVariable("source"); // $NON-NLS-1$
  private String conceptUri;

  protected AbstractExplanationQueryBuilder(String conceptUri) {
    super();
    this.conceptUri = conceptUri;
  }

  protected String getConceptUri() {
    return conceptUri;
  }

  protected Node getConceptUriNode() {
    return NodeFactory.createURI(getConceptUri());
  }

  protected NodeValueNode getConceptUriExpr() {
    return new NodeValueNode(getConceptUriNode());
  }

  protected Expr createNoLiteralOrLanguageExpression() {
    return SparqlExpressionBuilder.use(not(createIsLiteralExpression(OBJECT)))
        .or(createMultipleOptionsExpression(new SparqlLanguageExpression(OBJECT), getLanguages()))
        .toExpr();
  }
}
 /**
  * Adds a profile restriction to the query.
  *
  * @param profile the profile
  * @return this query
  */
 public ComponentQuery addProfile(String profile) {
   if (profile != null && !profile.isEmpty()) {
     Triple t =
         new Triple(
             wfNode,
             NodeFactory.createURI(ONTOLOGY_IRI + "fits"),
             NodeFactory.createURI(profile));
     query.addTriplePattern(t);
   }
   return this;
 }
 /** Finishes the dependency label filter. */
 private void finishDependencyLabelFilter() {
   if (dependencyLabelPattern != null && !dependencyLabelPattern.isEmpty()) {
     Node processNode = NodeFactory.createAnon();
     Node installationNode = NodeFactory.createAnon();
     Node dependencyNode = NodeFactory.createAnon();
     Node dependencyLabel = NodeFactory.createVariable("dependencyLabel");
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(WFDESC_IRI + "hasSubProcess"), processNode));
     group.addTriplePattern(
         new Triple(
             processNode,
             NodeFactory.createURI(ONTOLOGY_IRI + "requiresInstallation"),
             installationNode));
     group.addTriplePattern(
         new Triple(
             installationNode,
             NodeFactory.createURI(ONTOLOGY_IRI + "dependsOn"),
             dependencyNode));
     group.addTriplePattern(
         new Triple(dependencyNode, NodeFactory.createURI(SKOS_LABEL), dependencyLabel));
     query.addElement(group);
     ElementFilter filter =
         new ElementFilter(
             new E_StrContains(
                 new ExprVar(dependencyLabel), new NodeValueString(dependencyLabelPattern)));
     query.addElementFilter(filter);
   }
 }
 /**
  * Adds an environment restriction to the query.
  *
  * @param environmentClass the environment class
  * @return this query
  */
 public ComponentQuery addInstallationEnvironmentType(String environmentClass) {
   if (environmentClass != null && !environmentClass.isEmpty()) {
     Node processNode = NodeFactory.createAnon();
     Node installationNode = NodeFactory.createAnon();
     Node environmentNode = NodeFactory.createAnon();
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(WFDESC_IRI + "hasSubProcess"), processNode));
     group.addTriplePattern(
         new Triple(
             processNode,
             NodeFactory.createURI(ONTOLOGY_IRI + "requiresInstallation"),
             installationNode));
     group.addTriplePattern(
         new Triple(
             installationNode,
             NodeFactory.createURI(ONTOLOGY_IRI + "hasEnvironment"),
             environmentNode));
     group.addTriplePattern(
         new Triple(
             environmentNode,
             NodeFactory.createURI(TYPE_IRI),
             NodeFactory.createURI(environmentClass)));
     query.addElement(group);
   }
   return this;
 }
    /**
     * Adds a handlesMimetypes restriction to the query.
     *
     * <p>Note that all mimetypes added using the methods {@link #addHandlesMimetype(String...)},
     * {@link #addHandlesMimetypeWildcard(String...)}, {@link #addHandlesMimetypes(String, String)}
     * and {@link #addHandlesMimetypesWildcard(String, String)} will be concatenated using UNION.
     *
     * @param leftMimetype the left mimetype
     * @param rightMimetype the right mimetype
     * @return this query
     */
    public ComponentQuery addHandlesMimetypes(String leftMimetype, String rightMimetype) {
      if (leftMimetype != null
          && !leftMimetype.isEmpty()
          && rightMimetype != null
          && !rightMimetype.isEmpty()) {
        Node node = NodeFactory.createAnon();
        ElementGroup group = new ElementGroup();
        group.addTriplePattern(
            new Triple(wfNode, NodeFactory.createURI(ONTOLOGY_IRI + "handlesMimetypes"), node));
        group.addTriplePattern(
            new Triple(
                node,
                NodeFactory.createURI(TYPE_IRI),
                NodeFactory.createURI(ONTOLOGY_IRI + "AcceptedMimetypes")));
        group.addTriplePattern(
            new Triple(
                node,
                NodeFactory.createURI(ONTOLOGY_IRI + "handlesLeftMimetype"),
                NodeFactory.createLiteral(leftMimetype)));
        group.addTriplePattern(
            new Triple(
                node,
                NodeFactory.createURI(ONTOLOGY_IRI + "handlesRightMimetype"),
                NodeFactory.createLiteral(rightMimetype)));

        handlesMimetypes.addElement(group);
      }
      return this;
    }
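A hedged sketch of how these builder methods chain together; construction of the ComponentQuery instance is not shown in these snippets, so it is passed in, and the profile and environment IRIs are placeholders.

  static ComponentQuery restrictExample(ComponentQuery query) {
    // Mimetype restrictions added via the addHandlesMimetype* methods are combined with UNION,
    // as noted in the Javadoc above; the other restrictions are simply added to the query.
    return query
        .addProfile("http://example.org/profiles/migration")
        .addInstallationEnvironmentType("http://example.org/ontology#LinuxEnvironment")
        .addHandlesMimetypes("image/tiff", "image/png");
  }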
 /**
  * Adds a migration path restriction to the query.
  *
  * @param sourceMimetype the source mimetype
  * @param targetMimetype the target mimetype
  * @return this query
  */
 public ComponentQuery addMigrationPath(String sourceMimetype, String targetMimetype) {
   if ((sourceMimetype != null && !sourceMimetype.isEmpty())
       || (targetMimetype != null && !targetMimetype.isEmpty())) {
     Node node = NodeFactory.createAnon();
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(ONTOLOGY_IRI + "migrates"), node));
     group.addTriplePattern(
         new Triple(
             node,
             NodeFactory.createURI(TYPE_IRI),
             NodeFactory.createURI(ONTOLOGY_IRI + "MigrationPath")));
     if (sourceMimetype != null && !sourceMimetype.isEmpty()) {
       group.addTriplePattern(
           new Triple(
               node,
               NodeFactory.createURI(ONTOLOGY_IRI + "sourceMimetype"),
               NodeFactory.createLiteral(sourceMimetype)));
     }
     if (targetMimetype != null && !targetMimetype.isEmpty()) {
       group.addTriplePattern(
           new Triple(
               node,
               NodeFactory.createURI(ONTOLOGY_IRI + "targetMimetype"),
               NodeFactory.createLiteral(targetMimetype)));
     }
     query.addElement(group);
   }
   return this;
 }
 @Test
 public void testAddConstruct() {
   Triple t =
       new Triple(
           NodeFactory.createURI("one"),
           NodeFactory.createURI("two"),
           NodeFactory.createURI("three"));
   handler.addConstruct(t);
   Template template = query.getConstructTemplate();
   assertNotNull(template);
   List<Triple> lst = template.getTriples();
   assertEquals(1, lst.size());
   assertEquals(t, lst.get(0));
 }
 /**
  * Adds an output port type restriction to the query.
  *
  * @param provides the port type
  * @return this query
  */
 public ComponentQuery addOutputPort(String provides) {
   if (provides != null && !provides.isEmpty()) {
     Node node = NodeFactory.createAnon();
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(WFDESC_IRI + "hasOutput"), node));
     group.addTriplePattern(
         new Triple(
             node,
             NodeFactory.createURI(ONTOLOGY_IRI + "provides"),
             NodeFactory.createURI(provides)));
     query.addElement(group);
   }
   return this;
 }
 @Test
 public void bindingStream_61() {
   BindingMap b = BindingFactory.create();
   Node bn = NodeFactory.createAnon(new AnonId("unusual"));
   b.add(Var.alloc("v"), bn);
   testWriteRead(b);
 }
Example #13
 private static Node createIRI(String iriStr) {
   try {
     return NodeFactory.createURI(IRIResolver.resolveString(iriStr));
   } catch (RuntimeException ex) {
     return null;
   }
 }
Example #14
 public static Binding toBinding(List<String> row, List<Var> vars) {
   BindingHashMap result = new BindingHashMap();
   for (int i = 0; i < vars.size(); i++) {
     result.add(vars.get(i), NodeFactory.createLiteral(row.get(i)));
   }
   return result;
 }
Example #15
  @Test
  public void shouldConvertRdfObjectsToStrings() {

    final DatasetGraph mem = createMem();
    mem.add(
        createAnon(), createURI("subject"), createURI("a/b/c"), NodeFactory.createLiteral("abc"));
    mem.add(
        createAnon(), createURI("subject"),
        createURI("a-numeric-type"), createTypedLiteral(0).asNode());
    mem.add(
        createAnon(), createURI("subject"),
        createURI("an-empty-string"), createLiteral(""));
    mem.add(createAnon(), createURI("subject"), createURI("a-uri"), createURI("some-uri"));

    assertEquals(
        "abc", testObj.getObjectsAsString(mem, createURI("subject"), createResource("a/b/c")));
    assertEquals(
        "0",
        testObj.getObjectsAsString(mem, createURI("subject"), createResource("a-numeric-type")));
    assertEquals(
        "<empty>",
        testObj.getObjectsAsString(mem, createURI("subject"), createResource("an-empty-string")));
    assertEquals(
        "&lt;<a href=\"some-uri\">some-uri</a>&gt;",
        testObj.getObjectsAsString(mem, createURI("subject"), createResource("a-uri")));
    assertEquals(
        "",
        testObj.getObjectsAsString(mem, createURI("subject"), createResource("a-nonexistent-uri")));
  }
 /**
  * Adds a handlesMimetype restriction to the query.
  *
  * <p>Note that all mimetypes added using the methods {@link #addHandlesMimetype(String...)},
  * {@link #addHandlesMimetypeWildcard(String...)}, {@link #addHandlesMimetypes(String, String)}
  * and {@link #addHandlesMimetypesWildcard(String, String)} will be concatenated using UNION.
  *
  * @param mimetypes the mimetypes
  * @return this query
  */
 public ComponentQuery addHandlesMimetype(String... mimetypes) {
   if (mimetypes != null && mimetypes.length > 0) {
     ElementGroup elements = new ElementGroup();
     Set<String> mimeset = new HashSet<String>();
     Collections.addAll(mimeset, mimetypes);
     for (String mimetype : mimeset) {
       if (mimetype != null) {
         elements.addTriplePattern(
             new Triple(
                 wfNode,
                 NodeFactory.createURI(ONTOLOGY_IRI + "handlesMimetype"),
                 NodeFactory.createLiteral(mimetype)));
       }
     }
     handlesMimetypes.addElement(elements);
   }
   return this;
 }
 @Override
 public Iterator<Node> listGraphNodes() {
   List<Node> results = new LinkedList<Node>();
   Iterator<String> it = dataset.listNames();
   while (it.hasNext()) {
     results.add(NodeFactory.createURI(it.next()));
   }
   return results.iterator();
 }
 /**
  * Creates a new component query for the provided web resource.
  *
  * @param resource a web resource
  */
 private ComponentQuery(WebResource resource) {
   this.resource = resource.path(COMPONENTS_PATH);
   wfNode = NodeFactory.createVariable("w");
   addPrefix(RDF_PREFIX, RDF_IRI);
   addPrefix(ONTOLOGY_PREFIX, ONTOLOGY_IRI);
   // Add prefixes already specified in the myExperiment API
   prefixMapping.setNsPrefix(WFDESC_PREFIX, WFDESC_IRI);
   prefixMapping.setNsPrefix(RDFS_PREFIX, RDFS_IRI);
 }
Example #19
 @Override
 public String asSparqlTerm(PrefixLogger pl) {
   String lang = (language.equals("none") ? "" : language);
   RDFDatatype dt =
       datatype.length() == 0 ? null : TypeMapper.getInstance().getSafeTypeByName(datatype);
   Node n = NodeFactory.createLiteral(spelling, lang, dt);
   if (datatype.length() > 0) pl.present(datatype);
   String lf = FmtUtils.stringForNode(n, RDFUtils.noPrefixes);
   return lf;
 }
Example #20
File: Helper.java  Project: GeoKnow/rsine
  public static void addToDatasetAndPersist(
      Statement statement,
      DatasetGraph datasetGraph,
      PersistAndNotifyProvider persistAndNotifyProvider) {
    datasetGraph
        .getDefaultGraph()
        .add(
            new Triple(
                NodeFactory.createURI(statement.getSubject().toString()),
                NodeFactory.createURI(statement.getPredicate().toString()),
                NodeFactory.createURI(statement.getObject().toString())));

    persistAndNotifyProvider.persistAndNotify(
        Helper.createChangeSetModel(
            statement.getSubject().stringValue(),
            statement.getPredicate().stringValue(),
            statement.getObject(),
            ChangeTripleService.CHANGETYPE_ADD),
        true);
  }
  @Test
  public void testSetVars() {
    Var v = Var.alloc("v");
    Triple t = new Triple(NodeFactory.createURI("one"), NodeFactory.createURI("two"), v);
    handler.addConstruct(t);
    Template template = query.getConstructTemplate();
    assertNotNull(template);
    List<Triple> lst = template.getTriples();
    assertEquals(1, lst.size());
    assertEquals(t, lst.get(0));

    Map<Var, Node> values = new HashMap<Var, Node>();
    values.put(v, NodeFactory.createURI("three"));
    handler.setVars(values);

    template = query.getConstructTemplate();
    assertNotNull(template);
    lst = template.getTriples();
    assertEquals(1, lst.size());
    t =
        new Triple(
            NodeFactory.createURI("one"),
            NodeFactory.createURI("two"),
            NodeFactory.createURI("three"));
    assertEquals(t, lst.get(0));
  }
Example #22
 /**
   * Maps the names of this element's variable nodes according to the given mapping, recursing into
   * nested value functions.
   *
   * @param directVarMap a map from original variable names to replacement variable names
   * @return the number of entries in {@code directVarMap}
  */
 public int mapNodeVarNames(Map<String, String> directVarMap) {
   for (int i = 0; i < nodes.length; i++) {
     if (nodes[i].isVariable()) {
       if (directVarMap.containsKey(nodes[i].getName())) {
         nodes[i] = NodeFactory.createVariable(directVarMap.get(nodes[i].getName()));
       } else if (this.funcs.containsKey(nodes[i])) {
         // store the function in a variable then map its vars.
         BaseValueFunction valFunc = this.funcs.get(nodes[i]);
         valFunc.mapNodeVarNames(directVarMap);
         // remove the function from the map of functions
         this.funcs.remove(nodes[i]);
         // replace the original variable node in the root function with the mapped variable node
         nodes[i] = valFunc.getResultVarNode();
         // put the function back into the map of functions with the mapped variable node as the
         // key
         this.funcs.put(nodes[i], valFunc);
       }
     }
   }
   return directVarMap.size();
 }
Example #23
  private ResultSetRewindable convertToStrings(ResultSetRewindable resultsActual) {
    List<Binding> bindings = new ArrayList<Binding>();
    while (resultsActual.hasNext()) {
      Binding b = resultsActual.nextBinding();
      BindingMap b2 = BindingFactory.create();

      for (String vn : resultsActual.getResultVars()) {
        Var v = Var.alloc(vn);
        Node n = b.get(v);
        String s;
        if (n == null) s = "";
        else if (n.isBlank()) s = "_:" + n.getBlankNodeLabel();
        else s = NodeFunctions.str(n);
        b2.add(v, NodeFactory.createLiteral(s));
      }
      bindings.add(b2);
    }
    ResultSet rs =
        new ResultSetStream(
            resultsActual.getResultVars(), null, new QueryIterPlainWrapper(bindings.iterator()));
    return ResultSetFactory.makeRewindable(rs);
  }
 /**
  * Adds a measure output port restriction to the query.
  *
  * @param relatedObject the object related to the measures
  * @param measure the measure
  * @return this query
  */
 public ComponentQuery addMeasureOutputPort(String relatedObject, String measure) {
   if (measure != null && !measure.isEmpty()) {
     Node node = NodeFactory.createAnon();
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(WFDESC_IRI + "hasOutput"), node));
     if (relatedObject != null && !relatedObject.isEmpty()) {
       group.addTriplePattern(
           new Triple(
               node,
               NodeFactory.createURI(ONTOLOGY_IRI + "relatesTo"),
               NodeFactory.createURI(ONTOLOGY_IRI + relatedObject)));
     }
     group.addTriplePattern(
         new Triple(
             node,
             NodeFactory.createURI(ONTOLOGY_IRI + "provides"),
             NodeFactory.createURI(measure)));
     query.addElement(group);
   }
   return this;
 }
 /** Finishes the migration path target filter. */
 private void finishMigrationPathFilter() {
   if (migrationPathTargetPattern != null && !migrationPathTargetPattern.isEmpty()) {
     Node migrationPath = NodeFactory.createAnon();
     Node toMimetype = NodeFactory.createVariable("migrationPathTarget");
     ElementGroup group = new ElementGroup();
     group.addTriplePattern(
         new Triple(wfNode, NodeFactory.createURI(ONTOLOGY_IRI + "migrates"), migrationPath));
     group.addTriplePattern(
         new Triple(
             migrationPath,
             NodeFactory.createURI(TYPE_IRI),
             NodeFactory.createURI(ONTOLOGY_IRI + "MigrationPath")));
     group.addTriplePattern(
         new Triple(
             migrationPath, NodeFactory.createURI(ONTOLOGY_IRI + "targetMimetype"), toMimetype));
     query.addElement(group);
     ElementFilter filter =
         new ElementFilter(
             new E_StrContains(
                 new ExprVar(toMimetype), new NodeValueString(migrationPathTargetPattern)));
     query.addElementFilter(filter);
   }
 }
 protected Node getConceptUriNode() {
   return NodeFactory.createURI(getConceptUri());
 }
Example #27
 public Node getIRI() {
   return NodeFactory.createURI(name);
 }
Example #28
 private void ensureImported(MultiUnion union, String baseURI, Model model) {
   if (!union.contains(
       Triple.create(NodeFactory.createURI(baseURI), RDF.type.asNode(), OWL.Ontology.asNode()))) {
     union.addGraph(model.getGraph());
   }
 }
Example #29
  @Override
  public void headAction(Node[] args, int length, RuleContext context) {

    checkArgs(length, context);
    Node n0 = getArg(0, args, context);
    Node n1 = getArg(1, args, context);
    Node n2 = getArg(2, args, context);
    logger.fine("BLACKLIST in head action " + n0 + " " + n1 + " " + n2);

    SolrDocumentList docs =
        SolrUtil.getDocsbySPO(null, IDS.hasSourceDeviceId.toString(), n2.toString(), 4, true);
    logger.fine(" docs are " + docs);
    int countIncidents = 0;
    for (int k = 0; k < docs.size(); k++) {
      SolrDocument doc = docs.get(k);
      logger.fine(" doc received is " + doc);
      String subject = (String) doc.getFieldValue("subject_t");
      SolrDocumentList docs2 = SolrUtil.getDocsbySPO(subject, "*incidentTime*", null, 1);
      logger.fine(" docs2 are " + docs2);
      Calendar timeStamp = Util.toCalendar((String) docs2.get(0).getFieldValue("object_t"));
      logger.info("TIMESTAMP: " + timeStamp.getTime());

      Calendar oldNow = Util.getNowPlus(-86400000);
      logger.fine("24 hours ago: " + Util.calendarToISO8601String(oldNow));

      if (timeStamp.compareTo(oldNow) > 0) {
        countIncidents = countIncidents + 1;
        logger.fine("CountIncidents: " + countIncidents);
      }
    }
    SolrDocumentList docs2 =
        SolrUtil.getDocsbySPO(
            null, IDS.isAttackedByID.toString(), n2.toString(), SolrUtil.MAX_DOCS, true);

    if (docs2.size() <= 0) {
      if (countIncidents == 4) {

        Node sub = NodeFactory.createURI(IDS.Attack.getURI() + UUID.randomUUID());
        Node pred1 = NodeFactory.createURI(IDS.hasStatus.getURI());
        Node obj1 = NodeFactory.createLiteral("Multiple ID Attempt Attack");
        context.add(new Triple(sub, pred1, obj1));
        logger.fine("added n-triple: " + sub + "," + pred1 + "," + obj1);

        Node pred2 = NodeFactory.createURI(IDS.isAttackedByID.getURI());
        context.add(new Triple(sub, pred2, n2));
        logger.fine("added n-triple: " + sub + "," + pred2 + "," + n2);

        SolrDocument doc = docs.get(0);
        Date timeStamp = (Date) doc.getFieldValue("timestamp");
        Calendar cal = Calendar.getInstance();
        cal.setTime(timeStamp);
        String aTimeStart = Util.calendarToISO8601String(cal);
        Node obj2 = NodeFactory.createLiteral(aTimeStart);

        Node pred3 = NodeFactory.createURI(IDS.attackStartTime.getURI());
        context.add(new Triple(sub, pred3, obj2));
        logger.fine("added n-triple: " + sub + "," + pred3 + "," + obj2);

        SolrDocument doc2 = docs.get(docs.size() - 1);
        Date timeStamp2 = (Date) doc2.getFieldValue("timestamp");
        Calendar cal2 = Calendar.getInstance();
        cal2.setTime(timeStamp2);
        String aTimeStart2 = Util.calendarToISO8601String(cal2);
        Node obj3 = NodeFactory.createLiteral(aTimeStart2);

        Node pred4 = NodeFactory.createURI(IDS.attackEndTime.getURI());
        context.add(new Triple(sub, pred4, obj3));
        logger.fine("added n-triple: " + sub + "," + pred4 + "," + obj3);
      }
    }
  }
 private Node createLiteral(String lex, String datatype, String lang) {
   if (lang == null && datatype == null) return NodeFactory.createLiteral(lex);
   if (lang != null) return NodeFactory.createLiteral(lex, lang, null);
   RDFDatatype dt = NodeFactory.getType(datatype);
   return NodeFactory.createLiteral(lex, dt);
 }
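For reference, a small hedged sketch exercising the three branches above directly against NodeFactory; the xsd:integer datatype IRI is just an illustrative choice.

  Node plain = NodeFactory.createLiteral("hello"); // no language, no datatype
  Node tagged = NodeFactory.createLiteral("hello", "en", null); // language-tagged
  RDFDatatype dt = NodeFactory.getType("http://www.w3.org/2001/XMLSchema#integer");
  Node typed = NodeFactory.createLiteral("42", dt); // typed literal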