@Test
  public void directDataLinkToQuads() throws IOException {
    // Build a tiny assembler description in memory and a quads file for it to load later.
    final Model assemblerModel = createDefaultModel();
    final Path quadFile = createTempFile("quadExample", ".nq");
    final Resource quadFileUri =
        assemblerModel.createResource(quadFile.toFile().toURI().toString());
    final Resource exampleRoot = assemblerModel.createResource("test:simpleExample");
    exampleRoot.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem);
    exampleRoot.addProperty(data, quadFileUri);

    // Copy every statement of the description into one named graph and serialize as N-Quads.
    final DatasetGraph dsg = createTxnMem().asDatasetGraph();
    assemblerModel
        .listStatements()
        .mapWith(Statement::asTriple)
        .mapWith(triple -> new Quad(quadFileUri.asNode(), triple))
        .forEachRemaining(dsg::add);
    try (OutputStream out = new FileOutputStream(quadFile.toFile())) {
      write(out, dsg, NQUADS);
    }

    // Assembling must load the quads into the named graph and leave the default graph empty.
    final Dataset dataset = assemble(exampleRoot);
    assertTrue(dataset.getDefaultModel().isEmpty());
    final Model namedModel = dataset.getNamedModel(quadFileUri.getURI());
    assertTrue(namedModel.contains(namedModel.createStatement(exampleRoot, data, quadFileUri)));
  }
  /**
   * Reads from TRIG with Jena API into Dataset 1, transforms one named Model from that Dataset into
   * Signingframework's API GraphCollection with one NamedGraph, transforms (converts) that
   * NamedGraph into Jena's Model, and checks if the resulting Model is the same as original Model.
   *
   * <p>Note: the Javadoc was moved above {@code @Test} — a doc comment placed between an
   * annotation and the method is ignored by the javadoc tool.
   */
  @Test
  public void namedGraphToModelTest() throws Exception {

    for (String resourceFile : RESOURCE_FILES) {

      // prepare GraphCollection with NamedGraph to be converted:
      Dataset dataset = DatasetFactory.createMem();
      // Fix: try-with-resources — the original leaked the stream if parsing threw.
      try (InputStream is = this.getClass().getResourceAsStream(resourceFile)) {
        RDFDataMgr.read(dataset, is, RDFFormat.TRIG.getLang());
      }
      String modelName = dataset.listNames().next();
      Model model1 = dataset.getNamedModel(modelName);
      // this method is not tested here and used just for input
      // generation and to make it easier Namedgraph<->Model comparison
      // (but it's tested in other method, see modelToGraphCollectionTest())
      GraphCollection gc = ModelConverter.modelToGraphCollection(modelName, dataset);
      String graphName = null;
      for (NamedGraph g : gc.getGraphs()) {
        if (!g.getName().isEmpty() && g.getName().contains(modelName)) {
          graphName = g.getName();
          break;
        }
      }
      // Fix: fail with a clear message instead of passing a null graph name to the converter.
      Assert.assertNotNull("No NamedGraph found matching model name " + modelName, graphName);

      // test convert from NamedGraph of GraphCollection into Model
      Model model2 = ModelConverter.namedGraphToModel(graphName, gc);
      Dataset dataset2 = DatasetFactory.createMem();
      dataset2.addNamedModel(modelName, model2);
      // TODO maybe chng the API so that the prefix map is taken care of in the converter:
      // if it makes sense from the the usage of this in Assembler point of view
      dataset2.getDefaultModel().setNsPrefixes(dataset2.getNamedModel(modelName).getNsPrefixMap());

      File outFile = testFolder.newFile();
      // Fix: try-with-resources — the original leaked the stream if writing threw.
      try (OutputStream os = new FileOutputStream(outFile)) {
        RDFDataMgr.write(os, dataset2, RDFFormat.TRIG.getLang());
      }

      // make sure that the original Model that was used to generate test input
      // GraphCollection with NamedGraph is isomorphic with the Model after
      // conversion is applied:
      Assert.assertTrue(model1.listStatements().hasNext() && model2.listStatements().hasNext());
      Assert.assertTrue(model1.isIsomorphicWith(model2));
    }
  }
 @Override
 protected void writeTuples(File f, List<Triple> tuples) throws FileNotFoundException {
   // Collect the tuples into an in-memory graph, then serialize the graph to f
   // in the format reported by getLanguage().
   Graph g = GraphFactory.createGraphMem();
   for (Triple t : tuples) {
     g.add(t);
   }
   // Fix: the original opened a FileOutputStream and never closed it, leaking the
   // file handle; try-with-resources guarantees the stream is closed.
   try (OutputStream out = new FileOutputStream(f)) {
     RDFDataMgr.write(out, g, getLanguage());
   } catch (FileNotFoundException e) {
     throw e; // declared by the signature
   } catch (IOException e) {
     // close() may throw IOException, which this @Override cannot declare.
     throw new RuntimeException("Failed to close output stream for " + f, e);
   }
 }
Example #4
0
  /**
   * Serves one graph of the dataset as RDF over HTTP GET.
   *
   * <p>Flow: negotiate the response media type, open the servlet output stream, then do all
   * dataset access inside a beginRead()/endRead() section. Errors are reported through
   * ServletOps helpers.
   */
  @Override
  protected void doGet(HttpAction action) {
    // Assume success - do the set up before grabbing the lock.
    // Sets content type.
    MediaType mediaType = ActionLib.contentNegotationRDF(action);

    ServletOutputStream output;
    try {
      output = action.response.getOutputStream();
    } catch (IOException ex) {
      // NOTE(review): execution continues only if errorOccurred returns normally;
      // presumably it throws to abort the request — confirm, otherwise `output`
      // is null when wrapped below.
      ServletOps.errorOccurred(ex);
      output = null;
    }

    TypedOutputStream out = new TypedOutputStream(output, mediaType);
    Lang lang = RDFLanguages.contentTypeToLang(mediaType.getContentType());

    if (action.verbose)
      action.log.info(
          format(
              "[%d]   Get: Content-Type=%s, Charset=%s => %s",
              action.id, mediaType.getContentType(), mediaType.getCharset(), lang.getName()));

    // All target/graph access happens inside the read section; endRead() is
    // guaranteed by the finally block.
    action.beginRead();
    setCommonHeaders(action.response);
    try {
      Target target = determineTarget(action);
      if (action.log.isDebugEnabled()) action.log.debug("GET->" + target);
      boolean exists = target.exists();
      if (!exists) ServletOps.errorNotFound("No such graph: <" + target.name + ">");
      // If we want to set the Content-Length, we need to buffer.
      // response.setContentLength(??) ;
      String ct = lang.getContentType().toHeaderString();
      action.response.setContentType(ct);
      Graph g = target.graph();
      // Special case RDF/XML to be the plain (faster, less readable) form
      RDFFormat fmt =
          (lang == Lang.RDFXML)
              ? RDFFormat.RDFXML_PLAIN
              : RDFWriterRegistry.defaultSerialization(lang);
      try {
        RDFDataMgr.write(out, g, fmt);
      } catch (JenaException ex) {
        // Some RDF/XML data is unwritable. All we can do is pretend it's a bad
        // request (inappropriate content type).
        // Good news - this happens before any output for RDF/XML-ABBREV.
        if (fmt.getLang().equals(Lang.RDFXML))
          ServletOps.errorBadRequest("Failed to write output in RDF/XML: " + ex.getMessage());
        else ServletOps.errorOccurred("Failed to write output: " + ex.getMessage(), ex);
      }
      ServletOps.success(action);
    } finally {
      action.endRead();
    }
  }
  /**
   * Reads from TRIG with Jena API into Dataset 1, transforms one named graph from that Dataset into
   * Signingframework's API GraphCollection and writes it with Signingframework's API, reads the
   * result with Jena API into Dataset 2, and checks if the specified named graph model from Dataset
   * 1 is isomorphic with the same named graph model from Dataset 2.
   *
   * <p>Note: the Javadoc was moved above {@code @Test} — a doc comment placed between an
   * annotation and the method is ignored by the javadoc tool.
   */
  @Test
  public void modelToGraphCollectionTest() throws Exception {

    for (String resourceFile : RESOURCE_FILES) {

      // prepare the input Dataset containing the Model to be converted
      File outFile = testFolder.newFile();
      Dataset dataset = DatasetFactory.createMem();
      // Fix: try-with-resources — the original leaked the stream if parsing threw.
      try (InputStream is = this.getClass().getResourceAsStream(resourceFile)) {
        RDFDataMgr.read(dataset, is, RDFFormat.TRIG.getLang());
      }

      // test the conversion from the Model to the NamedGraph
      String modelName = dataset.listNames().next();
      Model model = dataset.getNamedModel(modelName);
      // the method to be tested
      GraphCollection gc = ModelConverter.modelToGraphCollection(modelName, dataset);
      TriGPlusWriter.writeFile(gc, outFile.getAbsolutePath(), false);

      // check that the resulting graph collection is a representation
      // of the converted model. For this, read the resulting graph collection
      // as a Model with Jena API
      Dataset dataset2 = DatasetFactory.createMem();
      try (InputStream is2 = new FileInputStream(outFile)) {
        RDFDataMgr.read(dataset2, is2, RDFFormat.TRIG.getLang());
      }
      Model model2 = dataset2.getNamedModel(modelName);
      File outFile2 = testFolder.newFile();
      try (OutputStream os = new FileOutputStream(outFile2)) {
        RDFDataMgr.write(os, dataset2, RDFFormat.TRIG.getLang());
      }

      // check that the model obtained from resulting graph collection is
      // a representation of the original converted model.
      Assert.assertTrue(model.listStatements().hasNext() && model2.listStatements().hasNext());
      Assert.assertTrue(model.isIsomorphicWith(model2));
    }
  }
Example #6
0
  /**
   * Runs a CONSTRUCT query collecting all triples whose subject is {@code var} (resolved
   * against the example BASE) and prints them to stdout as N-Triples.
   *
   * @param model the model to query
   * @param var subject IRI (relative to {@code http://example.org/inst/}) to describe
   */
  public static void outputInfo(Model model, String var) {
    // compose the query
    // NOTE(review): `var` is spliced into the query string unescaped — this is only safe
    // when callers pass trusted, IRI-valid values; confirm call sites.
    String sparql =
        "BASE <http://example.org/inst/>"
            + "CONSTRUCT { <"
            + var
            + "> ?P ?O } "
            + "WHERE { <"
            + var
            + "> ?P ?O }";

    // carry out the query
    Query query = QueryFactory.create(sparql);
    QueryExecution qe = QueryExecutionFactory.create(query, model);
    try {
      Model results = qe.execConstruct();

      // print inferred triples
      System.out.println();
      System.out.println(var + ":");
      RDFDataMgr.write(System.out, results, RDFLanguages.NT);
    } finally {
      // Fix: the original never closed the QueryExecution, leaking its resources.
      qe.close();
    }
  }
  /**
   * Computes the triples present in the default model of db1 but missing from db2 and
   * writes them as Turtle to a fixed output file.
   *
   * @param args exactly two dataset locations: db1 and db2
   */
  public static void main(String[] args) throws IOException {
    if (args.length != 2) {
      err.println("usage: difference <db1> <db2>");
      // Fix: the original fell through after printing usage and crashed on args[0]
      // with ArrayIndexOutOfBoundsException.
      return;
    }

    Dataset ds1 = dataset_(args[0]);
    Dataset ds2 = dataset_(args[1]);
    Model m1 = ds1.getDefaultModel();
    Model m2 = ds2.getDefaultModel();
    System.out.println(m1.size());
    System.out.println(m2.size());

    // Triples in m1 that do not occur in m2.
    Model m1_minus_m2 = m1.difference(m2);
    // Fix: try-with-resources — the original never closed the output stream.
    try (FileOutputStream out =
        new FileOutputStream("missing-orthologousMatch-in-biological-concepts-db.ttl")) {
      RDFDataMgr.write(out, m1_minus_m2, Lang.TURTLE);
    }

    ds1.close();
    ds2.close();
  }
  /**
   * Serializes the given model to a file whose name is derived from the model's
   * identifier property; records without an identifier are skipped with a warning.
   *
   * @param model the model to persist
   * @throws MetafactureException if writing the file fails
   */
  @Override
  public void process(final Model model) {
    String identifier = null;
    try {
      identifier =
          model
              .listObjectsOfProperty(model.createProperty(filenameUtil.property))
              .next()
              .toString();
      LOG.debug("Going to store identifier=" + identifier);
    } catch (NoSuchElementException e) {
      // No identifier property on this record: nothing sensible to name the file after.
      LOG.warn("No identifier => cannot derive a filename for " + model.toString());
      return;
    }

    // Use a slice of the identifier as a subdirectory to avoid one huge flat directory.
    String directory = identifier;
    if (directory.length() >= filenameUtil.endIndex) {
      directory = directory.substring(filenameUtil.startIndex, filenameUtil.endIndex);
    }
    final String file =
        FilenameUtils.concat(
            filenameUtil.target,
            FilenameUtils.concat(
                directory + File.separator, identifier + "." + filenameUtil.fileSuffix));
    LOG.debug("Write to " + file);
    filenameUtil.ensurePathExists(file);

    try (final Writer writer =
        new OutputStreamWriter(new FileOutputStream(file), filenameUtil.encoding)) {
      // Serialize to an intermediate buffer first, then copy to the file.
      final StringWriter tripleWriter = new StringWriter();
      RDFDataMgr.write(tripleWriter, model, this.serialization);
      IOUtils.write(tripleWriter.toString(), writer);
      // Fix: removed the redundant writer.close() — try-with-resources already closes
      // the writer, and the explicit call caused a double close.
    } catch (IOException e) {
      // Fix: dropped e.printStackTrace(); the wrapped exception already carries the cause.
      throw new MetafactureException(e);
    }
  }
  @Test
  public void directDataLinkForDefaultAndNamedGraphs() throws IOException {
    // Build an assembler description that loads one triples file into both the
    // default graph and a named graph.
    final Model spec = createDefaultModel();
    final Path triplesFile = createTempFile("simpleExample", ".nt");
    final Resource triplesUri = spec.createResource(triplesFile.toFile().toURI().toString());
    final Resource root = spec.createResource("test:simpleExample");
    root.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem);
    // default graph source
    root.addProperty(data, triplesUri);
    // named graph definition pointing at the same file
    final Resource graphDef = spec.createResource("test:namedGraphDef");
    root.addProperty(pNamedGraph, graphDef);
    final Resource graphLabel = spec.createResource("test:namedGraphExample");
    graphDef.addProperty(type, MemoryModel);
    graphDef.addProperty(pGraphName, graphLabel);
    graphDef.addProperty(data, triplesUri);

    try (OutputStream out = new FileOutputStream(triplesFile.toFile())) {
      write(out, spec, NTRIPLES);
    }

    final Dataset dataset = assemble(root);

    // Both graphs were loaded from the same file, so each must contain the same triples.
    final Model[] loadedModels = {
      dataset.getDefaultModel(), dataset.getNamedModel(graphLabel.getURI())
    };
    for (final Model m : loadedModels) {
      assertTrue(m.contains(root, pNamedGraph, graphDef));
      assertTrue(m.contains(graphDef, pGraphName, graphLabel));
      assertTrue(m.contains(root, data, triplesUri));
    }
    // Exactly one named graph should exist: the one we declared.
    final Iterator<Node> graphNodes = dataset.asDatasetGraph().listGraphNodes();
    assertTrue(graphNodes.hasNext());
    assertEquals(graphLabel.asNode(), graphNodes.next());
    assertFalse(graphNodes.hasNext());
  }
Example #10
0
  /** Loads the N-Triples file previously written for this keyword and dumps it to stdout. */
  public void readVideosFromRDF(String keyword) {
    final Model videoModel = ModelFactory.createDefaultModel();
    videoModel.read(keyword + ".nt", "NTRIPLES");
    RDFDataMgr.write(System.out, videoModel, Lang.NTRIPLES);
  }
Example #11
0
  /**
   * Queries the YouTube search API for videos matching the keyword and writes one RDF
   * resource per result (title, description, thumbnail, id) to {@code <keyword>.nt}.
   *
   * <p>Best-effort: any failure is logged and swallowed, matching the original behavior.
   */
  public void writeVideosToRDF(String keyword) {

    // SECURITY NOTE(review): a real-looking API key is hard-coded and committed here;
    // it should be moved to configuration/environment and the exposed key revoked.
    String api_key = "AIzaSyCZO2nHBNMSGgRg4VHMZ9P8dWT0H23J-Fc";
    String yt_url =
        "https://www.googleapis.com/youtube/v3/search?part=snippet&q="
            + keyword
            + "&type=video&videoCaption=closedCaption&key="
            + api_key
            + "&format=5&maxResults=10&v=2";

    Model model = ModelFactory.createDefaultModel();

    try {
      // Read the whole JSON response body into one string.
      URL url = new URL(yt_url);
      StringBuilder responseBuilder = new StringBuilder();
      // Fix: try-with-resources — the original never closed the reader.
      // NOTE(review): InputStreamReader uses the platform default charset; the API
      // returns UTF-8, so an explicit charset would be safer — confirm before changing.
      try (BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()))) {
        String line;
        while ((line = br.readLine()) != null) {
          responseBuilder.append(line);
        }
      }

      JSONObject nodeRoot = new JSONObject(responseBuilder.toString());
      JSONArray jsonArray = (JSONArray) nodeRoot.get("items");

      for (int i = 0; i < jsonArray.length(); i++) {
        JSONObject obj = jsonArray.getJSONObject(i);

        JSONObject snippet = (JSONObject) obj.get("snippet");
        String description = (String) snippet.get("description");
        String titleOfVideo = (String) snippet.get("title");

        JSONObject thumbnails = (JSONObject) snippet.get("thumbnails");
        JSONObject thumbnail = (JSONObject) thumbnails.get("high");
        String thumbnailURL = (String) thumbnail.get("url");

        JSONObject id = (JSONObject) obj.get("id");
        String videoId = (String) id.get("videoId");

        // One resource per video with its basic metadata as plain string properties.
        Resource video = model.createResource("video" + i);
        video.addProperty(model.createProperty("title"), titleOfVideo);
        video.addProperty(model.createProperty("description"), description);
        video.addProperty(model.createProperty("thumbnail"), thumbnailURL);
        video.addProperty(model.createProperty("id"), videoId);
      }

      // Fix: try-with-resources — the original never closed the output stream.
      try (FileOutputStream fos = new FileOutputStream(keyword + ".nt")) {
        RDFDataMgr.write(fos, model, Lang.NTRIPLES);
      }

    } catch (Exception ex) {
      // Deliberately best-effort: log and continue (behavior preserved from original).
      ex.printStackTrace();
    }
  }