Example #1
  /**
   * Reads TriG with the Jena API into Dataset 1, transforms one named Model from that Dataset into
   * Signingframework's API GraphCollection with one NamedGraph, converts that NamedGraph back into
   * a Jena Model, and checks that the resulting Model is isomorphic with the original Model.
   */
  @Test
  public void namedGraphToModelTest() throws Exception {

    for (String resourceFile : RESOURCE_FILES) {

      // prepare GraphCollection with NamedGraph to be converted:
      InputStream is = this.getClass().getResourceAsStream(resourceFile);
      Dataset dataset = DatasetFactory.createMem();
      RDFDataMgr.read(dataset, is, RDFFormat.TRIG.getLang());
      is.close();
      String modelName = dataset.listNames().next();
      Model model1 = dataset.getNamedModel(modelName);
      // this method is not under test here; it is used only for input generation
      // and to make the NamedGraph<->Model comparison easier
      // (it is tested separately, see modelToGraphCollectionTest())
      GraphCollection gc = ModelConverter.modelToGraphCollection(modelName, dataset);
      LinkedList<NamedGraph> graphs = gc.getGraphs();
      String graphName = null;
      for (NamedGraph g : graphs) {
        if (!g.getName().isEmpty() && g.getName().contains(modelName)) {
          graphName = g.getName();
          break;
        }
      }
      // use this when debugging:
      // File outFile0 = File.createTempFile("won", ".trig");
      // System.out.println(outFile0);
      // OutputStream os0 = new FileOutputStream(outFile0);
      // TriGPlusWriter.writeFile(gc, outFile0.getAbsolutePath(), false);
      // os0.close();

      // test convert from NamedGraph of GraphCollection into Model
      Model model2 = ModelConverter.namedGraphToModel(graphName, gc);
      Dataset dataset2 = DatasetFactory.createMem();
      dataset2.addNamedModel(modelName, model2);
      // TODO maybe change the API so that the prefix map is taken care of in the converter,
      // if that makes sense from the point of view of its usage in the Assembler
      dataset2.getDefaultModel().setNsPrefixes(dataset2.getNamedModel(modelName).getNsPrefixMap());

      File outFile = testFolder.newFile();
      // use this when debugging:
      // File outFile = File.createTempFile("won", ".trig");
      // System.out.println(outFile);
      OutputStream os = new FileOutputStream(outFile);
      RDFDataMgr.write(os, dataset2, RDFFormat.TRIG.getLang());
      os.close();

      // make sure that the original Model that was used to generate test input
      // GraphCollection with NamedGraph is isomorphic with the Model after
      // conversion is applied:
      Assert.assertTrue(model1.listStatements().hasNext() && model2.listStatements().hasNext());
      Assert.assertTrue(model1.isIsomorphicWith(model2));
    }
  }
Example #2
  /**
   * Reads TriG with the Jena API into Dataset 1, transforms one named graph from that Dataset into
   * Signingframework's API GraphCollection and writes it with Signingframework's API, reads the
   * result with the Jena API into Dataset 2, and checks that the specified named graph model from
   * Dataset 1 is isomorphic with the same named graph model from Dataset 2.
   */
  @Test
  public void modelToGraphCollectionTest() throws Exception {

    for (String resourceFile : RESOURCE_FILES) {

      // prepare the input Dataset containing the Model to be converted
      InputStream is = this.getClass().getResourceAsStream(resourceFile);
      File outFile = testFolder.newFile();
      // use this when debugging:
      // File outFile = File.createTempFile("won", ".trig");
      // System.out.println(outFile);
      Dataset dataset = DatasetFactory.createMem();
      RDFDataMgr.read(dataset, is, RDFFormat.TRIG.getLang());
      is.close();

      // test the conversion from the Model to the NamedGraph
      String modelName = dataset.listNames().next();
      Model model = dataset.getNamedModel(modelName);
      // the method to be tested
      GraphCollection gc = ModelConverter.modelToGraphCollection(modelName, dataset);
      TriGPlusWriter.writeFile(gc, outFile.getAbsolutePath(), false);

      // check that the resulting graph collection is a representation
      // of the converted model. For this, read the resulting graph collection
      // as a Model with Jena API
      InputStream is2 = new FileInputStream(outFile);
      Dataset dataset2 = DatasetFactory.createMem();
      RDFDataMgr.read(dataset2, is2, RDFFormat.TRIG.getLang());
      is2.close();
      Model model2 = dataset2.getNamedModel(modelName);
      File outFile2 = testFolder.newFile();
      // use this when debugging:
      // File outFile2 = File.createTempFile("won", ".trig");
      // System.out.println(outFile2);
      OutputStream os = new FileOutputStream(outFile2);
      RDFDataMgr.write(os, dataset2, RDFFormat.TRIG.getLang());
      os.close();

      // check that the model obtained from resulting graph collection is
      // a representation of the original converted model.
      Assert.assertTrue(model.listStatements().hasNext() && model2.listStatements().hasNext());
      Assert.assertTrue(model.isIsomorphicWith(model2));
    }
  }
Example #3
  private static RdfStream getRdfStreamFromResource(final String resourcePath, final Lang lang) {
    final Model model = createDefaultModel();

    RDFDataMgr.read(model, WebACRolesProviderTest.class.getResourceAsStream(resourcePath), lang);

    final List<Triple> triples = new ArrayList<>();
    model
        .listStatements()
        .forEachRemaining(
            x -> {
              final Triple t = x.asTriple();
              if (t.getObject().isURI() && t.getObject().getURI().startsWith(FEDORA_URI_PREFIX)) {
                triples.add(
                    new Triple(
                        t.getSubject(),
                        t.getPredicate(),
                        createURI(
                            FEDORA_PREFIX
                                + t.getObject().getURI().substring(FEDORA_URI_PREFIX.length()))));
              } else {
                triples.add(t);
              }
            });

    return new RdfStream(triples);
  }
Example #4
  // ** Worker.
  private static void readUtil(Graph graph, String uri, int limit) {
    // We need to do this ourselves, not via riot, to use the webStreamManager
    StreamRDF sink = StreamRDFLib.graph(graph);
    sink = new SinkRDFLimited(sink, limit);

    TypedInputStream input = Fuseki.webStreamManager.open(uri);
    RDFDataMgr.parse(sink, input, uri);
  }
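
  The SinkRDFLimited class used above is project-specific and not shown in this
  listing. Below is a minimal sketch of what such a limiting wrapper might look
  like, assuming Jena's StreamRDFWrapper base class and drop-after-limit
  semantics (the class name here is hypothetical; requires
  org.apache.jena.riot.system.{StreamRDF, StreamRDFWrapper},
  org.apache.jena.graph.Triple, and org.apache.jena.sparql.core.Quad):

  // Hypothetical sketch only; SinkRDFLimited's real behavior may differ.
  static class LimitedStreamRDF extends StreamRDFWrapper {
    private final long limit;
    private long count = 0;

    LimitedStreamRDF(StreamRDF sink, long limit) {
      super(sink);
      this.limit = limit;
    }

    @Override
    public void triple(Triple triple) {
      if (count++ < limit) super.triple(triple); // drop triples beyond the limit
    }

    @Override
    public void quad(Quad quad) {
      if (count++ < limit) super.quad(quad); // same policy for quads
    }
  }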
Example #5
 @Override
 protected void writeTuples(File f, List<Triple> tuples) throws FileNotFoundException {
   Graph g = GraphFactory.createGraphMem();
   for (Triple t : tuples) {
     g.add(t);
   }
   // RDFDataMgr.write does not close the stream, so close it ourselves
   OutputStream out = new FileOutputStream(f);
   try {
     RDFDataMgr.write(out, g, getLanguage());
   } finally {
     IOUtils.closeQuietly(out);
   }
 }
Example #6
 /**
  * Try to read content returned as text/plain.
  *
  * @param uri the URI to read from
  * @return the parsed model, or an empty model if parsing failed
  */
 private Model tryRead(String uri) {
   Model m = ModelFactory.createDefaultModel();
   try {
     m = RDFDataMgr.loadModel(uri, Lang.NTRIPLES);
   } catch (RiotException r) {
     Log.debug("Resource could not be parsed:", r.getMessage());
   }
   return m;
 }
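
 A minimal usage sketch (the URI is illustrative): because tryRead swallows
 RiotExceptions, callers can detect failure only by inspecting the returned
 model.

   Model m = tryRead("http://example.org/data"); // hypothetical URI
   if (m.isEmpty()) {
     // the resource was empty, or it could not be parsed as N-Triples
   }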
Example #7
  // Private method for checking forward linking
  private void checkForForwardLinking() {
    for (String uri : uriSet) {
      CachedHTTPResource httpResource =
          (CachedHTTPResource)
              DiachronCacheManager.getInstance()
                  .getFromCache(DiachronCacheManager.HTTP_RESOURCE_CACHE, uri);

      if (httpResource == null
          || (httpResource.getResponses() == null
              && httpResource.getDereferencabilityStatusCode() != StatusCode.BAD)) {
        this.notFetchedQueue.add(uri);
      } else {
        logger.info("Checking resource: {}. URIs left: {}.", httpResource.getUri(), uriSet.size());

        // We perform a semantic lookup using heuristics to check if we
        // really need to try parsing or not
        if (HTTPResourceUtils.semanticURILookup(httpResource)) {
          logger.info(
              "Trying to find any dereferencable forward links for {}.", httpResource.getUri());
          if (Dereferencer.hasValidDereferencability(httpResource)) {
            logger.info("Dereferencable resource {}.", httpResource.getUri());

            Model m = RDFDataMgr.loadModel(httpResource.getUri()); // load partial model
            Resource r = m.createResource(httpResource.getUri());
            List<Statement> stmtList =
                m.listStatements(r, (Property) null, (RDFNode) null).toList();

            if (stmtList.size() > 1) {
              // ok
              logger.info("A description exists for resource {}.", httpResource.getUri());

              totalDerefDataWithSub++;
            } else {
              // not ok
              this.createNotValidForwardLink(httpResource.getUri());
            }
          }
        } else {
          logger.info("Non-meaningful dereferencable resource {}.", httpResource.getUri());
          this.createNotValidForwardLink(httpResource.getUri());
        }
      }
    }
  }
Example #8
File: Helper.java Project: GeoKnow/rsine
 public static DatasetGraph initFuseki(URL rdfFile, String datasetName) {
   URI rdfFileUri = new File(rdfFile.getFile()).toURI();
   DatasetGraph datasetGraph = DatasetGraphFactory.createMem();
   RDFDataMgr.read(datasetGraph, rdfFileUri.toString());
   ServerConfig serverConfig = FusekiConfig.defaultConfiguration(datasetName, datasetGraph, true);
   SPARQLServer fusekiServer = new SPARQLServer(serverConfig);
   Fuseki.setServer(fusekiServer);
   fusekiServer.start();
   return datasetGraph;
 }
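
 A minimal usage sketch (the resource name and dataset name are illustrative;
 3030 is the default port of the embedded SPARQLServer and /query is the
 standard query service path under the default configuration):

   URL rdfFile = Helper.class.getResource("/test-data.rdf"); // hypothetical resource
   DatasetGraph dsg = Helper.initFuseki(rdfFile, "test");

   QueryExecution qe = QueryExecutionFactory.sparqlService(
       "http://localhost:3030/test/query", "SELECT * { ?s ?p ?o } LIMIT 10");
   try {
     ResultSetFormatter.out(qe.execSelect());
   } finally {
     qe.close();
   }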
Example #9
  @Override
  protected void doGet(HttpAction action) {
    // Assume success - do the set up before grabbing the lock.
    // Sets content type.
    MediaType mediaType = ActionLib.contentNegotationRDF(action);

    ServletOutputStream output;
    try {
      output = action.response.getOutputStream();
    } catch (IOException ex) {
      ServletOps.errorOccurred(ex);
      output = null;
    }

    TypedOutputStream out = new TypedOutputStream(output, mediaType);
    Lang lang = RDFLanguages.contentTypeToLang(mediaType.getContentType());

    if (action.verbose)
      action.log.info(
          format(
              "[%d]   Get: Content-Type=%s, Charset=%s => %s",
              action.id, mediaType.getContentType(), mediaType.getCharset(), lang.getName()));

    action.beginRead();
    setCommonHeaders(action.response);
    try {
      Target target = determineTarget(action);
      if (action.log.isDebugEnabled()) action.log.debug("GET->" + target);
      boolean exists = target.exists();
      if (!exists) ServletOps.errorNotFound("No such graph: <" + target.name + ">");
      // If we want to set the Content-Length, we need to buffer.
      // response.setContentLength(??) ;
      String ct = lang.getContentType().toHeaderString();
      action.response.setContentType(ct);
      Graph g = target.graph();
      // Special case RDF/XML to be the plain (faster, less readable) form
      RDFFormat fmt =
          (lang == Lang.RDFXML)
              ? RDFFormat.RDFXML_PLAIN
              : RDFWriterRegistry.defaultSerialization(lang);
      try {
        RDFDataMgr.write(out, g, fmt);
      } catch (JenaException ex) {
        // Some RDF/XML data is unwritable. All we can do is pretend it's a bad
        // request (inappropriate content type).
        // Good news - this happens before any output for RDF/XML-ABBREV.
        if (fmt.getLang().equals(Lang.RDFXML))
          ServletOps.errorBadRequest("Failed to write output in RDF/XML: " + ex.getMessage());
        else ServletOps.errorOccurred("Failed to write output: " + ex.getMessage(), ex);
      }
      ServletOps.success(action);
    } finally {
      action.endRead();
    }
  }
Example #10
 public static Model readModel(String modelFile) throws FileNotFoundException {
   FileInputStream fin = null;
   try {
     Model model = ModelFactory.createDefaultModel();
     fin = new FileInputStream(modelFile);
     RDFDataMgr.read(model, fin, Lang.NT);
     return model;
   } finally {
     IOUtils.closeQuietly(fin);
   }
 }
Example #11
  protected static Model jsonLdAsJenaModel(InputStream jsonIn, URI base)
      throws IOException, RiotException {
    Model model = ModelFactory.createDefaultModel();
    RDFDataMgr.read(model, jsonIn, base.toASCIIString(), Lang.JSONLD);
    return model;

  }
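
  A minimal usage sketch (the inline JSON-LD document and base URI are
  illustrative):

    String jsonld = "{ \"@id\": \"http://example.org/s\", "
        + "\"http://example.org/p\": \"o\" }";
    try (InputStream in = new ByteArrayInputStream(jsonld.getBytes(StandardCharsets.UTF_8))) {
      Model m = jsonLdAsJenaModel(in, URI.create("http://example.org/"));
      m.write(System.out, "N-TRIPLES"); // one triple: <.../s> <.../p> "o"
    }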
Example #12
  /** Read the RDF model from a file. */
  public static void readSemanticModelFiles() {
    logger.debug("Reading the model from a file");
    // Read the model from the file in the data directory
    String dataDir = UQasarUtil.getDataDirPath();
    String modelPath = "file:///" + dataDir + ONTOLOGYFILE;
    //		String modelPath =
    // "file:///C:/nyrhinen/Programme/jboss-as-7.1.1.Final/standalone/data/uq-ontology-model.rdf";

    OntModel model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    RDFDataMgr.read(model, modelPath);
    // Test output to standard output
    //		RDFDataMgr.write(System.out, uqModel, RDFFormat.RDFXML_PRETTY);
    logger.debug("Model read from file " + modelPath);
    UQasarUtil.setUqModel(model);
    System.out.println("Reading done.");
  }
Example #13
  /** Harvests all the triples from each URI in the {@code rdfUris} list. */
  private void harvestFromDumps() {
    for (String uri : rdfUris) {
      if (uri.isEmpty()) continue;

      logger.info("Harvesting uri [{}]", uri);

      Model model = ModelFactory.createDefaultModel();
      try {
        RDFDataMgr.read(model, uri.trim(), RDFLanguages.RDFXML);
        BulkRequestBuilder bulkRequest = client.prepareBulk();
        addModelToES(model, bulkRequest, true);
      } catch (RiotException re) {
        logger.error("Illegal xml character [{}]", re.getLocalizedMessage());
      } catch (Exception e) {
        logger.error(
            "Exception when harvesting url: {}. Details: {}", uri, e.getLocalizedMessage());
      }
    }
  }
Example #14
  public static void outputInfo(Model model, String var) {
    // compose the query
    String sparql =
        "BASE <http://example.org/inst/>"
            + "CONSTRUCT { <"
            + var
            + "> ?P ?O } "
            + "WHERE { <"
            + var
            + "> ?P ?O }";

    // carry out the query
    Query query = QueryFactory.create(sparql);
    QueryExecution qe = QueryExecutionFactory.create(query, model);
    Model results = qe.execConstruct();
    qe.close(); // free resources associated with the query execution

    // print inferred triples
    System.out.println();
    System.out.println(var + ":");
    RDFDataMgr.write(System.out, results, RDFLanguages.NT);
  }
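
  A minimal usage sketch (the instance data is illustrative). Because the query
  declares BASE <http://example.org/inst/>, a relative name passed as var
  resolves against that base:

    Model model = ModelFactory.createDefaultModel();
    model
        .createResource("http://example.org/inst/alice")
        .addProperty(model.createProperty("http://example.org/name"), "Alice");

    outputInfo(model, "alice"); // prints the triples with subject <.../inst/alice>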
Example #15
  public static void main(String[] args) throws IOException {
    if (args.length != 2) {
      err.println("usage: difference <db1> <db2>");
      return; // avoid an ArrayIndexOutOfBoundsException below
    }

    Dataset ds1 = dataset_(args[0]);
    Dataset ds2 = dataset_(args[1]);
    Model m1 = ds1.getDefaultModel();
    Model m2 = ds2.getDefaultModel();
    System.out.println(m1.size());
    System.out.println(m2.size());

    Model m1_minus_m2 = m1.difference(m2);
    OutputStream out =
        new FileOutputStream("missing-orthologousMatch-in-biological-concepts-db.ttl");
    RDFDataMgr.write(out, m1_minus_m2, Lang.TURTLE);
    out.close();

    ds1.close();
    ds2.close();
  }
Example #16
  @Override
  public void process(final Model model) {
    String identifier = null;
    try {
      identifier =
          model
              .listObjectsOfProperty(model.createProperty(filenameUtil.property))
              .next()
              .toString();
      LOG.debug("Going to store identifier=" + identifier);
    } catch (NoSuchElementException e) {
      LOG.warn("No identifier => cannot derive a filename for " + model.toString());
      return;
    }

    String directory = identifier;
    if (directory.length() >= filenameUtil.endIndex) {
      directory = directory.substring(filenameUtil.startIndex, filenameUtil.endIndex);
    }
    final String file =
        FilenameUtils.concat(
            filenameUtil.target,
            FilenameUtils.concat(
                directory + File.separator, identifier + "." + filenameUtil.fileSuffix));
    LOG.debug("Write to " + file);
    filenameUtil.ensurePathExists(file);

    try (final Writer writer =
        new OutputStreamWriter(new FileOutputStream(file), filenameUtil.encoding)) {
      final StringWriter tripleWriter = new StringWriter();
      RDFDataMgr.write(tripleWriter, model, this.serialization);
      IOUtils.write(tripleWriter.toString(), writer);
      // writer is closed by try-with-resources; no explicit close needed
    } catch (IOException e) {
      throw new MetafactureException(e);
    }
  }
Example #17
  public void readVideosFromRDF(String keyword) {
    Model model = ModelFactory.createDefaultModel();
    model.read(keyword + ".nt", "NTRIPLES");

    RDFDataMgr.write(System.out, model, Lang.NTRIPLES);
  }
Example #18
  public void writeVideosToRDF(String keyword) {

    String api_key = "AIzaSyCZO2nHBNMSGgRg4VHMZ9P8dWT0H23J-Fc";
    String yt_url =
        "https://www.googleapis.com/youtube/v3/search?part=snippet&q="
            + keyword
            + "&type=video&videoCaption=closedCaption&key="
            + api_key
            + "&format=5&maxResults=10&v=2";
    String line = "", stringArray;
    StringBuilder stringArrayBuilder = new StringBuilder();

    String titleOfVideo;
    String description;
    String thumbnailURL;
    String videoId;

    Model model = ModelFactory.createDefaultModel();

    try {
      URL url = new URL(yt_url);
      BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()));
      while ((line = br.readLine()) != null) {
        stringArrayBuilder.append(line);
      }
      br.close();
      stringArray = stringArrayBuilder.toString();

      JSONObject nodeRoot = new JSONObject(stringArray);
      JSONArray jsonArray = (JSONArray) nodeRoot.get("items");

      for (int i = 0; i < jsonArray.length(); i++) {
        JSONObject obj = jsonArray.getJSONObject(i);

        JSONObject snippet = (JSONObject) obj.get("snippet");

        description = (String) snippet.get("description");
        titleOfVideo = (String) snippet.get("title");

        JSONObject thumbnails = (JSONObject) snippet.get("thumbnails");
        JSONObject thumbnail = (JSONObject) thumbnails.get("high");
        thumbnailURL = (String) thumbnail.get("url");

        JSONObject id = (JSONObject) obj.get("id");
        videoId = (String) id.get("videoId");

        Resource video = model.createResource("video" + i);
        Property p1 = model.createProperty("title");
        video.addProperty(p1, titleOfVideo);
        Property p2 = model.createProperty("description");
        video.addProperty(p2, description);
        Property p3 = model.createProperty("thumbnail");
        video.addProperty(p3, thumbnailURL);
        Property p4 = model.createProperty("id");
        video.addProperty(p4, videoId);
      }
      FileOutputStream fos = new FileOutputStream(keyword + ".nt");
      RDFDataMgr.write(fos, model, Lang.NTRIPLES);
      fos.close();

    } catch (Exception ex) {
      ex.printStackTrace();
    }
  }
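
  A minimal sketch of how Examples #17 and #18 pair up (the keyword is
  illustrative; both methods belong to the same, unnamed class):

    writeVideosToRDF("jena");  // queries YouTube and writes the results to jena.nt
    readVideosFromRDF("jena"); // reads jena.nt back and prints it as N-Triples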