Example #1
  public static void main(String[] args) {
    String personURI = "http://someone/doge";
    String givenName = "Doge";
    String familyName = "Jenas";
    String fullName = givenName + " " + familyName;
    String mail = "mailto:[email protected]";
    String friend = "Youcai Li";

    String inputPath = "src/main/resources/JenaManipulation.ttl";
    Model model = ModelFactory.createDefaultModel();
    model.read(inputPath, "TTL");

    model
        .createResource(personURI)
        .addProperty(VCARD.FN, fullName)
        .addProperty(VCARD.Given, givenName)
        .addProperty(VCARD.Family, familyName)
        .addProperty(FOAF.mbox, mail)
        .addProperty(FOAF.knows, model.createResource().addProperty(FOAF.name, friend));

    String outFileInTTL = "src/main/resources/JenaManipulationOutput.ttl";
    String outFileInXML = "src/main/resources/JenaManipulationOutput.xml";
    // try-with-resources ensures both streams are closed even if a write fails
    try (FileOutputStream outputStreamWithTTL = new FileOutputStream(outFileInTTL);
        FileOutputStream outputStreamWithXML = new FileOutputStream(outFileInXML)) {
      model.write(outputStreamWithTTL, "TTL");
      model.write(outputStreamWithXML, "RDF/XML");
    } catch (IOException e) {
      System.out.println(e);
    }
  }
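
A quick way to sanity-check Example #1 is to read the Turtle file back and compare it with the in-memory model. A minimal sketch, assuming it is appended at the end of main after the writes above:

    // Minimal round-trip check: reload the Turtle output and compare with the in-memory model.
    Model reloaded = ModelFactory.createDefaultModel()
        .read("src/main/resources/JenaManipulationOutput.ttl", "TTL");
    System.out.println("round trip isomorphic: " + model.isIsomorphicWith(reloaded));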
Example #2
  public static void main(String... argv) throws Exception {
    // //Model model = ModelFactory.createDefaultModel() ;
    // //String x = "<s> <p> 'verify it works' ." ;
    //
    //
    // //Reader sr = getTTLReader();
    // //model.read(sr, "http://example/", "TTL") ;
    // //model.read(sr, "", "TTL") ;
    // //model.read( getRDFInput() );
    // Model ttl = ModelFactory.createDefaultModel().read( getTTLInput(),
    // "", "TTL");
    // Model rdf = ModelFactory.createDefaultModel().read( getRDFInput(),
    // "", "RDF/XML-ABBREV");
    //
    // ttl.write(System.out, "RDF/XML-ABBREV") ;
    // System.out.println("-----") ;
    // // model.setNsPrefix("ex", "http://example/") ;
    // rdf.write(System.out, "N-TRIPLES") ;
    // System.out.println("-----") ;
    // System.out.println( getTTLName() );
    // System.out.println( "ttl iso rdf: "+ttl.isIsomorphicWith(rdf));
    //
    // System.out.println( getRDFName() );
    // System.out.println( "rdf iso ttl: "+rdf.isIsomorphicWith(ttl));

    String[] lines = {
      "<rdf:RDF",
      "  xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">",
      "  <rdf:Description rdf:about=\"e\">",
      "    <p5>verify base works</p5>",
      "  </rdf:Description>",
      "</rdf:RDF>"
    };

    String eol = System.getProperty("line.separator");
    StringBuilder sb = new StringBuilder();
    for (String l : lines) {
      sb.append(l).append(eol);
    }

    Model model = ModelFactory.createDefaultModel();

    StringReader sr = new StringReader(sb.toString());
    model.read(sr, "http://example/");
    model.write(System.out, "N-TRIPLES");
    System.out.println("-----");
    model.setNsPrefix("ex", "http://example/");
    model.write(System.out, "RDF/XML-ABBREV", "http://another/");
  }
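
The base URI passed to read(sr, "http://example/") is what resolves the relative rdf:about="e" into an absolute IRI. A minimal sketch to confirm this, assuming it is added just before the end of main above:

    // The relative rdf:about="e" should resolve against the base to http://example/e.
    // Uses org.apache.jena.rdf.model.ResIterator.
    ResIterator subjects = model.listSubjects();
    while (subjects.hasNext()) {
      System.out.println("subject: " + subjects.next().getURI());
    }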
Example #3
 public static void main(String[] args) {
   Model m = ModelFactory.createDefaultModel();
   String nsA = "http://somewhere/else#";
   String nsB = "http://nowhere/else#";
   Resource root = m.createResource(nsA + "root");
   Property P = m.createProperty(nsA + "P");
   Property Q = m.createProperty(nsB + "Q");
   Resource x = m.createResource(nsA + "x");
   Resource y = m.createResource(nsA + "y");
   Resource z = m.createResource(nsA + "z");
   m.add(root, P, x).add(root, P, y).add(y, Q, z);
   System.out.println("# -- no special prefixes defined");
   m.write(System.out);
   System.out.println("# -- nsA defined");
   m.setNsPrefix("nsA", nsA);
   m.write(System.out);
   System.out.println("# -- nsA and cat defined");
   m.setNsPrefix("cat", nsB);
   m.write(System.out, "TURTLE");
 }
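
The setNsPrefix calls in Example #3 only affect how the model is serialized; the triples themselves are unchanged. A small sketch for inspecting what has been registered, as a hypothetical addition at the end of main:

    // Prefixes are purely a serialization hint; the registered map can be inspected directly.
    System.out.println(m.getNsPrefixMap());      // e.g. {nsA=http://somewhere/else#, cat=http://nowhere/else#} (ordering may vary)
    System.out.println(m.getNsPrefixURI("cat")); // http://nowhere/else#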
Example #4
 public void testWithContent() throws IOException {
   File f = FileUtils.tempFileName("assembler-acceptance-", ".n3");
   Model data = model("a P b; b Q c");
   try (FileOutputStream fs = new FileOutputStream(f)) {
     data.write(fs, "N3");
   }
   Resource root =
       resourceInModel(
           "x rdf:type ja:MemoryModel; x ja:content y; y ja:externalContent file:"
               + f.getAbsolutePath());
   Model m = Assembler.general.openModel(root);
   assertIsoModels(data, m);
 }
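
model(...) and resourceInModel(...) in Example #4 are Jena test helpers rather than public API. As a rough sketch, the data built by model("a P b; b Q c") corresponds to something like the following plain Jena calls; the eh:/ namespace is shown here as an assumption about the helper's default, not a documented value:

    // Hypothetical expansion of model("a P b; b Q c"); URIs assume the eh:/ test namespace.
    Model data = ModelFactory.createDefaultModel();
    String eh = "eh:/";
    data.add(data.createResource(eh + "a"), data.createProperty(eh + "P"), data.createResource(eh + "b"));
    data.add(data.createResource(eh + "b"), data.createProperty(eh + "Q"), data.createResource(eh + "c"));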
Example #5
  public static void main(String[] args) {
    DOMParser domparser = new DOMParser();

    LogCtl.setLog4j("jena-log4j.properties");

    Model model = ModelFactory.createDefaultModel();

    domparser.buildModel(model);

    String fileName = "10Authors.rdf";

    // try-with-resources closes the writer even if the serialization fails
    try (FileWriter out = new FileWriter(fileName)) {
      model.write(out, "RDF/XML-ABBREV");
    } catch (IOException e1) {
      e1.printStackTrace();
    }
  }
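
Jena's RDF/XML writer is generally better fed an OutputStream than a Writer, because it can then control the character encoding and keep it consistent with the XML declaration. A sketch of that alternative to the FileWriter block above:

    // Letting Jena write to an OutputStream avoids a mismatch between the declared
    // XML encoding and the encoding of the Writer. Uses java.io.FileOutputStream.
    try (FileOutputStream os = new FileOutputStream(fileName)) {
      model.write(os, "RDF/XML-ABBREV");
    } catch (IOException e) {
      e.printStackTrace();
    }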
Example #6
  private void _genSFPinQueue(List<String> keywords, UUID jobID) {
    // create timestamp
    Date now = Calendar.getInstance().getTime();
    // create jobqueue entry
    jobqueue.put(jobID, new SFPGenJob(SFPGenJob.PROCESSING, now));
    System.out.println("[_genSFPinQueue] size of job queue: " + jobqueue.size());
    System.out.println("Job accessible @ " + this.jobqueue.toString());
    // map for the semantic concepts found in the ontology and their
    // corresponding keyword, used for searching them
    Map<String, String> correspondingKeywords = new HashMap<String, String>();

    KeyWordSearch s = new KeyWordSearch();
    List<SearchResult> res = s.search(keywords, maxSearchResults, correspondingKeywords);
    System.out.println("Resultlist from KW search: " + res);
    List<String> request = KeyWordSearch.toUriList(res);
    System.out.println("Starting BFS...");
    BreadthFirstSearch lc = new BreadthFirstSearch();
    ResultSet result = lc.getConnections(request, maxSearchDepth);
    System.out.println("...Done");

    // -- 2) create the graph
    System.out.println("Creating the initial graph...");
    WTPGraph graph = WTPGraph.createFromResultSet(result, "Semantic Fingerprint");
    System.out.println("...Done");

    // -- 3) remove specific edges
    // graph.removeEdgesByName("ject");
    // graph.removeEdgesByName("paradigm");
    // graph.removeEdgesByName("influencedBy");
    // graph.removeEdgesByName("influenced");
    // graph.removeEdgesByName("typing");
    // graph.removeEdgesByName("license");

    // -- 4) tidy graph
    System.out.print(
        "Tidying graph ("
            + graph.getNodeCount()
            + " Nodes, "
            + graph.getEdgeCount()
            + " Edges) ...");
    GraphCleaner c = new GraphCleaner(graph.getGraph(), result.requestNodes);
    LinkedList<graph.GraphCleaner.Path> paths = c.clean(maxPathLength, maxPathExtensionLength);
    System.out.println(
        " Done ("
            + graph.getNodeCount()
            + " Nodes, "
            + graph.getEdgeCount()
            + " Edges, "
            + paths.size()
            + " Paths)");

    // --4.2) heuristics finger print selection
    InterConceptConntecting heuristic = new InterConceptConntecting();

    /** Filters all Nodes that have paths to other Nodes which correspond to a different keyword */
    // heuristic.filterInterconntection(graph, paths,
    // correspondingKeywords);

    /** Filters the n Nodes which occur most frequently in the paths */
    heuristic.filterNMostFrequentlyOccuring(
        graph, paths, numRelevantNodesFilter, correspondingKeywords);

    /** Selects the cluster which corresponds to the most different keywords */
    heuristic.filterClusterByInterconnectionLevel(graph, correspondingKeywords);

    /** Selects the biggest cluster */
    heuristic.filterClusterBySize(graph);

    /** Selects the cluster whose nodes occur most frequently in the paths */
    // ArrayList<ArrayList<String>> graph = new ArrayList<ArrayString>();
    // convert WTP graph to RDF
    Model rdfgraph = WTPGraph.getRDFGraph(graph);
    rdfgraph.write(System.out);
    /*
     * ObjectMapper mapper = new ObjectMapper();
     *
     *
     * try { return
     * makeCORS(Response.status(Status.OK).entity(mapper.writeValueAsString
     * (rdfgraph.write(System.out))), ""); } catch (JsonGenerationException
     * e) { // TODO Auto-generated catch block e.printStackTrace(); } catch
     * (JsonMappingException e) { // TODO Auto-generated catch block
     * e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated
     * catch block e.printStackTrace();
     *
     * } return makeCORS(Response.status(Status.OK), "");
     */

    OutputStream output =
        new OutputStream() {
          private StringBuilder string = new StringBuilder();

          @Override
          public void write(int b) throws IOException {
            this.string.append((char) b);
          }

          public String toString() {
            return this.string.toString();
          }
        };
    rdfgraph.write(output);
    // put result in sfplist
    this.sfplist.put(jobID, output.toString());

    // get the job object of current jobid and update it
    SFPGenJob currJob = this.jobqueue.get(jobID);
    currJob.updateStatus(SFPGenJob.FINISHED);
    // update timestamp
    now = Calendar.getInstance().getTime();
    currJob.updateTimestamp(now);
    this.jobqueue.put(jobID, currJob);
  }
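
The anonymous OutputStream in Example #6 appends each byte as a char, which only round-trips correctly for ASCII output. A simpler, encoding-safe sketch for capturing the serialization as a String, assuming the default RDF/XML writer emits UTF-8 to an OutputStream:

    // Uses java.io.ByteArrayOutputStream and java.nio.charset.StandardCharsets
    // instead of the byte-to-char cast above.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    rdfgraph.write(bos);
    String serialized = new String(bos.toByteArray(), StandardCharsets.UTF_8);
    this.sfplist.put(jobID, serialized);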