Code Example #1
  public static void main(String[] args) {

    // some definitions
    String personURI = "http://somewhere/JohnSmith";
    String givenName = "John";
    String familyName = "Smith";
    String fullName = givenName + " " + familyName;
    // create an empty model
    Model model = ModelFactory.createDefaultModel();

    // create the resource
    //   and add the properties cascading style
    Resource johnSmith =
        model
            .createResource(personURI)
            .addProperty(VCARD.FN, fullName)
            .addProperty(
                VCARD.N,
                model
                    .createResource()
                    .addProperty(VCARD.Given, givenName)
                    .addProperty(VCARD.Family, familyName));

    // now write the model in RDF/XML form to standard output
    model.write(System.out);
  }
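The example above serializes the model as RDF/XML to standard output. Writing to a file, or choosing another serialization, only changes the arguments passed to Model.write. The method below is a minimal sketch under that assumption; the file name johnsmith.ttl and the choice of Turtle are illustrative and not part of the original example. (It needs java.io.FileOutputStream, java.io.IOException and java.io.OutputStream in addition to the Jena Model import.)

  // Sketch: write the same model to a file, serialized as Turtle instead of RDF/XML.
  public static void writeModelToFile(Model model) throws IOException {
    try (OutputStream out = new FileOutputStream("johnsmith.ttl")) {
      model.write(out, "TURTLE");
    }
  }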
Code Example #2
 /**
  * Add to <code>toAdd</code> all the superclass statements needed to note that any indirect
  * subclass of <code>X = parents.item</code> has as superclass all the classes between it and X
  * and all the remaining elements of <code>parents</code>.
  */
 private static void addSuperClasses(Model m, LinkedSeq parents, Model toAdd) {
   Resource type = parents.item;
   for (StmtIterator it = m.listStatements(null, RDFS.subClassOf, type); it.hasNext(); ) {
     Resource t = it.nextStatement().getSubject();
     for (LinkedSeq scan = parents.rest; scan != null; scan = scan.rest)
       toAdd.add(t, RDFS.subClassOf, scan.item);
     addSuperClasses(m, parents.push(t), toAdd);
   }
 }
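Code Example #2 (and Code Example #17 below) depends on a LinkedSeq helper whose definition is not shown here. Judging only from how it is used (the item and rest fields and the push method), it behaves like an immutable cons list of resources; the class below is a hypothetical reconstruction added for readability, not the actual helper.

 // Hypothetical reconstruction of the LinkedSeq helper used above: an immutable
 // singly linked list of Resources whose push method prepends an element.
 private static class LinkedSeq {
   final Resource item;
   final LinkedSeq rest;

   LinkedSeq(Resource item, LinkedSeq rest) {
     this.item = item;
     this.rest = rest;
   }

   LinkedSeq push(Resource item) {
     return new LinkedSeq(item, this);
   }
 }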
Code Example #3
  public static void main(String[] args) {
    // some definitions
    String personURI = "http://somewhere/JohnSmith";
    String fullName = "John Smith";

    // create an empty model
    Model model = ModelFactory.createDefaultModel();

    // create the resource
    Resource johnSmith = model.createResource(personURI);

    // add the property
    johnSmith.addProperty(VCARD.FN, fullName);
  }
Code Example #4
 protected static void addDomainTypes(Model result, Model schema) {
   for (StmtIterator it = schema.listStatements(ANY, RDFS.domain, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     Property property = s.getSubject().as(Property.class);
     RDFNode type = s.getObject();
     for (StmtIterator x = result.listStatements(ANY, property, ANY); x.hasNext(); ) {
       Statement t = x.nextStatement();
       result.add(t.getSubject(), RDF.type, type);
     }
   }
 }
Code Example #5
 protected static void addRangeTypes(Model result, Model schema) {
   Model toAdd = ModelFactory.createDefaultModel();
   for (StmtIterator it = schema.listStatements(ANY, RDFS.range, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     RDFNode type = s.getObject();
     Property property = s.getSubject().as(Property.class);
     for (StmtIterator x = result.listStatements(ANY, property, ANY); x.hasNext(); ) {
       RDFNode ob = x.nextStatement().getObject();
       if (ob.isResource()) toAdd.add((Resource) ob, RDF.type, type);
     }
   }
   result.add(toAdd);
 }
Code Example #6
 public void testFindSubject() {
   StmtIterator iter = model.listStatements(new SimpleSelector(null, null, RDFS.Resource));
   assertTrue(iter.hasNext());
   Resource subject = iter.nextStatement().getSubject();
   iter.close();
   iter = model.listStatements(new SimpleSelector(subject, null, (RDFNode) null));
   int i = 0;
   while (iter.hasNext()) {
     i++;
     Statement stmt = iter.nextStatement();
     assertEquals(subject, stmt.getSubject());
   }
   assertEquals(3, i);
 }
Code Example #7
 protected static void addSupertypes(Model result) {
   Model temp = ModelFactory.createDefaultModel();
   for (StmtIterator it = result.listStatements(ANY, RDF.type, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     Resource c = AssemblerHelp.getResource(s);
     for (StmtIterator subclasses = result.listStatements(c, RDFS.subClassOf, ANY);
         subclasses.hasNext(); ) {
       RDFNode type = subclasses.nextStatement().getObject();
        // System.err.println(">> adding super type: subject " + s.getSubject() + ", type " + type);
       temp.add(s.getSubject(), RDF.type, type);
     }
   }
   result.add(temp);
 }
Code Example #8
  protected void setUp() throws java.lang.Exception {

    conn = TestConnection.makeAndCleanTestConnection();
    model = ModelRDB.createModel(conn, TestPackage.M_DB);

    model
        .createResource()
        .addProperty(RDF.type, RDFS.Resource)
        .addProperty(RDFS.label, "foo")
        .addProperty(RDF.value, "123");
    model
        .createResource()
        .addProperty(RDF.type, RDFS.Resource)
        .addProperty(RDFS.label, "bar")
        .addProperty(RDF.value, "123");
  }
Code Example #9
 /** Answer the subset of <code>classes</code> which have no superclass in <code>m</code>. */
 private static Set<Resource> selectRootClasses(Model m, Set<RDFNode> classes) {
   Set<Resource> roots = new HashSet<Resource>();
   for (Iterator<RDFNode> it = classes.iterator(); it.hasNext(); ) {
     Resource type = (Resource) it.next();
     if (!m.contains(type, RDFS.subClassOf, (RDFNode) null)) roots.add(type);
   }
   return roots;
 }
Code Example #10
 public void testAll() {
   StmtIterator iter = model.listStatements(new SimpleSelector(null, null, (RDFNode) null));
   int i = 0;
   while (iter.hasNext()) {
     i++;
     iter.next();
   }
   assertEquals(6, i);
 }
Code Example #11
 public void testFindObject() {
   StmtIterator iter = model.listStatements(new SimpleSelector(null, null, RDFS.Resource));
   int i = 0;
   while (iter.hasNext()) {
     i++;
     Statement stmt = iter.nextStatement();
     assertEquals(RDFS.Resource, stmt.getObject());
   }
   assertEquals(2, i);
 }
Code Example #12
 public void testFindProperty() {
   StmtIterator iter = model.listStatements(new SimpleSelector(null, RDFS.label, (RDFNode) null));
   int i = 0;
   while (iter.hasNext()) {
     i++;
     Statement stmt = iter.nextStatement();
     assertEquals(RDFS.label, stmt.getPredicate());
   }
   assertEquals(2, i);
 }
Code Example #13
 public void testFindPropertyAndObject() {
   StmtIterator iter = model.listStatements(new SimpleSelector(null, RDF.value, 123));
   int i = 0;
   while (iter.hasNext()) {
     i++;
     Statement stmt = iter.nextStatement();
     assertEquals(RDF.value, stmt.getPredicate());
     assertEquals(123, stmt.getInt());
   }
   assertEquals(2, i);
 }
Code Example #14
  /**
   * agINFRA Social visualization components aux. tool
   *
   * @param args
   * @throws FileNotFoundException
   */
  public static void main(String[] args) throws FileNotFoundException {

    System.out.println("Hello");

    // PARAMS
    if (args.length != 5) {
      System.out.println(
          "Proper Arguments are: [Dataset files full path] [URL store] [URI graph base] [Destination directory] [commit to 4store]");
      System.out.println(
          "Example: java -jar xxxxx.jar /home/carlos/Desktop/agINFRA-workflow/workflow/loms/bioe/ http://localhost:81 http://laclo.laflor /home/carlos/Desktop/agINFRA-workflow/workflow/loms/bioerdf/ false");
      System.exit(0);
    }

    String dspath = args[0];
    String urlStore = args[1];
    String uriGraph = args[2];
    String destination = args[3];
    String commit4store_arg = args[4];

    /*String dspath = "/home/carlos/workspace/WebAPI/ds/";
    String urlStore = "http://4store.ipb.ac.rs:81";
    String uriGraph = "http://aginfra.eu";
    String localCouchdbProxy = "localhost";
    String commit4store_arg = "no";*/

    boolean commit4store = false;
    if (commit4store_arg.equals("commit")) commit4store = true;

    String output = "0"; // return value
    String status = "ERROR";
    String errorDescription = "";
    String tmpMetadataFile = "";
    // check if type is valid
    boolean bContinuar = true;
    // StorageService service;
    ArrayList<KeyValue> keyValues = new ArrayList<KeyValue>();
    HashMap fileDatasetMap = new HashMap();

    /*
    //Fetch and download IPB metadata sets.
    //CouchDB via PHP local proxy
    //http://agro.ipb.ac.rs/agcouchdb/_design/datasets/_view/list?limit=10
    //http://localhost/ag_couch_proxy/proxy-IPB-datasets.php
    try{
    	System.out.println("Connecting IPB CouchDB...");

    	String url = "http://"+localCouchdbProxy+"/ag_couch_proxy/proxy-IPB-datasets.php?dspath="+dspath;
    	WebResource webResource = Client.create().resource(url);
    	//System.out.println(url);
    	ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON,MediaType.TEXT_HTML,MediaType.WILDCARD).get(ClientResponse.class);
    	if (response.getStatus() != 200) {
    	   throw new RuntimeException("Failed : HTTP error code : " + response.getStatus());
    	}


    	//String response_str = response.getEntity(String.class);	//I don't know why this does not work when running in shell
    	String response_str = getStringFromInputStream(response.getEntityInputStream());
    	//System.out.println(response_str);	//debug

    	System.out.println("Finished IPB call");



    	System.out.println("Reading Dataset Map...");

    	//READ CSV
    	//create BufferedReader to read csv file
              StringTokenizer st = null;
              int lineNumber = 0, tokenNumber = 0;

              //read comma separated file line by line
              Scanner scanner = new Scanner(response_str);
              while (scanner.hasNextLine())
              {
                      lineNumber++;
                      String strLine = scanner.nextLine();

                      //break comma separated line using ","
                      st = new StringTokenizer(strLine, ",");

                      String datasetFile = "";
                      String datasetName = "";

                      while(st.hasMoreTokens())
                      {
                              //display csv values
                              tokenNumber++;
                              //System.out.println("Line # " + lineNumber +", Token # " + tokenNumber + ", Token : "+ st.nextToken());
                              if(tokenNumber == 1)
                              	datasetFile = st.nextToken();
                              if(tokenNumber == 2)
                              	datasetName = st.nextToken();
                      }

                      fileDatasetMap.put(datasetFile,datasetName);

                      //reset token number
                      tokenNumber = 0;

              }

              System.out.println("Finished Map reading");

    } catch (Exception e) {
    	e.printStackTrace();
    }
    */

    /*
    //foreach dataset.tar.gz **
    //Iterate
    File root = new File("ds/");
    Collection files = FileUtils.listFiles(root, null, false);

    //mini db processed files
    ArrayList processed = new ArrayList();
          try {
          	BufferedReader br = new BufferedReader(new FileReader("processed.txt"));
              String line;
          	while((line = br.readLine()) != null) {
          		processed.add(line);
    	}
    } catch (IOException e) {
    	e.printStackTrace();
    }

    System.out.println("Iterating all downloaded datasets tgz files...");
    int dsCount = 0;

    for (Iterator iterator = files.iterator(); iterator.hasNext();) {
    	File dsFile = (File) iterator.next();
    	String inputDataset = dsFile.getAbsolutePath();

    	dsCount = dsCount + 1;
    	System.out.println("  Processing "+dsCount+":"+inputDataset);	//debug

    	//po5i: mini db processed files
    	if(processed.contains(inputDataset)){
    		System.out.println("    >>Already processed... skipping... ");
    		continue;
    	}
    	else
    	{
    		processed.add(inputDataset);
    		try {
    			FileWriter fileWritter = new FileWriter("processed.txt",true);
    			BufferedWriter bufferWritter = new BufferedWriter(fileWritter);
       	        bufferWritter.write(inputDataset+"\n");
       	        bufferWritter.close();
    		} catch (IOException e) {
    			e.printStackTrace();
    		}
    	}

    	//Set the GraphID
    	String graphID = (String) fileDatasetMap.get(dsFile.getName());
    	System.out.println("    Graph:: "+graphID);



    	//Uncompress the dataset and iterate throughout the files
    	try {
    		FileInputStream fin = new FileInputStream(inputDataset);
    		BufferedInputStream in = new BufferedInputStream(fin);
    		FileOutputStream out = new FileOutputStream("ds/archive.tar");
    		GzipCompressorInputStream gzIn;
    		gzIn = new GzipCompressorInputStream(in);
    		final byte[] buffer = new byte[1024];
    		int n = 0;
    		while (-1 != (n = gzIn.read(buffer))) {
    		    out.write(buffer, 0, n);
    		}
    		out.close();
    		gzIn.close();

    		//read the tar
    		File input = new File("ds/archive.tar"); //getFile("ds/archive.tar");
            InputStream is = new FileInputStream(input);
            ArchiveInputStream in1 = new ArchiveStreamFactory().createArchiveInputStream("tar", is);
            TarArchiveEntry entry = (TarArchiveEntry)in1.getNextEntry();

            while (entry != null) {// create a file with the same name as the tarEntry
                File destPath = new File("ds/extract/" + entry.getName());
                if (entry.isDirectory()) {
                    destPath.mkdirs();
                } else {
                    destPath.createNewFile();
                    OutputStream out1 = new FileOutputStream(destPath);
                    IOUtils.copy(in1, out1);
                    out1.close();
                }
                entry = (TarArchiveEntry)in1.getNextEntry();
            }

            in1.close();
    	} catch (Exception e) {
    		e.printStackTrace();
    	}*/

    // Iterate on extracted files
    try {
      File root1 = new File(dspath);
      Collection files1 = FileUtils.listFiles(root1, null, true);
      // new File(dspath+"../rdf").mkdir();

      for (Iterator iterator1 = files1.iterator(); iterator1.hasNext(); ) {
        File lomFile = (File) iterator1.next();
        String inputFile = lomFile.getAbsolutePath();

        // System.out.println("      Processing:"+inputFile);	//debug

        if (bContinuar) {
          // save metadata stream in a local file
          tmpMetadataFile = inputFile;
          String valid = "1";
          // valid = XMLValidator.validate(tmpMetadataFile,
          //		StorageService.getXSDFile(storageType));
          boolean hasSource = false;
          if (tmpMetadataFile.length() > 0) {
            // TODO: metadata validation
            // valid = "1";
            if (valid.equalsIgnoreCase("1")) {
              // generate id for the new material

              // output = graphID;
              // save metatada in rdf
              // obtain key-value pairs
              try {
                LOMParser.parseToKeyValue(tmpMetadataFile);
                if (!LOMParser.keyValues.isEmpty()) {
                  keyValues = LOMParser.keyValues;
                }
              } catch (MalformedURLException e1) {
                e1.printStackTrace();
              } catch (IOException e1) {
                e1.printStackTrace();
              }

              if (!keyValues.isEmpty()) {

                int canSave = 1;

                if (canSave > 0) {

                  // 4store
                  // save to rdf this triple (slow)
                  // HandleGraph graph = new HandleGraph(urlStore,uriGraph);
                  // result = graph.AppendTriple(graphID, keyValues);

                  // 4store
                  // prepare RDF file (better)
                  try {
                    // HELP: http://www.roseindia.net/tutorials/rdf/generateRDF.shtml

                    Model model = ModelFactory.createDefaultModel();

                    for (KeyValue kv : keyValues) {
                      String s = uriGraph + "/" + lomFile.getName();
                      // String p = URLEncoder.encode(kv.getKey(),"UTF-8");
                      String p = kv.getKey().replaceAll("[^\\w\\s\\.]", "_");
                      String v = kv.getValue();

                      // extract the author from the CDATA into variable v
                      if (v.contains("CDATA")) {
                        v = v.replace("<![CDATA[", "");
                        v = v.replace("]]>", "");

                        VCardEngine vcardEngine = new VCardEngine();
                        VCard vcard = vcardEngine.parse(v);
                        if (vcard.hasFN()) v = vcard.getFN().getFormattedName();
                        else if (vcard.hasN()) v = vcard.getN().getFamilyName();
                        else {
                          // format the string so it can be parsed.

                          StringBuffer sb;
                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf("VERSION:"), "\n");
                          v = sb.toString();

                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf(" FN:") + 1, "\n");
                          v = sb.toString();

                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf(" N:") + 1, "\n");
                          v = sb.toString();

                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf("ORG:"), "\n");
                          v = sb.toString();

                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf("EMAIL:"), "\n");
                          v = sb.toString();

                          sb = new StringBuffer(v);
                          sb.insert(v.indexOf("END:"), "\n");
                          v = sb.toString();

                          vcard = vcardEngine.parse(v);
                          if (vcard.hasFN()) v = vcard.getFN().getFormattedName();
                          else if (vcard.hasN()) v = vcard.getN().getFamilyName();
                          else {
                            System.out.println(" ~ ~ Problem with:::" + v);
                            System.out.println(" ~ ~ When Processing:" + inputFile); // debug
                          }

                          // System.out.println(" ~ author is: "+v);
                        }
                      }

                      // System.out.println("p: "+p+"\t\t\t v: "+v);

                      Property lom_prop = model.createProperty("http://ltsc.ieee.org/xsd/LOM#" + p);
                      Resource node = model.createResource(s).addProperty(lom_prop, v);
                    }

                    FileOutputStream fop = null;
                    File rdfFile =
                        new File(destination + lomFile.getName().replace(".xml", ".rdf"));
                    fop = new FileOutputStream(rdfFile);

                    // model.write(System.out);
                    model.write(fop);

                    // 4store
                    if (commit4store) {
                      HandleGraph graph = new HandleGraph(urlStore, uriGraph);
                      int result = graph.AppendGraph(rdfFile.getAbsolutePath()); // returns 0-1
                    }

                  } catch (Exception e) {
                    e.printStackTrace();
                  }
                  // break;	//debug

                }
                output = "1";
              } else {
                output = "0";
                errorDescription = "Could not handle metadata to key-value";
              }

            } else {
              errorDescription = "XML Validation:" + valid;
            }
          } else {
            errorDescription = "Could not handle metadata file";
          }
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    /*//break;	//debug

    			//Delete everything in ds/extract/, ds/rdf/ and archive.tar to free up space
    			try {
    				FileUtils.deleteDirectory(new File("ds/extract/"));
    				FileUtils.deleteDirectory(new File("ds/rdf/"));
    				FileUtils.deleteQuietly(new File("ds/archive.tar"));

    			} catch (IOException e) {
    				e.printStackTrace();
    			}

    		}
    */

    // prepare response
    if (output.contentEquals("0")) status = "ERROR: " + errorDescription;
    else status = "OK";

    System.out.println(status);
  }
Code Example #15
 protected void tearDown() throws java.lang.Exception {
   model.close();
   model = null;
   conn.cleanDB();
   conn.close();
 }
Code Example #16
 /**
  * Answer the set of all classes which appear in <code>m</code> as the subject or object of a
  * <code>rdfs:subClassOf</code> statement.
  */
 private static Set<RDFNode> findClassesBySubClassOf(Model m) {
   Set<RDFNode> classes = new HashSet<RDFNode>();
   StmtIterator it = m.listStatements(null, RDFS.subClassOf, (RDFNode) null);
   while (it.hasNext()) addClasses(classes, it.nextStatement());
   return classes;
 }
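Code Example #9 and Code Example #16 read like two halves of one task: collect every class mentioned in an rdfs:subClassOf statement, then keep only the classes with no declared superclass. The wrapper below is a hedged sketch of how they could be combined; the name listRootClasses is illustrative only and not part of the original code.

 // Sketch: the root classes of a model are the subClassOf participants
 // that have no superclass of their own.
 private static Set<Resource> listRootClasses(Model m) {
   Set<RDFNode> classes = findClassesBySubClassOf(m);
   return selectRootClasses(m, classes);
 }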
Code Example #17
 /**
  * To each subclass X of <code>parents.item</code> add as superclass all the classes between X and
  * that item and all the items in the rest of <code>parents</code>.
  */
 private static void addSuperClasses(Model m, LinkedSeq parents) {
   Model toAdd = ModelFactory.createDefaultModel();
   addSuperClasses(m, parents, toAdd);
   m.add(toAdd);
 }
Code Example #18
 protected static void addSubclassesFrom(Model result, Model schema) {
   for (StmtIterator it = schema.listStatements(ANY, RDFS.subClassOf, ANY); it.hasNext(); ) {
     Statement s = it.nextStatement();
     if (s.getSubject().isURIResource() && s.getObject().isURIResource()) result.add(s);
   }
 }
Code Example #19
 private static Set<Resource> subjectSet(Model result, Resource S, Property P, RDFNode O) {
   return result.listStatements(S, P, O).mapWith(Statement.Util.getSubject).toSet();
 }
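The example above projects the subjects out of the statement iterator in a single expression via mapWith and the Statement.Util.getSubject mapper. The explicit loop below is an equivalent sketch (the method name subjectSetLoop is only for illustration) that makes the projection step visible.

 // Sketch: collect the subject of every statement matching (S, P, O) into a set.
 private static Set<Resource> subjectSetLoop(Model result, Resource S, Property P, RDFNode O) {
   Set<Resource> subjects = new HashSet<Resource>();
   StmtIterator it = result.listStatements(S, P, O);
   while (it.hasNext()) subjects.add(it.nextStatement().getSubject());
   return subjects;
 }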
Code Example #20
 private static void addIntersections(Model result, Model schema) {
   StmtIterator it = schema.listStatements(ANY, OWL.intersectionOf, ANY);
   while (it.hasNext()) addIntersections(result, schema, it.nextStatement());
 }