/**
 * Loads the cancer RDF/XML ontology and, for each treatment in {@code treatment_list},
 * queries the hospitals offering it; results accumulate in {@code hos_treatment} and the
 * de-duplicated hospital names in {@code Hospital_list}.
 *
 * @throws IllegalArgumentException if {@code cancer_rdf.owl} cannot be found.
 */
public void hospitalQueryProcess() {
    Model model = ModelFactory.createDefaultModel();
    InputStream in = FileManager.get().open("cancer_rdf.owl");
    if (in == null) {
      throw new IllegalArgumentException("File: not found");
    }

    try {
      // read the RDF/XML file
      model.read(in, null);

      // Run one SPARQL query per treatment; results land in hos_treatment.
      // NOTE(review): assumes treatment_list is a List<String> — the original indexed
      // get(i) was assigned straight to String, so this should be equivalent.
      for (String treatmentName : treatment_list) {
        issueSPARQLTreatment_Hospital(model, treatmentName, hos_treatment);
      }
      //	System.out.println("Hospital list  for treatments: " + Hospital_list);
      System.out.println("Hospital map  for treatments: " + hos_treatment);

      // Collect each distinct hospital value into Hospital_list.
      for (Object entryObj : hos_treatment.entrySet()) {
        Map.Entry<?, ?> me = (Map.Entry<?, ?>) entryObj;
        System.out.print(me.getKey() + ": ");

        System.out.println(me.getValue());
        if (!Hospital_list.contains(me.getValue().toString())) {
          Hospital_list.add(me.getValue().toString());
        }
      }
      System.out.println("Hospital list  for treatments: " + Hospital_list);
    } finally {
      // Release both the model and the input stream even if a query fails;
      // the original leaked the stream on every path and the model on error paths.
      model.close();
      try {
        in.close();
      } catch (IOException ignored) {
        // best-effort close; nothing useful to do on failure here
      }
    }
  }
  /**
   * Upload the given file of N3 data.
   *
   * @param sFileName the name of the file to upload.
   * @param sMeetingID the id of the meeting whose data is being uploaded.
   * @throws MalformedURLException the
   *     {@link #uploadModel(com.hp.hpl.jena.rdf.model.Model, String) uploadModel} method throws
   *     it back to this method.
   */
  public void uploadFile(String sFileName, String sMeetingID) throws MalformedURLException {

    com.hp.hpl.jena.rdf.model.Model model =
        com.hp.hpl.jena.rdf.model.ModelFactory.createDefaultModel();
    try {
      model.read("file:///" + sFileName, "N3"); // $NON-NLS-1$ //$NON-NLS-2$
      uploadModel(model, sMeetingID);
    } finally {
      // Close the model even when uploadModel throws; the original leaked it on error.
      model.close();
    }
  }
  /**
   * Write the given model to a file with the given name, in N3 format.
   *
   * @param model the model to write to a file.
   * @param sFileName the name of the file to write to.
   * @throws FileNotFoundException if a new FileOutputStream cannot be created.
   * @throws IOException if the FileOutputStream cannot be closed.
   */
  public void writeFile(com.hp.hpl.jena.rdf.model.Model model, String sFileName)
      throws FileNotFoundException, IOException {

    FileOutputStream out = new FileOutputStream(sFileName);
    try {
      model.write(out, "N3"); // $NON-NLS-1$
      model.close();
    } finally {
      // Always release the file handle, even if the write fails partway through.
      out.close();
    }
  }
  @After
  public void tearDown() throws Exception {
    // Shut down the HTTP client's connection pool, if one was ever created,
    // and drop the reference so a failed re-run cannot reuse it.
    if (httpclient != null) {
      httpclient.getConnectionManager().shutdown();
      httpclient = null;
    }

    // Likewise release the Jena model, if one was opened during the test.
    if (model != null) {
      model.close();
      model = null;
    }
  }
// ---- Example #5 ----
 /**
  * @see org.caboto.jena.db.Database#updateProperty(java.lang.String, java.lang.String,
  *     com.hp.hpl.jena.rdf.model.Property, com.hp.hpl.jena.rdf.model.RDFNode)
  */
 public boolean updateProperty(String uri, String resourceUri, Property property, RDFNode value) {
   try {
     log.info("Updting property in model: " + uri + " " + reasoner);
     Data data = getData();
     Model m = data.getModel(uri);
     if (!m.containsResource(ResourceFactory.createResource(resourceUri))) {
       m.close();
       data.close();
       return false;
     }
     Resource resource = m.getResource(resourceUri);
     if (resource.hasProperty(property)) {
       resource.getProperty(property).changeObject(value);
     } else {
       resource.addProperty(property, value);
     }
     m.close();
     data.close();
     return true;
   } catch (DataException e) {
     e.printStackTrace();
     return false;
   }
 }
// ---- Example #6 ----
 /**
  * Removes every statement from the named model {@code uri}, refreshing inferred triples first
  * when a reasoner is configured.
  *
  * @param uri the URI of the model to clear.
  * @return {@code true} on success, {@code false} if a {@link DataException} occurs.
  */
 public boolean deleteAll(String uri) {
   try {
     log.info("Deleting model: " + uri + " " + reasoner);
     final Data data = getData();
     final Model target = data.getModel(uri);
     target.removeAll();
     if (reasoner != null) {
       redoInferred(uri, target, true);
     }
     target.close();
     data.close();
     return true;
   } catch (DataException e) {
     e.printStackTrace();
     return false;
   }
 }
// ---- Example #7 ----
 /**
  * Adds the statements of {@code model} (and its prefix mappings) to the named model {@code uri},
  * refreshing inferred triples afterwards when a reasoner is configured.
  *
  * @param uri the URI of the model to add data to.
  * @param model the model whose statements are merged in.
  * @return {@code true} on success, {@code false} if a {@link DataException} occurs.
  * @see org.caboto.jena.db.Database#addModel(java.lang.String, com.hp.hpl.jena.rdf.model.Model)
  */
 public boolean addModel(String uri, Model model) {
   try {
     log.info("Adding data to model: " + uri + " " + reasoner);
     final Data data = getData();
     final Model target = data.getModel(uri);
     // Carry over the source model's prefix mappings before merging statements.
     target.withDefaultMappings(model);
     target.add(model);
     if (reasoner != null) {
       redoInferred(uri, target, false);
     }
     target.close();
     data.close();
     return true;
   } catch (DataException e) {
     e.printStackTrace();
     return false;
   }
 }
  /**
   * Upload the given model to the triplestore, first removing the meeting's
   * "compendium-is-processed" marker and then adding the model's statements.
   *
   * @param oModel the model to upload.
   * @param sMeetingID the id of the meeting whose data is being uploaded.
   * @throws MalformedURLException if the urls used to create the HttpRemove and HttpAdd is
   *     malformed.
   */
  public void uploadModel(com.hp.hpl.jena.rdf.model.Model oModel, String sMeetingID)
      throws MalformedURLException {

    // System.out.println("About to try and upload: "+oModel.toString());

    // Build a one-statement model marking the meeting as processed, and remove any
    // existing copy of that statement from the store before re-adding the data.
    com.hp.hpl.jena.rdf.model.Model oInnerModel = ModelFactory.createDefaultModel();
    try {
      Resource meeting = oInnerModel.createResource(sMeetingID);
      Property comp_is_proc =
          oInnerModel.createProperty(MEMETIC_NS, "compendium-is-processed"); // $NON-NLS-1$
      meeting.addProperty(comp_is_proc, "true"); // $NON-NLS-1$

      HttpRemove removeOp = new HttpRemove(sUrl);
      removeOp.setModel(oInnerModel);
      removeOp.exec();
    } finally {
      // Close the scratch model even if the remove operation throws.
      oInnerModel.close();
    }

    HttpAdd addOp = new HttpAdd(sUrl);
    addOp.setModel(oModel);
    addOp.exec();
  }
  /**
   * Synchronizes OSLC AMESim resources (components and parameters, at the given revision) from
   * the adapter's service provider catalog into the local TDB triplestore.
   */
  public void start() {
    String directory = TriplestoreUtil.getTriplestoreLocation();
    Dataset dataset = TDBFactory.createDataset(directory);

    ClientConfig clientConfig = new ClientConfig();
    // Register the Jena providers so the client can (de)serialize RDF/XML entities.
    for (Class<?> providerClass : JenaProvidersRegistry.getProviders()) {
      clientConfig.register(providerClass);
    }
    Client rdfclient = ClientBuilder.newClient(clientConfig);
    System.out.println(
        AMESimAdapterAndTDBSubversionSyncClientWithRevision.oslcServiceProviderCatalogURI);
    Response response =
        rdfclient
            .target(
                AMESimAdapterAndTDBSubversionSyncClientWithRevision.oslcServiceProviderCatalogURI)
            .request("application/rdf+xml")
            .get();
    System.out.println(response.getStatus());
    ServiceProviderCatalog serviceProviderCatalog =
        response.readEntity(ServiceProviderCatalog.class);

    // list to collect all AMESim resources
    ArrayList<AbstractResource> oslcResourcesArrayList = new ArrayList<AbstractResource>();

    for (ServiceProvider serviceProvider : serviceProviderCatalog.getServiceProviders()) {
      System.out.println("serviceProvider " + serviceProvider.getAbout());
      // Only the service provider matching this file is synchronized.
      if (serviceProvider.getAbout().toString().endsWith("/serviceProviders/" + fileName)) {

        for (Service service : serviceProvider.getServices()) {
          for (QueryCapability queryCapability : service.getQueryCapabilities()) {
            System.out.println(queryCapability.getQueryBase());
            Response queryCapabilityResponse =
                rdfclient
                    .target(queryCapability.getQueryBase())
                    .request("application/rdf+xml")
                    .get();
            System.out.println(queryCapabilityResponse.getStatus());
            // Only "components" and "parameters" are synced; lines, ports and the model
            // itself were deliberately disabled in the original code.
            if (queryCapability.getQueryBase().toString().endsWith("components")) {
              AMESimComponent[] oslcResources =
                  queryCapabilityResponse.readEntity(AMESimComponent[].class);
              oslcResourcesArrayList.addAll(
                  Arrays.asList(getResourcesWithVersion(oslcResources, revision)));
            } else if (queryCapability.getQueryBase().toString().endsWith("parameters")) {
              AMESimParameter[] oslcResources =
                  queryCapabilityResponse.readEntity(AMESimParameter[].class);
              oslcResourcesArrayList.addAll(
                  Arrays.asList(getResourcesWithVersion(oslcResources, revision)));
            }
          }
        }
      }
    }

    Object[] objects = oslcResourcesArrayList.toArray();

    Model tdbModel = dataset.getDefaultModel();
    try {
      Model model = JenaModelHelper.createJenaModel(objects);
      tdbModel.add(model);
    } catch (IllegalAccessException
        | IllegalArgumentException
        | InvocationTargetException
        | DatatypeConfigurationException
        | OslcCoreApplicationException e) {
      // Collapse of five identical catch blocks; a failed conversion leaves the store untouched.
      e.printStackTrace();
    } finally {
      // Close the store even when model creation fails; the original skipped close on
      // unchecked exceptions.
      tdbModel.close();
      dataset.close();
    }
  }
 /**
  * Test fixture teardown: closes the Jena model, then cleans and closes the DB connection.
  *
  * @throws java.lang.Exception if closing the model or the connection fails.
  */
 protected void tearDown() throws java.lang.Exception {
   try {
     model.close();
     model = null;
   } finally {
     // Clean and release the connection even if closing the model throws;
     // the original would leak conn in that case.
     try {
       conn.cleanDB();
     } finally {
       conn.close();
     }
   }
 }
  // Using Stardog with the [Jena](http://jena.apache.org) API
  // -------------------
  // In this example we'll show how to use the Stardog Jena API bindings.
  public static void main(String[] args) throws Exception {
    // Creating a Server
    // -----------------
    // You'll need a server to connect to, obviously.  For the example, lets create an embedded
    // server.
    Server aServer = Stardog.buildServer().bind(SNARLProtocolConstants.EMBEDDED_ADDRESS).start();

    try {
      // Next we'll establish a admin connection to Stardog so we can create a database to use for
      // the example
      AdminConnection aAdminConnection =
          AdminConnectionConfiguration.toEmbeddedServer().credentials("admin", "admin").connect();

      try {
        // If the database already exists, we'll drop it and create a fresh copy
        if (aAdminConnection.list().contains("testJena")) {
          aAdminConnection.drop("testJena");
        }

        aAdminConnection.createMemory("testJena");
      } finally {
        aAdminConnection.close();
      }

      // Now we open a Connection our new database
      Connection aConn =
          ConnectionConfiguration.to("testJena").credentials("admin", "admin").connect();

      // Then we obtain a Jena `Model` for the specified stardog database which is backed by our
      // `Connection`
      Model aModel = SDJenaFactory.createModel(aConn);
      try {

        // Start a transaction before adding the data.  This is not required, but it is faster to
        // group the entire add into a single transaction rather
        // than rely on the auto commit of the underlying stardog connection.
        aModel.begin();

        // Read data into the model.  note, this will add statement at a time.  Bulk loading needs
        // to be performed directly with the BulkUpdateHandler provided
        // by the underlying graph, or read in files in RDF/XML format, which uses the bulk loader
        // natively.  Alternatively, you can load data into the stardog
        // database using it's native API via the command line client.
        // Close the input stream ourselves; the original leaked the FileInputStream.
        FileInputStream aDataStream = new FileInputStream("data/sp2b_10k.n3");
        try {
          aModel.getReader("N3").read(aModel, aDataStream, "");
        } finally {
          aDataStream.close();
        }

        // When you're done adding, you need to commit the changes
        aModel.commit();

        // Query that we will run against the data we just loaded
        String aQueryString =
            "select * where { ?s ?p ?o. filter(?s = <http://localhost/publications/articles/Journal1/1940/Article1>).}";

        // Create a query...
        Query aQuery = QueryFactory.create(aQueryString);

        // ... and run it
        QueryExecution aExec = QueryExecutionFactory.create(aQuery, aModel);

        try {
          // Now print the results
          ResultSetFormatter.out(aExec.execSelect(), aModel);
        } finally {
          // Always close the execution
          aExec.close();
        }
      } finally {
        // close the model to free up the connection to the stardog database
        aModel.close();
      }
    } finally {
      // You must stop the server when you're done
      aServer.stop();
    }
  }