Example #1
  /** Load local dataset into TDB */
  private void TDBloading(String fileDump) {

    logger.info("TDB creation");

    // create tdb from .nt local file
    FileManager fm = FileManager.get();
    fm.addLocatorClassLoader(RDFTripleExtractor.class.getClassLoader());
    InputStream in = fm.open(fileDump);

    Location location = new Location(tdbDirectory);

    // load some initial data
    try {
      TDBLoader.load(
          TDBInternal.getBaseDatasetGraphTDB(TDBFactory.createDatasetGraph(location)), in, true);
    } catch (Exception e) {
      logger.error("TDB loading error: " + e.getMessage());
    }

    logger.info("TDB loading");

    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end();
  }
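
TDBLoader writes through TDB's internal graph API; recent Jena releases favor loading inside a write transaction instead. A minimal sketch of that alternative, assuming the same tdbDirectory and the N-Triples InputStream in from above:

  Dataset ds = TDBFactory.createDataset(tdbDirectory);
  ds.begin(ReadWrite.WRITE);
  try {
    // read the dump straight into the default model ("N-TRIPLES" syntax name)
    ds.getDefaultModel().read(in, null, "N-TRIPLES");
    ds.commit();
  } finally {
    ds.end();
  }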
Example #2
 @Override
 protected Graph createGraph() throws IOException {
   TDB.init();
   f = new File(System.getProperty("java.io.tmpdir") + "/TDBTest");
   dsGraph = TDBFactory.createDatasetGraph(f.getCanonicalPath());
   return dsGraph.getDefaultGraph();
 }
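
A fixture like this usually pairs with a cleanup that closes the dataset graph and deletes the temporary store, so stale TDB index files do not leak between test runs. A minimal sketch, assuming JUnit 4 and the f and dsGraph fields used above:

 @After
 public void tearDown() {
   if (dsGraph != null) {
     dsGraph.close(); // release TDB file handles before deleting the directory
   }
   File[] children = f.listFiles();
   if (children != null) {
     for (File child : children) {
       child.delete(); // a TDB store directory is flat: no subdirectories to recurse into
     }
   }
   f.delete();
 }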
Example #3
 /**
  * Obtain a Model from the persistent TDB store.
  *
  * @param path2db (location of the TDB store).
  * @return {@link Model}
  */
 public static Model model(String path2db) {
// TDB.setExecutionLogging(InfoLevel.INFO);
   TDB.getContext().set(TDB.symLogExec, false);
   TDB.getContext().set(TDB.symUnionDefaultGraph, true);
   rdf_model = TDBFactory.createModel(path2db);
   return rdf_model;
 }
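
With symUnionDefaultGraph set, TDB is asked to expose the union of all named graphs as the default graph for SPARQL queries. A hypothetical caller (the store path is illustrative only):

 Model m = model("/var/data/tdb"); // hypothetical TDB location
 Query q = QueryFactory.create("SELECT (COUNT(*) AS ?n) WHERE { ?s ?p ?o }");
 QueryExecution qe = QueryExecutionFactory.create(q, m);
 try {
   ResultSetFormatter.out(System.out, qe.execSelect(), q);
 } finally {
   qe.close();
 }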
Example #4
 /**
  * Remove a Resource from the specified model. If property is null, the entire resource will be
  * removed. Otherwise only the specified property will be removed from the resource.
  *
  * @param path2db (location of the TDB store).
  * @param uri (base URI of the resource).
  * @param id (resource id).
  * @param property (property of the resource); can be null.
  */
 public static void removeResource(String path2db, String uri, String id, Property property) {
   // TDB.setExecutionLogging(InfoLevel.INFO);
   TDB.getContext().set(TDB.symLogExec, true);
   rdf_model = TDBFactory.createModel(path2db);
   if (property == null) {
     rdf_model.removeAll(rdf_model.createResource(uri + id), null, null);
   } else {
     rdf_model.removeAll(rdf_model.createResource(uri + id), property, null);
   }
 }
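
A hypothetical pair of calls (the store path, namespace, id, and the FOAF property are illustrative only); since TDB1's non-transactional Model API writes lazily, a sync makes the change durable:

 // remove only the foaf:name property of the resource
 removeResource("/var/data/tdb", "http://example.org/people/", "alice", FOAF.name);
 // remove the whole resource
 removeResource("/var/data/tdb", "http://example.org/people/", "alice", null);
 TDB.sync(rdf_model); // flush pending changes to disk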
Example #5
  /** Load TDB */
  private void TDBloading() {

    logger.info("TDB loading");

    // create model from tdb
    Dataset dataset = TDBFactory.createDataset(tdbDirectory);

    // assume we want the default model, or we could get a named model here
    dataset.begin(ReadWrite.READ);
    model = dataset.getDefaultModel();
    dataset.end();

    // if the store is still empty, load the local dataset into TDB
    // (getDefaultModel() never returns null, so test for an empty model as well)
    if (model == null || model.isEmpty()) TDBloading(datasetFile);
  }
Example #6

  // Inference method for getting data from an OWL file
  public String sparqlInferenceMethod(String qry) {

    OntModel ont = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF, null);
    try {
      // Jena expects the syntax name "RDF/XML" here; plain "RDF" is not a valid language name
      ont.read(new FileInputStream("SmartHospital.owl"), null, "RDF/XML");
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    }

    Reasoner reasoner = ReasonerRegistry.getOWLMicroReasoner();
    reasoner = reasoner.bindSchema(ont);

    Dataset dataset = TDBFactory.createDataset();
    Model model = dataset.getDefaultModel();

    InfModel infModel = ModelFactory.createInfModel(reasoner, model);
    String disease = null;
    String hospital = null;
    String hospitalCode = null;
    StringBuilder res = new StringBuilder();
    Query query = QueryFactory.create(qry);
    QueryExecution exec = QueryExecutionFactory.create(query, infModel);
    try {
      ResultSet rs = exec.execSelect();
      while (rs.hasNext()) {
        // only the bindings of the last solution in the result set are kept
        QuerySolution soln = rs.nextSolution();
        disease = soln.get("dn").toString();
        hospital = soln.get("hn").toString();
        hospitalCode = soln.get("hc").toString();
      }
    } finally {
      exec.close();
    }

    res.append(disease);
    res.append("::");
    res.append(hospital);
    res.append("::");
    res.append(hospitalCode);
    return res.toString();
  }
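
The method only reads the ?dn, ?hn and ?hc bindings, so any SELECT query producing them will do. A hypothetical query (the sh: prefix and property names are assumptions, not taken from SmartHospital.owl):

  String qry =
      "PREFIX sh: <http://example.org/smarthospital#> " // hypothetical vocabulary
          + "SELECT ?dn ?hn ?hc WHERE { "
          + "  ?d sh:diseaseName ?dn . "
          + "  ?h sh:treats ?d ; sh:hospitalName ?hn ; sh:hospitalCode ?hc . "
          + "}";
  String result = sparqlInferenceMethod(qry); // formatted as "disease::hospital::code"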
Example #7

  @Before
  public void setup() throws Exception {
    stages = mock(Stages.class);

    status = new HashMap<String, String>();

    when(depositStatusFactory.get(anyString())).thenReturn(status);

    Dataset dataset = TDBFactory.createDataset();

    job = new BagIt2N3BagJob();
    job.setDepositUUID(depositUUID);
    job.setDepositDirectory(depositDir);
    job.setStages(stages);
    setField(job, "dataset", dataset);
    setField(job, "depositsDirectory", depositsDirectory);
    setField(job, "depositStatusFactory", depositStatusFactory);
    job.init();
  }
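
With the fixture in place, a test can run the job and inspect the in-memory dataset. A hypothetical sketch (it assumes the job is a Runnable and that the dataset created in setup() is kept on a field; neither is shown above):

  @Test
  public void populatesDatasetFromBag() {
    job.run(); // hypothetical entry point, assuming the job implements Runnable
    assertFalse(dataset.getDefaultModel().isEmpty());
  }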
Example #8

  public static void main(String[] args) throws IOException {
    Dataset dataset = TDBFactory.createDataset();
    Model d = dataset.getDefaultModel();
    Model m1 = dataset.getNamedModel("u1");
    Model m2 = dataset.getNamedModel("u2");

    IndexLARQ indexLARQ = AssemblerLARQ.make(dataset, null);
    LARQ.setDefaultIndex(indexLARQ);

    d.add(ResourceFactory.createResource("x"), RDFS.label, "london");
    m1.add(ResourceFactory.createResource("y"), RDFS.label, "london");
    m2.add(ResourceFactory.createResource("z"), RDFS.label, "london");
    query(dataset);

    m1.remove(
        ResourceFactory.createResource("y"),
        RDFS.label,
        ResourceFactory.createPlainLiteral("london"));
    query(dataset);
    query(dataset);
  }
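
The query(Dataset) helper is not shown. A hypothetical sketch using LARQ's pf:textMatch property function, which resolves against the default index registered above:

  private static void query(Dataset dataset) {
    String q =
        "PREFIX pf: <http://jena.hpl.hp.com/ARQ/property#> "
            + "SELECT ?s ?lit WHERE { ?lit pf:textMatch 'london' . ?s ?p ?lit }";
    QueryExecution qe = QueryExecutionFactory.create(QueryFactory.create(q), dataset);
    try {
      ResultSetFormatter.out(System.out, qe.execSelect());
    } finally {
      qe.close();
    }
  }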
Example #9

  public void start() {
    String directory = TriplestoreUtil.getTriplestoreLocation();
    Dataset dataset = TDBFactory.createDataset(directory);

    ClientConfig clientConfig = new ClientConfig();
    for (Class<?> providerClass : JenaProvidersRegistry.getProviders()) {
      clientConfig.register(providerClass);
    }
    Client rdfclient = ClientBuilder.newClient(clientConfig);
    System.out.println(
        AMESimAdapterAndTDBSubversionSyncClientWithRevision.oslcServiceProviderCatalogURI);
    Response response =
        rdfclient
            .target(
                AMESimAdapterAndTDBSubversionSyncClientWithRevision.oslcServiceProviderCatalogURI)
            .request("application/rdf+xml")
            .get();
    System.out.println(response.getStatus());
    ServiceProviderCatalog serviceProviderCatalog =
        response.readEntity(ServiceProviderCatalog.class);

    // list to collect all AMESim resources
    ArrayList<AbstractResource> oslcResourcesArrayList = new ArrayList<AbstractResource>();

    for (ServiceProvider serviceProvider : serviceProviderCatalog.getServiceProviders()) {
      System.out.println("serviceProvider " + serviceProvider.getAbout());
      if (serviceProvider.getAbout().toString().endsWith("/serviceProviders/" + fileName)) {

        for (Service service : serviceProvider.getServices()) {
          for (QueryCapability queryCapability : service.getQueryCapabilities()) {
            System.out.println(queryCapability.getQueryBase());
            Response queryCapabilityResponse =
                rdfclient
                    .target(queryCapability.getQueryBase())
                    .request("application/rdf+xml")
                    .get();
            System.out.println(queryCapabilityResponse.getStatus());
            if (queryCapability.getQueryBase().toString().endsWith("components")) {
              AMESimComponent[] oslcResources =
                  queryCapabilityResponse.readEntity(AMESimComponent[].class);
              oslcResourcesArrayList.addAll(
                  Arrays.asList(getResourcesWithVersion(oslcResources, revision)));
            }
            // Analogous handling for "lines" resources (AMESimLine) elided;
            // it mirrors the "components" block above.
            else if (queryCapability.getQueryBase().toString().endsWith("parameters")) {
              AMESimParameter[] oslcResources =
                  queryCapabilityResponse.readEntity(AMESimParameter[].class);
              oslcResourcesArrayList.addAll(
                  Arrays.asList(getResourcesWithVersion(oslcResources, revision)));
            }
            // Analogous handling for "outputports" (AMESimOutputPort),
            // "inputports" (AMESimInputPort) and "model" (AMESimModel)
            // resources elided; each mirrors the blocks above.

          }
        }
      }
    }

    Object[] objects = oslcResourcesArrayList.toArray();

    Model tdbModel = dataset.getDefaultModel();
    try {
      Model model = JenaModelHelper.createJenaModel(objects);
      tdbModel.add(model);
    } catch (IllegalAccessException
        | IllegalArgumentException
        | InvocationTargetException
        | DatatypeConfigurationException
        | OslcCoreApplicationException e) {
      e.printStackTrace();
    }
    tdbModel.close();
    dataset.close();
  }
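
The store step above writes outside a transaction, which TDB1 permits but which leaves the store vulnerable to partial writes on a crash. A minimal transactional variant of the same step, assuming the dataset and objects from above:

  dataset.begin(ReadWrite.WRITE);
  try {
    dataset.getDefaultModel().add(JenaModelHelper.createJenaModel(objects));
    dataset.commit();
  } catch (Exception e) {
    e.printStackTrace(); // end() without commit() discards the partial write
  } finally {
    dataset.end();
    dataset.close();
  }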
Example #10
 /**
  * Remove all data (triples) from the specified TDB store.
  *
  * @param path2db (location of the TDB store).
  */
 public static void removeData(String path2db) {
   // TDB.setExecutionLogging(InfoLevel.INFO);
   TDB.getContext().set(TDB.symLogExec, true);
   rdf_model = TDBFactory.createModel(path2db);
   rdf_model.removeAll();
 }
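
A hypothetical caller, assuming it runs inside the same utility class so the rdf_model field is visible (the store path is illustrative only):

 removeData("/var/data/tdb");
 TDB.sync(rdf_model); // force the deletion to disk; rdf_model was opened by removeData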
Example #11
  public static void main(String[] args) {
    String SOURCE = "http://spitfire-project.eu/ontology.rdf",
        SOURCE1 = "http://spitfire-project.eu/sn.rdf",
        NS = "http://spitfire-project.eu/ontology/ns/",
        NS1 = "http://spitfire-project.eu/ontology/ns/sn/";

    // create a model using reasoner
    OntModel model1_reasoner =
        ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF);
    OntModel model_instances =
        ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF);
    // create a model which doesn't use a reasoner
    OntModel model2_noreasoner = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);

    // ========== 1. Store in a non-OWL-Full model ==========
    // read the RDF/XML file
    model1_reasoner.read(SOURCE, "RDF/XML");
    model1_reasoner.read(SOURCE1, "RDF/XML");

    model2_noreasoner.read(SOURCE, "RDF/XML");

    model2_noreasoner.read(SOURCE1, "RDF/XML");
    model1_reasoner.add(
        model1_reasoner.createResource(NS + "containedIn"), RDF.type, OWL.TransitiveProperty);

    // add the instances

    model_instances.add(
        model_instances.getProperty(NS + "containedIn"),
        OWL.equivalentProperty,
        model_instances.createProperty(
            "http://www.ontologydesignpatterns.org/ont/dul/DUL.owl#hasLocation"));
    model_instances.add(
        model_instances.createResource(NS + "desk_a"),
        model_instances.getProperty(NS + "containedIn"),
        model_instances.createResource(NS + "floor3"));
    model_instances.add(
        model2_noreasoner.createResource(NS + "floor3"),
        model_instances.getProperty(NS + "containedIn"),
        model_instances.createResource(NS + "cti"));

    model1_reasoner.add(model2_noreasoner);

    // print the RDF/XML structure
    printModel(model1_reasoner, null);
    printModel(model1_reasoner, model1_reasoner.getProperty(NS + "containedIn"));

    // ========== 2. Store in TDB ==========
    // Direct way: Make a TDB-backed dataset
    String directory =
        System.getProperty("user.dir")
            + LD4SConstants.SYSTEM_SEPARATOR
            + ".ld4s/tdb"
            + LD4SConstants.SYSTEM_SEPARATOR
            + "LD4SDataset1";
    File dirf = new File(directory);
    if (!dirf.exists()) {
      dirf.mkdirs();
    }
    Dataset dataset = TDBFactory.createDataset(directory);
    TDB.sync(dataset);

    Resource subj = model1_reasoner.listSubjects().next();
    dataset.begin(ReadWrite.WRITE);
    try {
      dataset.addNamedModel(subj.getURI(), model1_reasoner);
      dataset.addNamedModel(NS + "desk_a", model_instances);
      dataset.commit();

      // Or call .abort()
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      dataset.end();
      dataset.close();
    }

    // ========== 3. Query ==========
    // Create a new query
    String queryString =
        "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
            + "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>  "
            + "PREFIX dul: <http://www.ontologydesignpatterns.org/ont/dul/DUL.owl#>  "
            + "PREFIX spt: <"
            + NS
            + ">  "
            + "select ?uri "
            + "where { "
            + "?uri dul:hasLocation spt:cti  "
            + "} ";
    Query query = QueryFactory.create(queryString);

    System.out.println("----------------------");

    System.out.println("Query Result Sheet");

    System.out.println("----------------------");

    System.out.println("Direct&Indirect Descendants (model1)");

    System.out.println("-------------------");

    // Execute the query and obtain results
    QueryExecution qe = QueryExecutionFactory.create(query, model1_reasoner);
    com.hp.hpl.jena.query.ResultSet results = qe.execSelect();

    // Output query results
    ResultSetFormatter.out(System.out, results, query);

    qe.close();

    System.out.println("----------------------");
    System.out.println("Only Direct Descendants (model2)");
    System.out.println("----------------------");

    // Execute the query and obtain results
    qe = QueryExecutionFactory.create(query, model2_noreasoner);
    results = qe.execSelect();

    // Output query results
    ResultSetFormatter.out(System.out, results, query);
    qe.close();
  }