/**
 * Relates a cognitive agent to an application interaction through the
 * INVOLVED_IN object property. Both individuals are persisted first if they
 * are not yet present in the ontology. Only Sesame-backed ontologies are
 * supported; repository failures are printed and swallowed.
 */
public void setApplicationInteraction(
    ICognitiveAgent agent, IApplicationInteraction interaction) {
  INamedIndividual agentIndividual = findIndividualByUri(agent.getUri());
  if (agentIndividual == null) {
    agent.storeCognitiveAgent();
    agentIndividual = findIndividualByUri(agent.getUri());
  }
  INamedIndividual interactionIndividual = findIndividualByUri(interaction.getUri());
  if (interactionIndividual == null) {
    interaction.storeApplicationInteraction();
    interactionIndividual = findIndividualByUri(interaction.getUri());
  }
  IOntology ontology = getAdaptationOntology();
  if (!(ontology instanceof SesameOntology)) {
    return; // nothing to do for non-Sesame ontologies
  }
  Repository repo = ((SesameOntology) ontology).getRepository();
  ValueFactory vf = repo.getValueFactory();
  URI subj = vf.createURI(agentIndividual.getUri());
  URI pred = vf.createURI(getEntityUri(INVOLVED_IN));
  URI obj = vf.createURI(interactionIndividual.getUri());
  try {
    RepositoryConnection connection = repo.getConnection();
    try {
      connection.add(subj, pred, obj);
    } finally {
      connection.close();
    }
  } catch (RepositoryException e) {
    e.printStackTrace();
  }
}
/** **********************INSTANCES************************ */
/**
 * Returns the URIs of all individuals declared as instances of the given
 * class, checking both the rdf:type predicate and the configured OWL
 * instance predicate.
 *
 * @param className URI of the class whose instances are requested
 * @return set of instance URIs (possibly empty); repository errors are
 *     printed and swallowed, yielding a partial or empty result
 */
public Collection<String> getClassInstances(String className) {
  Set<String> retInstances = new HashSet<>();
  URI typeOfUri = this.repository.getValueFactory().createURI(typeOfUriAsText);
  URI classUri = this.repository.getValueFactory().createURI(className);
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      RepositoryResult<Statement> results =
          repoConn.getStatements(null, typeOfUri, classUri, false);
      try {
        while (results.hasNext()) {
          retInstances.add(results.next().getSubject().stringValue());
        }
      } finally {
        results.close(); // FIX: result cursor was previously never closed
      }
    } finally {
      repoConn.close(); // FIX: connection leaked if iteration threw
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  URI owlTypeOfUri = this.repository.getValueFactory().createURI(owlInstanceUriAsText);
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      RepositoryResult<Statement> results =
          repoConn.getStatements(null, owlTypeOfUri, classUri, false);
      try {
        while (results.hasNext()) {
          retInstances.add(results.next().getSubject().stringValue());
        }
      } finally {
        results.close();
      }
    } finally {
      repoConn.close();
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  return retInstances;
}
/**
 * Attaches a credential to a cognitive agent via the HAS_CREDENTIAL object
 * property, persisting either individual first when missing.
 *
 * <p>NOTE(review): the method name "setCredentail" looks like a typo of
 * "setCredential" but is kept unchanged for caller compatibility.
 */
public void setCredentail(ICognitiveAgent agent, ICredential credential) {
  INamedIndividual agentIndividual = findIndividualByUri(agent.getUri());
  if (agentIndividual == null) {
    agent.storeCognitiveAgent();
    agentIndividual = findIndividualByUri(agent.getUri());
  }
  INamedIndividual credentialIndividual = findIndividualByUri(credential.getUri());
  if (credentialIndividual == null) {
    credential.storeCredential();
    credentialIndividual = findIndividualByUri(credential.getUri());
  }
  IOntology ontology = getAdaptationOntology();
  if (!(ontology instanceof SesameOntology)) {
    return; // nothing to do for non-Sesame ontologies
  }
  Repository repo = ((SesameOntology) ontology).getRepository();
  ValueFactory vf = repo.getValueFactory();
  URI subj = vf.createURI(agentIndividual.getUri());
  URI pred = vf.createURI(getEntityUri(HAS_CREDENTIAL));
  URI obj = vf.createURI(credentialIndividual.getUri());
  try {
    RepositoryConnection connection = repo.getConnection();
    try {
      connection.add(subj, pred, obj);
    } finally {
      connection.close();
    }
  } catch (RepositoryException e) {
    e.printStackTrace();
  }
}
// TODO : check this method (haven't check this due to the poor test ontology) public Collection<String> getEquivalentProperties(String startingProperty) { Set<String> equivalentProperties = new HashSet<>(); URI equivalentPropertyUri = this.repository.getValueFactory().createURI(this.equivalentPropertyUriAsText); URI propertyUri = this.repository.getValueFactory().createURI(startingProperty); try { RepositoryConnection repoConn = this.repository.getConnection(); RepositoryResult<Statement> results = repoConn.getStatements(null, equivalentPropertyUri, propertyUri, false); while (results.hasNext()) { equivalentProperties.add(results.next().getSubject().stringValue()); } repoConn.close(); } catch (RepositoryException ex) { ex.printStackTrace(); } try { RepositoryConnection repoConn = this.repository.getConnection(); RepositoryResult<Statement> results = repoConn.getStatements(propertyUri, equivalentPropertyUri, null, false); while (results.hasNext()) { equivalentProperties.add(results.next().getObject().stringValue()); } repoConn.close(); } catch (RepositoryException ex) { ex.printStackTrace(); } return equivalentProperties; }
private boolean isManaged( RepositoryConnection conn, Resource subject, URI predicate, Value object, String operation) { try { if (conn.hasStatement(subject, predicate, object, true, managedContext)) { // Ignore/Strip any triple that is already present in the mgmt-context (i.e. "unchanged" // props). if (log.isTraceEnabled()) { log.trace( "[{}] filtering out statement that is already present in the managed context: {}", operation, new StatementImpl(subject, predicate, object)); } return true; } else if (this.subject.equals(subject) && managedProperties.contains(predicate)) { // We do NOT allow changing server-managed properties. if (log.isTraceEnabled()) { log.trace( "[{}] filtering out statement with managed propterty {}: {}", operation, predicate, new StatementImpl(subject, predicate, object)); } deniedProperties.add(predicate); return true; } } catch (RepositoryException e) { log.error("Error while filtering server managed properties: {}", e.getMessage()); } return false; }
/**
 * Returns all classes declared disjoint with the given class, looking in
 * both directions (as subject and as object of the disjointness statement).
 *
 * @param startingClass URI of the class whose disjoint classes are sought
 * @return set of disjoint class URIs (possibly empty); repository errors are
 *     printed and swallowed
 */
public Collection<String> getDisjointClasses(String startingClass) {
  Set<String> disjointClasses = new HashSet<>();
  URI disjointClassUri = this.repository.getValueFactory().createURI(this.disjointClassUriAsText);
  URI classUri = this.repository.getValueFactory().createURI(startingClass);
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      // Direction 1: ?x disjointWith startingClass
      RepositoryResult<Statement> results =
          repoConn.getStatements(null, disjointClassUri, classUri, false);
      try {
        while (results.hasNext()) {
          disjointClasses.add(results.next().getSubject().stringValue());
        }
      } finally {
        results.close(); // FIX: result cursor was previously never closed
      }
    } finally {
      repoConn.close(); // FIX: connection leaked if iteration threw
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      // Direction 2: startingClass disjointWith ?x
      RepositoryResult<Statement> results =
          repoConn.getStatements(classUri, disjointClassUri, null, false);
      try {
        while (results.hasNext()) {
          disjointClasses.add(results.next().getObject().stringValue());
        }
      } finally {
        results.close();
      }
    } finally {
      repoConn.close();
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  return disjointClasses;
}
/**
 * Marks an entity as interesting for a cognitive agent via the HAS_INTEREST
 * object property, persisting either individual first when missing. Only
 * Sesame-backed ontologies are supported; repository failures are printed
 * and swallowed.
 */
public void setInterestingEntity(ICognitiveAgent agent, IEntity entity) {
  INamedIndividual agentIndividual = findIndividualByUri(agent.getUri());
  if (agentIndividual == null) {
    agent.storeCognitiveAgent();
    agentIndividual = findIndividualByUri(agent.getUri());
  }
  INamedIndividual entityIndividual = findIndividualByUri(entity.getUri());
  if (entityIndividual == null) {
    entity.storeEntity();
    entityIndividual = findIndividualByUri(entity.getUri());
  }
  IOntology ontology = getAdaptationOntology();
  if (!(ontology instanceof SesameOntology)) {
    return; // nothing to do for non-Sesame ontologies
  }
  Repository repo = ((SesameOntology) ontology).getRepository();
  ValueFactory vf = repo.getValueFactory();
  URI subj = vf.createURI(agentIndividual.getUri());
  URI pred = vf.createURI(getEntityUri(HAS_INTEREST));
  URI obj = vf.createURI(entityIndividual.getUri());
  try {
    RepositoryConnection connection = repo.getConnection();
    try {
      connection.add(subj, pred, obj);
    } finally {
      connection.close();
    }
  } catch (RepositoryException e) {
    e.printStackTrace();
  }
}
@Override public boolean toRdf(final Repository myRepository, final int modelVersion, final URI... keyToUse) throws OpenRDFException { super.toRdf(myRepository, modelVersion, keyToUse); final RepositoryConnection con = myRepository.getConnection(); try { if (SpinInferencingRuleImpl.DEBUG) { SpinInferencingRuleImpl.log.debug( "SparqlNormalisationRuleImpl.toRdf: keyToUse=" + keyToUse); } final URI keyUri = this.getKey(); con.setAutoCommit(false); con.add( keyUri, RDF.TYPE, SpinInferencingRuleSchema.getSpinInferencingRuleTypeUri(), keyToUse); // If everything went as planned, we can commit the result con.commit(); return true; } catch (final RepositoryException re) { // Something went wrong during the transaction, so we roll it back con.rollback(); SpinInferencingRuleImpl.log.error("RepositoryException: " + re.getMessage()); } finally { con.close(); } return false; }
/**
 * Creates a loader backed by a Virtuoso repository and eagerly validates the
 * connection settings by opening a probe connection.
 *
 * @param host Virtuoso host name
 * @param port Virtuoso port
 * @param user repository user
 * @param pwd repository password
 * @throws IllegalArgumentException if a connection cannot be established
 */
public ValueLoader(String host, String port, String user, String pwd) {
  String connectionString = "jdbc:virtuoso://" + host + ':' + port;
  repository = new VirtuosoRepository(connectionString, user, pwd, true);
  try {
    RepositoryConnection con = repository.getConnection();
    con.close(); // FIX: the probe connection was previously never closed (leak)
  } catch (RepositoryException e) {
    throw new IllegalArgumentException(e.getMessage(), e);
  }
}
/**
 * Connects this data set to a repository hosted on a remote Sesame server.
 * Initialization failures are printed, not propagated, so the instance may
 * be left with an uninitialized repository.
 */
public SesameDataSet(String sesameServer, String repositoryID) {
  HTTPRepository remote = new HTTPRepository(sesameServer, repositoryID);
  currentRepository = remote;
  try {
    remote.initialize();
  } catch (RepositoryException e) {
    // TODO Auto-generated catch block
    e.printStackTrace();
  }
}
/**
 * Closes the underlying repository connection, then delegates to the
 * superclass. A failure to close is reported on stderr and re-surfaced as an
 * unchecked {@code JenaException}.
 */
@Override
public void close() {
  try {
    connection.close();
  } catch (RepositoryException ex) {
    ex.printStackTrace(System.err);
    throw new JenaException(ex);
  }
  // Only reached when the connection closed cleanly.
  super.close();
}
/**
 * Processes each entry of {@code nanopubs} and publishes the nanopubs found.
 * When a SPARQL endpoint URL is configured, each entry is treated as a
 * nanopub URI fetched from that endpoint; otherwise each entry is a local
 * file that may contain multiple nanopubs. Processing stops at the first RDF
 * error, malformed nanopub, empty file, or failed publish attempt
 * (signalled via the {@code failed} field). Finally a per-server publish
 * summary is printed and the SPARQL repository, if any, is shut down.
 *
 * @throws IOException if reading a nanopub file fails
 */
private void run() throws IOException {
  failed = false;
  for (String s : nanopubs) {
    count = 0;
    try {
      if (sparqlEndpointUrl != null) {
        // Lazily create the SPARQL repository on first use, reuse afterwards.
        if (sparqlRepo == null) {
          sparqlRepo = new SPARQLRepository(sparqlEndpointUrl);
          sparqlRepo.initialize();
        }
        processNanopub(new NanopubImpl(sparqlRepo, new URIImpl(s)));
      } else {
        if (verbose) {
          System.out.println("Reading file: " + s);
        }
        // A single file may hold many nanopubs; each one is handed to
        // processNanopub unless a previous publish already failed.
        MultiNanopubRdfHandler.process(
            new File(s),
            new NanopubHandler() {
              @Override
              public void handleNanopub(Nanopub np) {
                if (failed) return;
                processNanopub(np);
              }
            });
        if (count == 0) {
          System.out.println("NO NANOPUB FOUND: " + s);
          break;
        }
      }
    } catch (OpenRDFException ex) {
      System.out.println("RDF ERROR: " + s);
      ex.printStackTrace(System.err);
      break;
    } catch (MalformedNanopubException ex) {
      System.out.println("INVALID NANOPUB: " + s);
      ex.printStackTrace(System.err);
      break;
    }
    if (failed) {
      System.out.println("FAILED TO PUBLISH NANOPUBS");
      break;
    }
  }
  // Summary: how many nanopubs ended up on each server.
  for (String s : usedServers.keySet()) {
    int c = usedServers.get(s);
    System.out.println(c + " nanopub" + (c == 1 ? "" : "s") + " published at " + s);
  }
  if (sparqlRepo != null) {
    try {
      sparqlRepo.shutDown();
    } catch (RepositoryException ex) {
      ex.printStackTrace();
    }
  }
}
/**
 * Return a list of versions that affect the resource whose uri is passed as argument. For each
 * version, the result will contain the id, the creator, and the date when the version was
 * recorded. Further details for a version can be requested by calling the
 * /versioning/versions/{id} webservice.
 *
 * <p>Note that resource_uri is an optional parameter. In case no resource uri is given, all
 * versions recorded by the LMF are returned, which can take a considerable amount of time. @HTTP
 * 200 in case the versions were retrieved successfully @HTTP 404 in case the resource passed as
 * argument resource_uri could not be found
 *
 * @param resource_uri the URI of the resource for which to return the versions (optional, see
 *     warning above)
 * @param dateFrom optional lower bound for the version date (parsed by DateUtils)
 * @param dateTo optional upper bound for the version date (parsed by DateUtils)
 * @return a JSON list of versions, each a map with the properties "id" (long), "creator" (uri),
 *     "date" (ISO 8601 String)
 */
@GET
@Produces("application/json")
@Path("/versions/list")
public Response getVersions(
    @QueryParam("resource") String resource_uri,
    @QueryParam("from") String dateFrom,
    @QueryParam("to") String dateTo) {
  try {
    RepositoryConnection conn = sesameService.getConnection();
    try {
      if (resource_uri != null) {
        URI resource = ResourceUtils.getUriResource(conn, resource_uri);
        if (resource != null && resource instanceof KiWiUriResource) {
          if (dateFrom == null && dateTo == null) {
            // No date range: all versions of the resource.
            return Response.ok()
                .entity(formatVersions(versioningService.listVersions(resource)))
                .build();
          } else {
            // Restrict to versions recorded within [dateFrom, dateTo].
            Date dateFromD = DateUtils.parseDate(dateFrom);
            Date dateToD = DateUtils.parseDate(dateTo);
            return Response.ok()
                .entity(
                    formatVersions(versioningService.listVersions(resource, dateFromD, dateToD)))
                .build();
          }
        } else {
          return Response.status(Response.Status.NOT_FOUND)
              .entity("resource with URI " + resource_uri + " was not found in the system")
              .build();
        }
      } else {
        // No resource given: list versions across the whole store (may be slow).
        if (dateFrom == null && dateTo == null) {
          return Response.ok().entity(formatVersions(versioningService.listVersions())).build();
        } else {
          Date dateFromD = DateUtils.parseDate(dateFrom);
          Date dateToD = DateUtils.parseDate(dateTo);
          return Response.ok()
              .entity(formatVersions(versioningService.listVersions(dateFromD, dateToD)))
              .build();
        }
      }
    } finally {
      // Commit the read transaction and release the connection in all cases.
      conn.commit();
      conn.close();
    }
  } catch (RepositoryException ex) {
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
  } catch (SailException ex) {
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
  }
}
/**
 * Removes the namespace bound to the given prefix from the request's
 * repository and reports success with an empty view.
 *
 * @throws ServerHTTPException if the repository operation fails
 */
private ModelAndView getRemoveNamespaceResult(HttpServletRequest request, String prefix)
    throws ServerHTTPException {
  try {
    RepositoryConnection conn = RepositoryInterceptor.getRepositoryConnection(request);
    // The connection is shared per request; serialize access to it.
    synchronized (conn) {
      conn.removeNamespace(prefix);
    }
  } catch (RepositoryException e) {
    throw new ServerHTTPException("Repository error: " + e.getMessage(), e);
  }
  return new ModelAndView(EmptySuccessView.getInstance());
}
/**
 * In memory Sesame Repository with optional inferencing
 *
 * @param inferencing determines whether we load the inferencer or not
 */
public SimpleSesameGraph(boolean inferencing) {
  try {
    // Wrap the in-memory store with an RDFS forward-chaining inferencer on demand.
    therepository =
        new SailRepository(
            inferencing
                ? new ForwardChainingRDFSInferencer(new MemoryStore())
                : new MemoryStore());
    therepository.initialize();
  } catch (RepositoryException e) {
    // Initialization failure is reported but not propagated.
    e.printStackTrace();
  }
}
/**
 * In memory Sesame repository with optional inferencing
 *
 * @param inferencing when {@code true}, wraps the store with an RDFS
 *     forward-chaining inferencer
 */
public SesameDataSet(boolean inferencing) {
  try {
    currentRepository =
        new SailRepository(
            inferencing
                ? new ForwardChainingRDFSInferencer(new MemoryStore())
                : new MemoryStore());
    currentRepository.initialize();
  } catch (RepositoryException e) {
    // Initialization failure is reported but not propagated.
    e.printStackTrace();
  }
}
/**
 * Disk-backed (NativeStore) Sesame repository with optional RDFS
 * inferencing. Initialization failures are printed, not propagated.
 *
 * @param pathToDir directory holding the native store files
 * @param inferencing when {@code true}, wraps the store with an RDFS
 *     forward-chaining inferencer
 */
public SesameDataSet(String pathToDir, boolean inferencing) {
  File dataDir = new File(pathToDir);
  try {
    currentRepository =
        new SailRepository(
            inferencing
                ? new ForwardChainingRDFSInferencer(new NativeStore(dataDir))
                : new NativeStore(dataDir));
    currentRepository.initialize();
  } catch (RepositoryException e) {
    e.printStackTrace();
  }
}
// conect to repository public Model() { // TODO kanonika thelei to endpointurl, alla an den to trexw me to ant diavazei ayta pou // uparxoun ston kodika kai einai null tora!!! allakse to! // anti gia split valto sto config file? Alla afou einai mono sesame tha mporouse na meinei ki // etsi String endpointUrl = "http://localhost:8080/openrdf-sesame/repositories/spimbench"; // TestDriver.getConfigurations().getString(Configurations.ENDPOINT_URL); try { this.repository = new HTTPRepository(endpointUrl); this.repository.initialize(); } catch (RepositoryException ex) { ex.printStackTrace(); } }
/**
 * Returns a string rendering of every statement in the repository (no
 * inference).
 *
 * @return set of statement strings (possibly empty); repository errors are
 *     printed and swallowed
 */
public Collection<String> getAllTriples() {
  Set<String> all = new HashSet<>();
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      RepositoryResult<Statement> results = repoConn.getStatements(null, null, null, false);
      try {
        while (results.hasNext()) {
          all.add(results.next().toString());
        }
      } finally {
        results.close(); // FIX: result cursor was previously never closed
      }
    } finally {
      repoConn.close(); // FIX: connection leaked if iteration threw
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  return all;
}
/**
 * Returns the properties declared as sub-properties of the given property,
 * excluding the property itself.
 *
 * @param startingProperty URI of the property whose sub-properties are sought
 * @param useInference whether inferred statements should be included
 * @return set of sub-property URIs (possibly empty); repository errors are
 *     printed and swallowed
 */
public Collection<String> getSubPropertyOf(String startingProperty, boolean useInference) {
  Set<String> subProperties = new HashSet<>();
  URI subPropertyUri = this.repository.getValueFactory().createURI(this.subPropertyOfUriAsText);
  URI propertyUri = this.repository.getValueFactory().createURI(startingProperty);
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      RepositoryResult<Statement> results =
          repoConn.getStatements(null, subPropertyUri, propertyUri, useInference);
      try {
        while (results.hasNext()) {
          String subProperty = results.next().getSubject().stringValue();
          // With inference on, a property is its own sub-property; skip it.
          if (!subProperty.equals(startingProperty)) {
            subProperties.add(subProperty);
          }
        }
      } finally {
        results.close(); // FIX: result cursor was previously never closed
      }
    } finally {
      repoConn.close(); // FIX: connection leaked if iteration threw
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  return subProperties;
}
@Override public boolean schemaToRdf( final Repository myRepository, final int modelVersion, final URI... contextUri) throws OpenRDFException { final RepositoryConnection con = myRepository.getConnection(); final ValueFactory f = Constants.VALUE_FACTORY; try { con.setAutoCommit(false); con.add(OwlNormalisationRuleSchema.getOwlRuleTypeUri(), RDF.TYPE, OWL.CLASS, contextUri); con.add( OwlNormalisationRuleSchema.getOwlRuleTypeUri(), RDFS.SUBCLASSOF, ValidatingRuleSchema.getValidatingRuleTypeUri(), contextUri); con.add( OwlNormalisationRuleSchema.getOwlRuleTypeUri(), RDFS.LABEL, f.createLiteral( "An OWL normalisation rule intended to validate triples based on an OWL ontology."), contextUri); // If everything went as planned, we can commit the result con.commit(); return true; } catch (final RepositoryException re) { // Something went wrong during the transaction, so we roll it back if (con != null) { con.rollback(); } OwlNormalisationRuleSchema.LOG.error("RepositoryException: " + re.getMessage()); } finally { if (con != null) { con.close(); } } return false; }
/**
 * Loads a FedX endpoint backed by a local Sesame NativeStore located at the
 * directory described by {@code repoInfo}, verifies the connection (if
 * configured), and registers a federated service for the endpoint with the
 * global {@code FederatedServiceManager}.
 *
 * @param repoInfo descriptor carrying id, name, location, type and optional
 *     endpoint configuration
 * @return the fully initialized local endpoint
 * @throws FedXException if the repository cannot be initialized
 * @throws FedXRuntimeException if the store directory does not exist
 */
@Override
public Endpoint loadEndpoint(RepositoryInformation repoInfo) throws FedXException {
  File store = FileUtil.getFileLocation(repoInfo.getLocation());
  if (!store.exists()) {
    throw new FedXRuntimeException(
        "Store does not exist at '"
            + repoInfo.getLocation()
            + ": "
            + store.getAbsolutePath()
            + "'.");
  }
  try {
    NativeStore ns = new NativeStoreExt(store);
    SailRepository repo = new SailRepository(ns);
    repo.initialize();
    // Optional sanity check of the freshly opened repository.
    ProviderUtil.checkConnectionIfConfigured(repo);
    Endpoint res =
        new Endpoint(
            repoInfo.getId(),
            repoInfo.getName(),
            repoInfo.getLocation(),
            repoInfo.getType(),
            EndpointClassification.Local);
    res.setEndpointConfiguration(repoInfo.getEndpointConfiguration());
    res.setRepo(repo);
    // register a federated service manager to deal with this endpoint
    SAILFederatedService federatedService = new SAILFederatedService(res);
    federatedService.initialize();
    FederatedServiceManager.getInstance().registerService(repoInfo.getName(), federatedService);
    return res;
  } catch (RepositoryException e) {
    throw new FedXException(
        "Repository " + repoInfo.getId() + " could not be initialized: " + e.getMessage(), e);
  }
}
/**
 * Finds the subject of the first dcat:Catalog typed statement in the given
 * graph.
 *
 * @param repository repository to query
 * @param graph graph (context) to restrict the lookup to
 * @return the catalog subject, or {@code null} if no catalog statement
 *     exists or the lookup itself fails
 * @throws TransformException if a connection cannot be obtained or closed
 */
private Value getRawGraph(Repository repository, URI graph) throws TransformException {
  // FIX: removed unused local RepositoryResult variable.
  try {
    RepositoryConnection connection = repository.getConnection();
    try {
      List<Statement> catalogStatement =
          connection
              .getStatements(
                  null, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_CATALOG, false, graph)
              .asList();
      if (catalogStatement.isEmpty()) return null;
      return catalogStatement.get(0).getSubject();
    } catch (RepositoryException e) {
      // Lookup failed: report "no catalog" rather than aborting the transform.
      return null;
    } finally {
      connection.close(); // FIX: connection was previously leaked on the success path
    }
  } catch (RepositoryException e) {
    throw new TransformException(e.getMessage(), e);
  }
}
/**
 * Returns the named super-classes of the given class (blank-node
 * super-classes, e.g. OWL restrictions, are skipped).
 *
 * @param startingClass URI of the class whose super-classes are sought
 * @param useInference whether inferred statements should be included
 * @return set of super-class URIs (possibly empty); repository errors are
 *     printed and swallowed
 */
public Collection<String> getSuperClasses(String startingClass, boolean useInference) {
  Set<String> superClasses = new HashSet<>();
  URI subClassUri = this.repository.getValueFactory().createURI(this.subClassOfUriAsText);
  URI classUri = this.repository.getValueFactory().createURI(startingClass);
  try {
    RepositoryConnection repoConn = this.repository.getConnection();
    try {
      RepositoryResult<Statement> results =
          repoConn.getStatements(classUri, subClassUri, null, useInference);
      try {
        while (results.hasNext()) {
          Statement tempResult = results.next();
          // Skip anonymous (blank node) super-classes such as restrictions.
          if (!(tempResult.getObject() instanceof BNode)) {
            superClasses.add(tempResult.getObject().stringValue());
          }
        }
      } finally {
        results.close(); // FIX: result cursor was previously never closed
      }
    } finally {
      repoConn.close(); // FIX: connection leaked if iteration threw
    }
  } catch (RepositoryException ex) {
    ex.printStackTrace();
  }
  return superClasses;
}
private ModelAndView getUpdateNamespaceResult(HttpServletRequest request, String prefix) throws IOException, ClientHTTPException, ServerHTTPException { String namespace = IOUtil.readString(request.getReader()); namespace = namespace.trim(); if (namespace.length() == 0) { throw new ClientHTTPException(SC_BAD_REQUEST, "No namespace name found in request body"); } // FIXME: perform some sanity checks on the namespace string try { RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request); synchronized (repositoryCon) { repositoryCon.setNamespace(prefix, namespace); } } catch (RepositoryException e) { throw new ServerHTTPException("Repository error: " + e.getMessage(), e); } return new ModelAndView(EmptySuccessView.getInstance()); }
/**
 * Transforms the raw harvested graph into a DCAT catalog description: looks
 * up the raw catalog resource, links a newly minted catalog URI to it, types
 * it as dcat:Catalog, then copies catalog attributes and extracts dataset
 * information. The generated catalog URL is also stashed in the context's
 * custom data under "dcatTransformerGraph". Any failure is recorded as a
 * warning (where a message is available) and rethrown as TransformException.
 *
 * @param repository repository holding the raw and target graphs
 * @param graph the graph being transformed
 * @param context transform context (warnings, custom data)
 * @throws TransformException if no catalog is found in the raw data or any
 *     repository/query/update step fails
 */
@Override
public void transform(Repository repository, URI graph, TransformContext context)
    throws TransformException {
  String catalogUrl = odsUrl + "id/catalog/" + catalogIdentifier + '/';
  context.getCustomData().put("dcatTransformerGraph", catalogUrl);
  Collection<String> warnings = context.getWarnings();
  URI catalogUri = valueFactory.createURI(catalogUrl);
  Value rawGraph = getRawGraph(repository, graph);
  if (rawGraph == null) {
    warnings.add("no catalog found");
    throw new TransformException("no catalog found in raw data");
  }
  try {
    RepositoryConnection connection = repository.getConnection();
    try {
      // Link the minted catalog URI to the raw catalog and type it as dcat:Catalog.
      connection.add(
          valueFactory.createStatement(catalogUri, rawCatalogPredicate, rawGraph), graph);
      connection.add(
          valueFactory.createStatement(
              catalogUri, LODMSPredicates.RDFTYPE, LODMSPredicates.DCAT_CATALOG),
          graph);
      copyCatalogAttributes(graph, catalogUri, rawGraph, connection);
      extractDatasetInfo(graph, catalogUri, rawGraph, connection);
    } catch (RepositoryException e) {
      warnings.add(e.getMessage());
      throw new TransformException(e.getMessage(), e);
    } catch (MalformedQueryException e) {
      warnings.add(e.getMessage());
      throw new TransformException(e.getMessage(), e);
    } catch (UpdateExecutionException e) {
      warnings.add(e.getMessage());
      throw new TransformException(e.getMessage(), e);
    } finally {
      connection.close();
    }
  } catch (Exception e) {
    // Also wraps close() failures and rethrown TransformExceptions.
    throw new TransformException(e.getMessage(), e);
  }
}
/** * Load data in specified graph (use default graph if contexts is null) * * @param filePath * @param format * @param contexts */ public void loadDataFromFile(String filePath, RDFFormat format, Resource... contexts) { RepositoryConnection con; try { con = currentRepository.getConnection(); try { // upload a file File f = new File(filePath); con.add(f, null, format, contexts); } catch (RDFParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (RepositoryException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { try { con.close(); } catch (RepositoryException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } catch (RepositoryException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }
public void loadDataFromURL(String stringURL) { RepositoryConnection con; try { con = currentRepository.getConnection(); try { // upload a URL URL url = new URL(stringURL); con.add(url, null, RDFFormat.RDFXML); } catch (RDFParseException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (RepositoryException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { try { con.close(); } catch (RepositoryException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } catch (RepositoryException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }
/**
 * Looks up the namespace bound to the given prefix and returns it wrapped in
 * a simple response view.
 *
 * @throws ClientHTTPException if the prefix is not defined
 * @throws ServerHTTPException if the repository lookup fails
 */
private ModelAndView getExportNamespaceResult(HttpServletRequest request, String prefix)
    throws ServerHTTPException, ClientHTTPException {
  String namespace = null;
  try {
    RepositoryConnection conn = RepositoryInterceptor.getRepositoryConnection(request);
    // The connection is shared per request; serialize access to it.
    synchronized (conn) {
      namespace = conn.getNamespace(prefix);
    }
  } catch (RepositoryException e) {
    throw new ServerHTTPException("Repository error: " + e.getMessage(), e);
  }
  if (namespace == null) {
    throw new ClientHTTPException(SC_NOT_FOUND, "Undefined prefix: " + prefix);
  }
  Map<String, Object> model = new HashMap<String, Object>();
  model.put(SimpleResponseView.CONTENT_KEY, namespace);
  return new ModelAndView(SimpleResponseView.getInstance(), model);
}
/**
 * Resolve/Redirect access to /user/* uris.
 *
 * @param login the login of the user to redirect to
 * @param types header param of accepted mime-types
 * @return a redirect to the user-resource in the resource service. @HTTP 404 if no such user
 *     exists. @HTTP 303 on success @HTTP 400 if no valid resource uri could be built with the
 *     login @HTTP 500 on other exceptions
 */
@GET
@Path("/{login:[^#?]+}")
public Response getUser(@PathParam("login") String login, @HeaderParam("Accept") String types) {
  if (login.equals("me")) {
    return get();
  } else {
    try {
      RepositoryConnection conn = sesameService.getConnection();
      try {
        final URI user = userService.getUser(login);
        if (user == null)
          return Response.status(Status.NOT_FOUND)
              .entity(String.format("User %s not found", login))
              .build();
        java.net.URI u =
            new java.net.URI(
                configurationService.getServerUri()
                    + "resource?uri="
                    + URLEncoder.encode(user.stringValue(), "utf-8"));
        return Response.seeOther(u).header("Accept", types).build();
      } finally {
        // FIX: was `conn.commit(); conn.commit();` — the duplicated commit
        // meant the connection was never closed (leak on every request).
        conn.commit();
        conn.close();
      }
    } catch (URISyntaxException e) {
      return Response.status(Status.BAD_REQUEST)
          .entity(String.format("Invalid URI: %s", e.getMessage()))
          .build();
    } catch (UnsupportedEncodingException e) {
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    } catch (RepositoryException e) {
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    }
  }
}