/**
 * Term constants for the DC music ontology
 *
 * @author Michael Grove
 * @since 0.1
 */
public class MusicOntology extends Vocabulary {
    public static final String ONT_URI = "http://purl.org/ontology/mo/";

    private static MusicOntology INSTANCE = null;

    private MusicOntology() {
        super(ONT_URI);
    }

    public static MusicOntology ontology() {
        if (INSTANCE == null) {
            INSTANCE = new MusicOntology();
        }
        return INSTANCE;
    }

    // properties
    public final URI track = term("track");
    public final URI release_type = term("release_type");
    public final URI release_status = term("release_status");
    public final URI track_number = term("track_number");
    public final URI length = term("length");
    public final URI made = term("made");
    public final URI musicbrainz = term("musicbrainz");
    public final URI olga = term("olga");
    public final URI genre = term("genre");
    public final URI sample_rate = term("sample_rate");
    public final URI bitsPerSample = term("bitsPerSample");

    // cp properties
    public final URI rating = term("rating");
    public final URI albumRating = term("albumRating");
    public final URI year = term("year");
    public final URI location = term("location");

    // classes
    public final URI Genre = term("Genre");
    public final URI Record = term("Record");
    public final URI Track = term("Track");
    public final URI MusicArtist = term("MusicArtist");
    public final URI MusicGroup = term("MusicGroup");

    // individuals
    public final URI Metal = FACTORY.createURI(Genre.stringValue() + "/Metal");
    public final URI Rock = FACTORY.createURI(Genre.stringValue() + "/Rock");
    public final URI Alternative = FACTORY.createURI(Genre.stringValue() + "/Alternative");
    public final URI Pop = FACTORY.createURI(Genre.stringValue() + "/Pop");
    public final URI Punk = FACTORY.createURI(Genre.stringValue() + "/Punk");
    public final URI Funk = FACTORY.createURI(Genre.stringValue() + "/Funk");
    public final URI Soundtrack = FACTORY.createURI(Genre.stringValue() + "/Soundtrack");
    public final URI Blues = FACTORY.createURI(Genre.stringValue() + "/Blues");
    public final URI Jazz = FACTORY.createURI(Genre.stringValue() + "/Jazz");
    public final URI Vocal = FACTORY.createURI(Genre.stringValue() + "/Vocal");
    public final URI Country = FACTORY.createURI(Genre.stringValue() + "/Country");

    public final URI album = term("album");
    public final URI official = term("official");
}
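// Usage sketch (not from the original source): how the singleton vocabulary above
// might be used to mint statements. ValueFactoryImpl, RDF.TYPE, and createStatement
// are standard Sesame/OpenRDF APIs; the album URI is illustrative only.
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.vocabulary.RDF;

public class MusicOntologyExample {
    public static void main(String[] args) {
        ValueFactory factory = ValueFactoryImpl.getInstance();
        MusicOntology mo = MusicOntology.ontology();
        URI album = factory.createURI("http://example.org/albums/1");
        // Type the resource as a mo:Record and attach one of the genre individuals.
        Statement type = factory.createStatement(album, RDF.TYPE, mo.Record);
        Statement genre = factory.createStatement(album, mo.genre, mo.Rock);
        System.out.println(type + "\n" + genre);
    }
}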
private static ReferencingObjectMap extractReferencingObjectMap(
        SesameDataSet r2rmlMappingGraph,
        Resource object,
        Set<GraphMap> graphMaps,
        Map<Resource, TriplesMap> triplesMapResources)
        throws InvalidR2RMLStructureException, InvalidR2RMLSyntaxException {
    log.debug("[RMLMappingFactory:extractReferencingObjectMap] Extract referencing object map..");
    URI parentTriplesMap =
        (URI) extractValueFromTermMap(r2rmlMappingGraph, object, R2RMLTerm.PARENT_TRIPLES_MAP);
    Set<JoinCondition> joinConditions = extractJoinConditions(r2rmlMappingGraph, object);
    if (parentTriplesMap == null && !joinConditions.isEmpty()) {
        throw new InvalidR2RMLStructureException(
            "[RMLMappingFactory:extractReferencingObjectMap] "
                + object.stringValue()
                + " has no parentTriplesMap defined whereas one or more joinConditions exist:"
                + " exactly one parentTriplesMap is required.");
    }
    if (parentTriplesMap == null && joinConditions.isEmpty()) {
        log.debug(
            "[RMLMappingFactory:extractReferencingObjectMap] This object map is not a referencing object map.");
        return null;
    }
    // Extract the parent triples map.
    boolean contains = false;
    TriplesMap parent = null;
    for (Resource triplesMapResource : triplesMapResources.keySet()) {
        if (triplesMapResource.stringValue().equals(parentTriplesMap.stringValue())) {
            contains = true;
            parent = triplesMapResources.get(triplesMapResource);
            log.debug(
                "[RMLMappingFactory:extractReferencingObjectMap] Parent triples map found : "
                    + triplesMapResource.stringValue());
            break;
        }
    }
    if (!contains) {
        throw new InvalidR2RMLStructureException(
            "[RMLMappingFactory:extractReferencingObjectMap] "
                + object.stringValue()
                + " reference to parent triples map is broken : "
                + parentTriplesMap.stringValue()
                + " not found.");
    }
    // The link between this referencing object map and its parent triples map
    // is established at the end of processing.
    ReferencingObjectMap refObjectMap = new StdReferencingObjectMap(null, parent, joinConditions);
    log.debug(
        "[RMLMappingFactory:extractReferencingObjectMap] Extract referencing object map done.");
    return refObjectMap;
}
private Pair toPair(Edge e, Graph graph) {
    URI predicate = e.getPredicate();
    Value object = e.getObject();
    String value = null;
    if (object instanceof Literal) {
        Literal literal = (Literal) object;
        String language = literal.getLanguage();
        URI type = literal.getDatatype();
        // Treat xsd:string as a plain literal; getDatatype() may also be null.
        if (type != null && type.equals(XMLSchema.STRING)) {
            type = null;
        }
        StringBuilder builder = new StringBuilder();
        builder.append('"');
        builder.append(literal.getLabel());
        builder.append('"');
        if (language != null) {
            builder.append('@');
            builder.append(language);
        } else if (type != null) {
            builder.append("^^"); // the Turtle/N-Triples datatype separator is "^^"
            builder.append(type.stringValue());
        }
        value = builder.toString();
    } else if (object instanceof URI) {
        value = object.stringValue();
    } else {
        Resource id = (Resource) object;
        Vertex v = graph.getVertex(id);
        value = createHash(predicate, v);
    }
    return new Pair(predicate, value);
}
/**
 * Insert a Triple/Statement into the graph.
 *
 * @param s subject URIref
 * @param p predicate URIref
 * @param o value object (URIref or Literal)
 * @param contexts varArgs context objects (the default graph is used if null)
 */
public void add(Resource s, URI p, Value o, Resource... contexts) {
    if (log.isDebugEnabled()) {
        log.debug(
            "[SesameDataSet:add] Add triple ("
                + s.stringValue() + ", " + p.stringValue() + ", " + o.stringValue() + ").");
    }
    try {
        RepositoryConnection con = currentRepository.getConnection();
        try {
            ValueFactory myFactory = con.getValueFactory();
            Statement st = myFactory.createStatement(s, p, o);
            con.add(st, contexts);
            con.commit();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            con.close();
        }
    } catch (Exception e) {
        log.error("[SesameDataSet:add] Could not open a repository connection.", e);
    }
}
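// Usage sketch for add(...) above (hedged: the no-arg SesameDataSet constructor
// shown here is assumed, it is not part of this snippet set). Passing no contexts
// stores the triple in the default graph.
SesameDataSet dataSet = new SesameDataSet(); // hypothetical in-memory data set
ValueFactory vf = ValueFactoryImpl.getInstance();
dataSet.add(
    vf.createURI("http://example.org/book/1"),
    vf.createURI("http://purl.org/dc/terms/title"),
    vf.createLiteral("RDF Primer"));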
public boolean equals(final Object o) {
    if (this == o) {
        return true;
    }
    if (o instanceof FullyInlineURIIV<?>) {
        return uri.stringValue().equals(((FullyInlineURIIV<?>) o).stringValue());
    }
    return false;
}
public boolean matches(final Vertex vertex, final Value value) {
    String kind = (String) vertex.getProperty(KIND);
    String val = (String) vertex.getProperty(VALUE);
    if (value instanceof URI) {
        return kind.equals(URI) && val.equals(value.stringValue());
    } else if (value instanceof Literal) {
        if (kind.equals(LITERAL)) {
            if (!val.equals(((Literal) value).getLabel())) {
                return false;
            }
            String type = (String) vertex.getProperty(TYPE);
            String lang = (String) vertex.getProperty(LANG);
            URI vType = ((Literal) value).getDatatype();
            String vLang = ((Literal) value).getLanguage();
            // Match plain literals, or literals agreeing on datatype or language tag.
            return (null == type && null == vType && null == lang && null == vLang)
                || (null != type && null != vType && type.equals(vType.stringValue()))
                || (null != lang && null != vLang && lang.equals(vLang));
        } else {
            return false;
        }
    } else if (value instanceof BNode) {
        return kind.equals(BNODE) && ((BNode) value).getID().equals(val);
    } else {
        throw new IllegalStateException("value of unexpected kind: " + value);
    }
}
private Collection<String> getEntryForUriSuffix(Map<URI, Collection<String>> map, String suffix) {
    for (URI resource : map.keySet()) {
        if (resource.stringValue().endsWith(suffix)) {
            return map.get(resource);
        }
    }
    return null;
}
public static void setLabel(
        RepositoryConnection repCon,
        org.openrdf.model.URI concept,
        org.openrdf.model.URI labelType,
        Literal newlabel,
        PersistAndNotifyProvider persistAndNotifyProvider)
        throws RepositoryException {
    repCon.add(concept, labelType, newlabel);
    persistAndNotifyProvider.persistAndNotify(
        Helper.createChangeSetModel(
            concept.stringValue(),
            labelType.stringValue(),
            newlabel,
            ChangeTripleService.CHANGETYPE_ADD),
        true);
}
private Response get(URI user) {
    if (userService.isAnonymous(user)) {
        AccountPoJo apj = new AccountPoJo(Namespaces.ANONYMOUS_LOGIN, user.stringValue());
        return Response.ok(apj, Namespaces.MIME_TYPE_JSON)
            .location(java.net.URI.create(user.stringValue()))
            .build();
    }
    try {
        RepositoryConnection conn = sesameService.getConnection();
        try {
            final UserAccount a = accountService.getAccount(user);
            if (a != null) {
                AccountPoJo apj = new AccountPoJo(a.getLogin(), a.getWebId());
                apj.setRoles(a.getRoles());
                for (Statement t :
                        ResourceUtils.listOutgoing(conn, conn.getValueFactory().createURI(a.getWebId()))) {
                    String prop = t.getPredicate().stringValue();
                    if (prop.startsWith(Namespaces.NS_FOAF)) {
                        Value object = t.getObject();
                        if (object instanceof org.openrdf.model.URI) {
                            apj.setFoaf(prop, String.format("<%s>", object));
                        } else if (object instanceof Literal) {
                            apj.setFoaf(prop, object.toString());
                        }
                    }
                }
                return Response.ok(apj, Namespaces.MIME_TYPE_JSON)
                    .location(java.net.URI.create(user.stringValue()))
                    .build();
            }
            return Response.status(Status.NOT_FOUND)
                .entity("Could not find account data of " + user)
                .build();
        } finally {
            conn.commit();
            conn.close();
        }
    } catch (RepositoryException e) {
        // This must not happen!
        return Response.serverError().entity(e).build();
    }
}
@Override
public int compareTo(Pair other) {
    int result = predicate.stringValue().compareTo(other.predicate.stringValue());
    if (result == 0) {
        result = value.compareTo(other.value);
    }
    return result;
}
private void reify(URI activity, URI entity, Resource subj, URI pred, Value obj, Resource ctx)
        throws SailException {
    String ns = activity.stringValue();
    if (ctx instanceof URI) {
        super.addStatement(activity, informedBy, ctx, activity);
    }
    if (entity == null || GENERATED_BY.equals(pred.stringValue())) {
        return;
    }
    URI operation = vf.createURI(ns + "#" + hash(ctx, entity));
    if (ctx instanceof URI) {
        super.addStatement(ctx, qualifiedUsage, operation, activity);
    }
    super.addStatement(activity, qualifiedUsage, operation, activity);
    super.addStatement(operation, usedEntity, entity, activity);
    Resource node = vf.createBNode();
    super.addStatement(operation, changed, node, activity);
    super.addStatement(node, subject, subj, activity);
    super.addStatement(node, predicate, pred, activity);
    super.addStatement(node, object, obj, activity);
}
@SuppressWarnings("unchecked") public V asValue(final LexiconRelation lex) { V v = getValueCache(); if (v == null) { final BigdataValueFactory f = lex.getValueFactory(); v = (V) f.createURI(uri.stringValue()); v.setIV(this); setValue(v); } return v; }
@Override
public int _compareTo(final IV o) {
    final FullyInlineURIIV<?> t = (FullyInlineURIIV<?>) o;
    return IVUnicode.IVUnicodeComparator.INSTANCE.compare(uri.stringValue(), t.uri.stringValue());
}
/**
 * LDP-style serialization of a resource.
 *
 * @param writer the writer to serialize to
 * @param subject the resource to serialize
 * @param iteration the Iteration containing the data
 * @throws RDFHandlerException
 * @throws RepositoryException
 */
public static void exportIteration(
        RDFWriter writer, URI subject, CloseableIteration<Statement, RepositoryException> iteration)
        throws RDFHandlerException, RepositoryException {
    writer.startRDF();

    writer.handleNamespace(LDP.PREFIX, LDP.NAMESPACE);
    writer.handleNamespace(RDF.PREFIX, RDF.NAMESPACE);
    writer.handleNamespace(XSD.PREFIX, XSD.NAMESPACE);
    writer.handleNamespace(DCTERMS.PREFIX, DCTERMS.NAMESPACE);

    writer.handleNamespace("parent", subject.getNamespace());
    writer.handleNamespace("child", subject.stringValue().replaceFirst("/*$", "/"));
    writer.handleNamespace("this", subject.stringValue().replaceFirst("/*$", "#"));

    while (iteration.hasNext()) {
        writer.handleStatement(iteration.next());
    }

    writer.endRDF();
}
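// Usage sketch for exportIteration(...) above. Rio.createWriter, getStatements,
// and RepositoryResult (a CloseableIteration<Statement, RepositoryException>)
// are standard Sesame APIs; "conn" is an assumed open RepositoryConnection and
// the subject URI is illustrative.
URI subject = conn.getValueFactory().createURI("http://example.org/container/");
RDFWriter writer = Rio.createWriter(RDFFormat.TURTLE, System.out);
RepositoryResult<Statement> statements = conn.getStatements(subject, null, null, true);
try {
    exportIteration(writer, subject, statements);
} finally {
    statements.close();
}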
@Override
public void addUrlImport(final URI nextURLImport) {
    this.urlImports.add(nextURLImport);
    final OntModel nextModel = SpinUtils.loadModelFromUrl(nextURLImport.stringValue());
    if (nextModel != null) {
        SpinInferencingRuleImpl.log.info(
            "adding model to registry and ontology model list nextImport="
                + nextURLImport.stringValue()
                + " nextModel.size()=" + nextModel.size());
        this.ontologyModels.add(nextModel);
        this.getSpinModuleRegistry().registerAll(nextModel, nextURLImport.stringValue());
    } else {
        SpinInferencingRuleImpl.log.error(
            "Failed to load import from URL nextURLImport=" + nextURLImport.stringValue());
    }
    this.getSpinModuleRegistry().init();
}
public int byteLength() {
    if (byteLength == 0) {
        // Cache the byteLength if not yet set.
        byteLength = 1 // flags
            + IVUnicode.byteLengthUnicode(uri.stringValue());
    }
    return byteLength;
}
public synchronized BlobObject getBlobObject(final String uri) throws RepositoryException {
    if (blobs == null) {
        throw new RepositoryException("No configured blob store");
    }
    try {
        if (blobVersion == null && isAutoCommit()) {
            return blobs.open(uri);
        } else if (blobVersion == null) {
            URI version = getVersionBundle();
            if (version == null) {
                blobVersion = blobs.newVersion();
            } else {
                blobVersion = blobs.newVersion(version.stringValue());
            }
            return blobVersion.open(uri);
        } else {
            return blobVersion.open(uri);
        }
    } catch (IOException exc) {
        throw new RepositoryException(exc);
    }
}
protected void testValueRoundTrip(Resource subj, URI pred, Value obj) throws Exception {
    con.begin();
    con.addStatement(subj, pred, obj);
    con.commit();

    CloseableIteration<? extends Statement, SailException> stIter =
        con.getStatements(null, null, null, false);
    try {
        assertTrue(stIter.hasNext());
        Statement st = stIter.next();
        assertEquals(subj, st.getSubject());
        assertEquals(pred, st.getPredicate());
        assertEquals(obj, st.getObject());
        assertFalse(stIter.hasNext());
    } finally {
        stIter.close();
    }

    ParsedTupleQuery tupleQuery =
        QueryParserUtil.parseTupleQuery(
            QueryLanguage.SERQL,
            "SELECT S, P, O FROM {S} P {O} WHERE P = <" + pred.stringValue() + ">",
            null);
    CloseableIteration<? extends BindingSet, QueryEvaluationException> iter;
    iter = con.evaluate(tupleQuery.getTupleExpr(), null, EmptyBindingSet.getInstance(), false);
    try {
        assertTrue(iter.hasNext());
        BindingSet bindings = iter.next();
        assertEquals(subj, bindings.getValue("S"));
        assertEquals(pred, bindings.getValue("P"));
        assertEquals(obj, bindings.getValue("O"));
        assertFalse(iter.hasNext());
    } finally {
        iter.close();
    }
}
/**
 * Extract content URIs from a term type resource.
 *
 * @return the set of extracted URIs, or null if the term map contains none
 * @throws InvalidR2RMLStructureException
 */
private static Set<URI> extractURIsFromTermMap(
        SesameDataSet r2rmlMappingGraph, Resource termType, R2RMLTerm term)
        throws InvalidR2RMLStructureException {
    URI p = getTermURI(r2rmlMappingGraph, term);
    List<Statement> statements = r2rmlMappingGraph.tuplePattern(termType, p, null);
    if (statements.isEmpty()) {
        return null;
    }
    Set<URI> uris = new HashSet<URI>();
    for (Statement statement : statements) {
        URI uri = (URI) statement.getObject();
        log.debug(
            "[RMLMappingFactory:extractURIsFromTermMap] Extracted " + term + " : " + uri.stringValue());
        uris.add(uri);
    }
    return uris;
}
private RDFValueRange fillObjectRange(Value oVal) {
    if (oVal instanceof URI) {
        URI uri = (URI) oVal;
        PrefixRange pr = new PrefixRange();
        pr.getPrefixList().add(uri.stringValue());
        return new RDFValueRange(
            new RDFURIRange(pr.getPrefixList()),
            new RDFLiteralRange(Collections.<URI, RangeLength<?>>emptyMap()));
    } else if (oVal instanceof Literal) {
        Literal l = (Literal) oVal;
        Range literalRange = null;
        if (l.getDatatype().equals(XMLSchema.INT)) {
            literalRange =
                new RDFLiteralRange(XMLSchema.INT, new IntervalRange(l.intValue(), l.intValue()));
        } else if (l.getDatatype().equals(XMLSchema.LONG)) {
            // Note: IntervalRange takes ints, so long values are truncated here.
            literalRange =
                new RDFLiteralRange(
                    XMLSchema.LONG, new IntervalRange((int) l.longValue(), (int) l.longValue()));
        } else if (l.getDatatype().equals(XMLSchema.STRING)) {
            PrefixRange pr = new PrefixRange();
            pr.getPrefixList().add(l.stringValue());
            literalRange = new RDFLiteralRange(XMLSchema.STRING, pr);
        } else if (l.getDatatype().equals(XMLSchema.DATETIME)) {
            Calendar cal = l.calendarValue().toGregorianCalendar();
            CalendarRange cr = new CalendarRange(cal.getTime(), cal.getTime());
            literalRange = new RDFLiteralRange(XMLSchema.DATETIME, cr);
        }
        if (literalRange != null) {
            return new RDFValueRange(
                new RDFURIRange(Collections.<String>emptyList()), (RDFLiteralRange) literalRange);
        }
    }
    return new RDFValueRange();
}
/**
 * Resolve/Redirect access to /user/* URIs.
 *
 * @param login the login of the user to redirect to
 * @param types header param of accepted mime-types
 * @return a redirect to the user-resource in the resource service.
 *     @HTTP 404 if no such user exists.
 *     @HTTP 303 on success.
 *     @HTTP 400 if no valid resource uri could be built with the login.
 *     @HTTP 500 on other exceptions.
 */
@GET
@Path("/{login:[^#?]+}")
public Response getUser(@PathParam("login") String login, @HeaderParam("Accept") String types) {
    if (login.equals("me")) {
        return get();
    } else {
        try {
            RepositoryConnection conn = sesameService.getConnection();
            try {
                final URI user = userService.getUser(login);
                if (user == null) {
                    return Response.status(Status.NOT_FOUND)
                        .entity(String.format("User %s not found", login))
                        .build();
                }
                java.net.URI u =
                    new java.net.URI(
                        configurationService.getServerUri()
                            + "resource?uri="
                            + URLEncoder.encode(user.stringValue(), "utf-8"));
                return Response.seeOther(u).header("Accept", types).build();
            } finally {
                conn.commit();
                conn.close();
            }
        } catch (URISyntaxException e) {
            return Response.status(Status.BAD_REQUEST)
                .entity(String.format("Invalid URI: %s", e.getMessage()))
                .build();
        } catch (UnsupportedEncodingException e) {
            return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
        } catch (RepositoryException e) {
            return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
        }
    }
}
public String toString() {
    URI uri = getVersionBundle();
    if (uri == null) {
        return getDelegate().toString();
    }
    return uri.stringValue();
}
/**
 * Asynchronously explains the triple ({@code subject}, {@code predicate}, {@code object}).
 *
 * @param subject The subject
 * @param predicate The predicate
 * @param object The object
 * @param callback The {@link Callback} to execute after the explanation is done
 */
public void property(Resource subject, URI predicate, Resource object, Callback<Graph> callback) {
    String query =
        selectQuery(subject.stringValue(), predicate.stringValue(), object.stringValue());
    query(query, callback);
}
/**
 * Explains the triple ({@code subject}, {@code predicate}, {@code object}).
 *
 * @param subject The subject
 * @param predicate The predicate
 * @param object The object
 * @return The explanation {@link org.openrdf.model.Graph Graph}
 * @throws PelletClientException if there is an error while querying
 */
public Graph property(Resource subject, URI predicate, Resource object)
        throws PelletClientException {
    String query =
        selectQuery(subject.stringValue(), predicate.stringValue(), object.stringValue());
    return query(query);
}
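// Usage sketch for the synchronous property(...) above (hedged: the class owning
// this method and its selectQuery/query helpers are not shown, so this call is
// illustrative). Requests an explanation of an rdfs:subClassOf triple.
ValueFactory vf = ValueFactoryImpl.getInstance();
Graph explanation =
    property(
        vf.createURI("http://example.org/Dog"),
        vf.createURI("http://www.w3.org/2000/01/rdf-schema#subClassOf"),
        vf.createURI("http://example.org/Animal"));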
/**
 * Reduction of the set of vertices composing the graph.
 *
 * <p>-------------------------------------------------------------------
 * IMPORTANT: If modified, this documentation must also be modified in the class GActionType.
 * -------------------------------------------------------------------
 *
 * <p>Accepted parameters are:
 *
 * <ul>
 *   <li>regex: specify a REGEX in Java syntax which will be used to test if the value associated
 *       to a vertex makes it eligible to be removed. If the value matches the REGEX, the vertex
 *       will be removed.
 *   <li>vocabulary: remove all the vertices associated to the specified vocabularies. Accepted
 *       vocabulary flags are RDF, RDFS, OWL. Several vocabularies can be specified using a comma
 *       separator.
 *   <li>file_uris: specify a list of files containing the URIs corresponding to the vertices to
 *       remove. Multiple files can be specified using a comma separator.
 * </ul>
 *
 * @param factory the factory to consider if elements require to be generated (e.g. {@link URI})
 * @param action the action to perform
 * @param g the graph on which the action must be performed
 * @throws SLIB_Ex_Critic
 */
private static void verticeReduction(URIFactory factory, GAction action, G g)
        throws SLIB_Ex_Critic {
    logger.info("-------------------------------------");
    logger.info(" Vertices Reduction");
    logger.info("-------------------------------------");
    logger.info("Starting " + GActionType.VERTICES_REDUCTION);

    String regex = (String) action.getParameter("regex");
    String vocVal = (String) action.getParameter("vocabulary");
    String file_uris = (String) action.getParameter("file_uris");
    String rootURIs = (String) action.getParameter("root_uri");

    Set<URI> classes = GraphAccessor.getClasses(g);
    Set<URI> instances = GraphAccessor.getInstances(g);

    logger.info("Classes  : " + classes.size());
    logger.info("Instances: " + instances.size());
    logger.info("Vertices : " + g.getV().size());

    Set<URI> toRemove = new HashSet<URI>();

    if (rootURIs != null) {
        /*
         * Reduce the graph considering all classes subsumed by the given root vertex.
         * Instances annotated by those classes are also kept in the graph; others are removed.
         */
        logger.info(
            "Applying reduction of the part of the graph " + g.getURI()
                + " which is not contained in the graph induced by " + rootURIs
                + " (only the classes subsumed by the given root are considered)");
        try {
            URI rootURI = factory.getURI(rootURIs);
            if (!g.containsVertex(rootURI)) {
                throw new SLIB_Ex_Critic(
                    "Error cannot state vertex associated to URI " + rootURI + " in graph " + g.getURI());
            }
            DescendantEngine descEngine = new DescendantEngine(g);
            Set<URI> descsInclusive = descEngine.getDescendantsInc(rootURI);
            logger.info(descsInclusive.size() + " subclasses of " + rootURI + " detected");

            int classesNb = classes.size();
            Set<URI> classesToRemove = classes;
            classesToRemove.removeAll(descsInclusive);
            logger.info("Removing " + classesToRemove.size() + "/" + classesNb + " classes of the graph");
            g.removeV(classesToRemove);

            // We then remove the instances which are no longer
            // linked to the graph's current underlying taxonomic graph.
            Set<URI> instancesToRemove = new HashSet<URI>();
            for (URI v : instances) {
                // No links to the taxonomic graph anymore;
                // we check the URI since it is not considered as both instance and class.
                if (!descsInclusive.contains(v) && g.getV(v, RDF.TYPE, Direction.OUT).isEmpty()) {
                    instancesToRemove.add(v);
                }
            }
            logger.info("Removing " + instancesToRemove.size() + " instances of the graph");
            g.removeV(instancesToRemove);
        } catch (IllegalArgumentException e) {
            throw new SLIB_Ex_Critic(
                "Error value specified for parameter root_uri, i.e. " + rootURIs
                    + " cannot be converted into an URI");
        }
    } else if (regex != null) {
        logger.info("Applying regex: " + regex);
        Pattern pattern;
        try {
            pattern = Pattern.compile(regex);
        } catch (PatternSyntaxException e) {
            throw new SLIB_Ex_Critic("The specified regex '" + regex + "' is invalid: " + e.getMessage());
        }
        Matcher matcher;
        for (URI v : g.getV()) {
            matcher = pattern.matcher(v.stringValue());
            if (matcher.find()) {
                toRemove.add(v);
                logger.debug("regex matches: " + v);
            }
        }
        logger.info("Vertices to remove: " + toRemove.size() + "/" + g.getV().size());
        g.removeV(toRemove);
        logger.debug("ending " + GActionType.VERTICES_REDUCTION);
    } else if (vocVal != null) {
        String[] vocs = vocVal.split(",");
        for (String voc : vocs) {
            if (voc.trim().equals("RDF")) {
                logger.info("Removing RDF vocabulary");
                removeVocURIs(factory, getRDFVocURIs(), g);
            } else if (voc.trim().equals("RDFS")) {
                logger.info("Removing RDFS vocabulary");
                removeVocURIs(factory, getRDFSVocURIs(), g);
            } else if (voc.trim().equals("OWL")) {
                logger.info("Removing OWL vocabulary");
                removeVocURIs(factory, getOWLVocURIs(), g);
            }
        }
    } else if (file_uris != null) {
        String[] files = file_uris.split(",");
        for (String f : files) {
            logger.info("Removing URIs specified in " + f);
            try {
                FileInputStream fstream = new FileInputStream(f.trim());
                DataInputStream in = new DataInputStream(fstream);
                BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String line;
                while ((line = br.readLine()) != null) {
                    line = line.trim();
                    g.removeV(factory.getURI(line));
                }
                in.close();
            } catch (IOException e) {
                throw new SLIB_Ex_Critic(e.getMessage());
            }
        }
    }
    logger.info("Vertices reduction performed");
    logger.info("-------------------------------------");
}
/** Implements {@link Value#stringValue()}. */
@Override
public String stringValue() {
    return uri.stringValue();
}
public static String uri(URI uri) {
    return uri(uri.stringValue());
}
public final String toString() {
    return uri.stringValue();
}
public BlobObject getBlobObject(URI uri) throws RepositoryException {
    return getBlobObject(uri.stringValue());
}
/**
 * Implementation using the org.json API.
 *
 * @param graph A Sesame Graph.
 * @return An RDF/JSON string if successful, otherwise null.
 */
public static String graphToRdfJson(Graph graph) {
    JSONObject result = new JSONObject();
    try {
        Set<Resource> subjects = new HashSet<Resource>();
        for (Statement s1 : graph) {
            subjects.add(s1.getSubject());
        }
        for (Resource subject : subjects) {
            JSONObject predicateObj = new JSONObject();
            Set<URI> predicates = new HashSet<URI>();
            Iterator<Statement> s2 = graph.match(subject, null, null);
            while (s2.hasNext()) {
                predicates.add(s2.next().getPredicate());
            }
            for (URI predicate : predicates) {
                JSONArray valueArray = new JSONArray();
                Iterator<Statement> stmnts = graph.match(subject, predicate, null);
                Set<Value> objects = new HashSet<Value>();
                while (stmnts.hasNext()) {
                    objects.add(stmnts.next().getObject());
                }
                for (Value object : objects) {
                    Iterator<Statement> stmnts2 = graph.match(subject, predicate, object);
                    JSONArray contexts = new JSONArray();
                    int i = 0;
                    boolean nonDefaultContext = false;
                    while (stmnts2.hasNext()) {
                        Resource context = stmnts2.next().getContext();
                        contexts.put(i, null == context ? null : context.toString());
                        if (null != context) {
                            nonDefaultContext = true;
                        }
                        i++;
                    }
                    JSONObject valueObj = new JSONObject();
                    valueObj.put("value", object.stringValue());
                    if (object instanceof Literal) {
                        valueObj.put("type", "literal");
                        Literal l = (Literal) object;
                        if (l.getLanguage() != null) {
                            valueObj.put("lang", l.getLanguage());
                        } else if (l.getDatatype() != null) {
                            valueObj.put("datatype", l.getDatatype().stringValue());
                        }
                    } else if (object instanceof BNode) {
                        valueObj.put("type", "bnode");
                    } else if (object instanceof URI) {
                        valueObj.put("type", "uri");
                    }
                    if (nonDefaultContext) {
                        valueObj.put("graphs", contexts);
                    }
                    valueArray.put(valueObj);
                }
                predicateObj.put(predicate.stringValue(), valueArray);
            }
            result.put(subject.stringValue(), predicateObj);
        }
        return result.toString(2);
    } catch (JSONException e) {
        log.error(e.getMessage(), e);
    }
    return null;
}
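// Usage sketch for graphToRdfJson(...) above. GraphImpl is the in-memory Graph
// implementation shipped with Sesame 2.x; the triple content is illustrative.
Graph graph = new GraphImpl();
ValueFactory vf = ValueFactoryImpl.getInstance();
graph.add(
    vf.createURI("http://example.org/book/1"),
    vf.createURI("http://purl.org/dc/terms/title"),
    vf.createLiteral("RDF Primer"));
String json = graphToRdfJson(graph);
System.out.println(json);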