@Override public List<Literal> getDataPropertyValuesForIndividualByProperty( String subjectUri, String propertyUri) { log.debug("Data property value query string:\n" + DATA_PROPERTY_VALUE_QUERY_STRING); log.debug("Data property value:\n" + dataPropertyValueQuery); QuerySolutionMap initialBindings = new QuerySolutionMap(); initialBindings.add("subject", ResourceFactory.createResource(subjectUri)); initialBindings.add("property", ResourceFactory.createResource(propertyUri)); // Run the SPARQL query to get the properties List<Literal> values = new ArrayList<Literal>(); DatasetWrapper w = dwf.getDatasetWrapper(); Dataset dataset = w.getDataset(); dataset.getLock().enterCriticalSection(Lock.READ); try { QueryExecution qexec = QueryExecutionFactory.create(dataPropertyValueQuery, dataset, initialBindings); ResultSet results = qexec.execSelect(); while (results.hasNext()) { QuerySolution soln = results.next(); RDFNode node = soln.get("value"); if (!node.isLiteral()) { continue; } Literal value = soln.getLiteral("value"); values.add(value); } } finally { dataset.getLock().leaveCriticalSection(); w.close(); } return values; }
/**
 * Checks whether the given user holds any of the requested permissions on the resource
 * identified by {@code uri}. A permission is granted either by a direct
 * (user, permission, resource) triple in the config model, or by a PERM.ResourceMatch
 * node whose PERM.matchExpression regex matches the URI. When the named user has no
 * grant, the check is retried once for the public principal.
 *
 * @param userPrefix namespace prefix prepended to the username to form the user URI
 * @param username the user to check, or null for the public principal
 * @param uri URI of the resource being accessed
 * @param permissions the permission properties that would each individually suffice
 * @return true if any requested permission is granted, directly or via regex match
 */
private static boolean hasPermission(
    String userPrefix, String username, String uri, Property[] permissions) {
  Set<Property> permissionSet = new HashSet<Property>();
  Resource resource = configModel.createResource(uri);
  Resource user;
  // Anonymous access is modelled by the shared PERM.Public principal.
  if (username == null) user = PERM.Public;
  else user = configModel.createResource(userPrefix + username);
  for (Property permission : permissions) {
    // Direct grant: exact (user, permission, resource) triple.
    if (configModel.contains(user, permission, resource)) return true;
    // Remember the requested permissions so the statement scan below can filter on them.
    permissionSet.add(permission);
  }
  // Scan the user's statements for regex-based ResourceMatch grants.
  StmtIterator stmts = user.listProperties();
  while (stmts.hasNext()) {
    Statement stmt = stmts.next();
    if (!permissionSet.contains(stmt.getPredicate())) continue;
    RDFNode resourceMatch = stmt.getObject();
    // Only objects explicitly typed as PERM.ResourceMatch participate in regex matching.
    if (!(resourceMatch.isResource()
        && configModel.contains((Resource) resourceMatch, RDF.type, PERM.ResourceMatch)))
      continue;
    RDFNode matchRegex = ((Resource) resourceMatch).getProperty(PERM.matchExpression).getObject();
    if (matchRegex == null || !matchRegex.isLiteral()) continue;
    try {
      if (uri.matches(((Literal) matchRegex).getString())) return true;
    } catch (PatternSyntaxException e) {
      // Deliberately ignored: a malformed match expression simply never matches.
    }
  }
  // Fall back to public permissions exactly once (username becomes null, ending recursion).
  if (username != null) return hasPermission(userPrefix, null, uri, permissions);
  else return false;
}
/**
 * Query SPARQL endpoint with a SELECT query.
 *
 * @param qExec QueryExecution encapsulating the query; must project ?s ?p ?o
 * @return model assembled from the (?s ?p ?o) bindings of the result set
 */
private Model getSelectModel(QueryExecution qExec) {
  Model model = ModelFactory.createDefaultModel();
  Graph graph = model.getGraph();
  ResultSet results = qExec.execSelect();
  while (results.hasNext()) {
    QuerySolution sol = results.next();
    String subject;
    String predicate;
    RDFNode object;
    try {
      subject = sol.getResource("s").toString();
      predicate = sol.getResource("p").toString();
      object = sol.get("o");
    } catch (NoSuchElementException e) {
      logger.error("SELECT query does not return a (?s ?p ?o) Triple");
      continue;
    }
    Node objNode;
    if (object.isLiteral()) {
      Literal obj = object.asLiteral();
      objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
    } else {
      // FIX: resources and blank nodes were previously wrapped as literal nodes via
      // NodeFactory.createLiteral(object.toString()), producing a corrupt graph.
      // Reuse the underlying node, which preserves URI/blank-node identity.
      objNode = object.asNode();
    }
    graph.add(
        new Triple(NodeFactory.createURI(subject), NodeFactory.createURI(predicate), objNode));
  }
  return model;
}
private int getMaxRank(String objectUri, String subjectUri, VitroRequest vreq) { int maxRank = 0; // default value if (objectUri == null) { // adding new webpage String queryStr = QueryUtils.subUriForQueryVar(MAX_RANK_QUERY, "subject", subjectUri); log.debug("Query string is: " + queryStr); try { ResultSet results = QueryUtils.getQueryResults(queryStr, vreq); if (results != null && results.hasNext()) { // there is at most one result QuerySolution soln = results.next(); RDFNode node = soln.get("rank"); if (node != null && node.isLiteral()) { // node.asLiteral().getInt() won't return an xsd:string that // can be parsed as an int. int rank = Integer.parseInt(node.asLiteral().getLexicalForm()); if (rank > maxRank) { log.debug("setting maxRank to " + rank); maxRank = rank; } } } } catch (NumberFormatException e) { log.error("Invalid rank returned from query: not an integer value."); } catch (Exception e) { log.error(e, e); } } return maxRank; }
@SuppressWarnings("unchecked") private static List<RDFNode> getURIObjects() { List<RDFNode> objectsURIs = new ArrayList<RDFNode>(); if (demo) { // Deserialize the results if exists (For Demo purpose) if (useCache) { try { List<String> ser = new ArrayList<String>(); File file = new File("URIObjects.ser"); if (file.exists()) { ObjectInputStream in; in = new ObjectInputStream(new FileInputStream(file)); ser = (List<String>) in.readObject(); in.close(); // convert every object back from string for (String n : ser) { objectsURIs.add(ResourceFactory.createResource(n)); } return objectsURIs; } } catch (Exception e) { e.printStackTrace(); } } } // create a query to retrieve URIs objects String queryString = "SELECT * " + "WHERE { ?s ?p ?o . FILTER (isURI(?o)) . " + "FILTER (STRSTARTS(STR(?o), \"" + resourcePrefix + "\"))}"; Query query = QueryFactory.create(queryString); QueryExecution exec = QueryExecutionFactory.create(query, localModel); ResultSet rs = exec.execSelect(); while (rs.hasNext()) { QuerySolution sol = rs.next(); RDFNode object = sol.get("?o"); objectsURIs.add(object); } if (demo) { // serialize the output (for Demo purpose) try { FileOutputStream fileOut = new FileOutputStream("URIObjects.ser"); ObjectOutputStream out = new ObjectOutputStream(fileOut); // convert to Serializabe Strings List<String> l = new ArrayList<String>(); for (RDFNode n : objectsURIs) { l.add(n.toString()); } out.writeObject(l); out.close(); } catch (Exception e2) { e2.printStackTrace(); } } return objectsURIs; }
/**
 * Extracts the given variables from a query solution into a map, substituting
 * DUMMY_FOR_BNODE for blank-node values.
 *
 * @param qs the solution to read
 * @param vars the variable names to extract
 * @return map from variable name to its bound value; unbound variables are omitted
 */
public static Map<String, RDFNode> solutionMap(QuerySolution qs, List<String> vars) {
  Map<String, RDFNode> result = new HashMap<String, RDFNode>();
  for (String var : vars) {
    RDFNode val = qs.get(var);
    if (val == null) {
      // FIX: qs.get returns null for unbound variables; val.isAnon() previously NPE'd.
      continue;
    }
    result.put(var, val.isAnon() ? DUMMY_FOR_BNODE : val);
  }
  return result;
}
/**
 * Answer the string described by the value of the unique optional <code>classProperty</code>
 * property of <code>root</code>, or null if there's no such property. The value may be a URI, in
 * which case it must be a <b>java:</b> URI with content the class name; or it may be a literal,
 * in which case its lexical form is its class name; otherwise, BOOM.
 */
public static String getOptionalClassName(Resource root, Property classProperty) {
  RDFNode classNode = getUnique(root, classProperty);
  if (classNode == null) {
    return null;
  }
  if (classNode.isLiteral()) {
    return classNode.asNode().getLiteralLexicalForm();
  }
  if (classNode.isResource()) {
    return mustBeJava(classNode.asNode().getURI());
  }
  return null;
}
/** Resolves the countryIdentifier property into a Country value, or null when absent. */
@Override
public Country getCountryIdentifier() {
  RDFNode node = getPropertyValue(mdrDatabase.getVocabulary().countryIdentifier);
  if (node != null) {
    return Country.getByValue(node.asLiteral().getString());
  }
  logger.debug("LanguageIdentification does not have countryIdentifier");
  return null;
}
/**
 * Re-parents the children of a previously published version of this document onto the
 * current model (unpublishing children that are themselves being replaced), then recurses
 * over the whole subtree, stamping each child's KDR object with its newest published URL.
 *
 * @param modelTreeNode node whose model is the new version being published
 * @param oldModelUrl URL of the previously published version, or null if none exists
 * @throws IOException propagated from store access
 */
private void resolveChildren(ModelTreeNode modelTreeNode, String oldModelUrl)
    throws IOException {
  ZdoModel model = modelTreeNode.getModel();
  // If there is also a published version of this doc, find it, remove it and adopt all its
  // children
  if (oldModelUrl != null) {
    // Change its children to be ours
    String queryString =
        "SELECT ?subject WHERE {\n"
            + " ?subject <"
            + DCTerms.isPartOf.getURI()
            + "> <"
            + oldModelUrl
            + ">.\n"
            + "}";
    // NOTE(review): this QueryExecution is never closed — consider try-with-resources.
    QueryExecution queryExecution =
        QueryExecutionFactory.sparqlService(SPARQL_ENDPOINT, queryString);
    ResultSet resultSet = queryExecution.execSelect();
    while (resultSet.hasNext()) {
      QuerySolution querySolution = resultSet.next();
      RDFNode childNode = querySolution.get("subject");
      String childToAdoptUrl = childNode.asResource().getURI();
      ZdoModel childModel = store.get(childToAdoptUrl);
      // Point the adopted child at the new (transaction-stripped) parent URL.
      childModel.replaceValueOfProperty(
          DCTerms.isPartOf, store.removeTransactionFromUrl(model.getUrl()));
      // If this children was published
      if (ZdoGroup.ZDO.name().equals(childModel.get(ZdoTerms.group))) {
        // Is this children getting replaced by newer version?
        if (modelTreeNodeKdrIndex.containsKey(childModel.get(ZdoTerms.kdrObject))) {
          // Yes, unpublish it
          ZdoModel childKdrObject = store.get(childModel.get(ZdoTerms.kdrObject));
          markAsUnpublished(childModel, childKdrObject);
          store.update(childKdrObject);
        } else {
          // No, it should be added to our tree to solr
          ModelTreeNode childModelTreeNode = new ModelTreeNode();
          childModelTreeNode.setModel(childModel);
          modelTreeNodeKdrIndex.put(childModel.get(ZdoTerms.kdrObject), childModelTreeNode);
          // NOTE(review): this stores the PARENT node under the child's URL, while the KDR
          // index above stores the child node — confirm modelTreeNode (not
          // childModelTreeNode) is really intended here.
          modelTreeNodeIndex.put(
              store.removeTransactionFromUrl(childModel.getUrl()), modelTreeNode);
          modelTreeNode.getChildren().add(childModelTreeNode);
        }
      }
      store.update(childModel);
    }
  }
  // Recurse on children
  for (ModelTreeNode childNode : modelTreeNode.getChildren()) {
    // Get kdr version of this doc
    ZdoModel kdrDoc = store.get(childNode.getModel().get(ZdoTerms.kdrObject));
    resolveChildren(childNode, kdrDoc.get(ZdoTerms.newestPublished));
    kdrDoc.replaceValueOfProperty(
        ZdoTerms.newestPublished, store.removeTransactionFromUrl(childNode.getModel().getUrl()));
    store.update(kdrDoc);
  }
}
/** True when one of the property's rdf:type values is bind:AbstractProperty. */
private boolean isAbstract(OntProperty p) {
  for (RDFNode node : p.listPropertyValues(RDF.type).toList()) {
    if (!node.canAs(Resource.class)) {
      continue;
    }
    if (BindVocabulary.AbstractProperty.getURI().equals(node.asResource().getURI())) {
      return true;
    }
  }
  return false;
}
@Override public void processResults() { String queryBegin = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>" + "PREFIX mstr: <http://methodo-stat-tutor.com#>" + "SELECT ?NOTION " + "WHERE {"; String queryEnd = "}"; // create a query that asks for the color of the wine that // would go with each meal course String queryStr2 = queryBegin + " ?EXO mstr:traiteNotion ?NOTION ." + "FILTER (?EXO = mstr:" + this.exercise + ")\n" + queryEnd; log.error(queryStr2); Query query2 = QueryFactory.create(queryStr2); ResultSet results = SparqlDLExecutionFactory.create(query2, ontModel).execSelect(); ArrayList<java.net.URI> needNotion = new ArrayList<java.net.URI>(); ArrayList<java.net.URI> giveNotion = new ArrayList<java.net.URI>(); while (results.hasNext()) { RDFNode a = results.next().get("?NOTION"); giveNotion.add(java.net.URI.create(a.asLiteral().getString())); needNotion.addAll(getSuperClassFromUri(a.asLiteral().getString())); } // remove duplicates from neednotion Set<java.net.URI> hs = new HashSet<>(); hs.addAll(needNotion); needNotion.clear(); needNotion.addAll(hs); // end remove duplicate this.needNotion = needNotion; this.giveNotion = giveNotion; // ajout des notions au modele int i = 0; while (i < needNotion.size()) { log.error(needNotion.get(i).toString()); addStatementToOntModel("needNotion", needNotion.get(i).toString()); i++; } i = 0; while (i < giveNotion.size()) { log.error(giveNotion.get(i).toString()); addStatementToOntModel("giveNotion", giveNotion.get(i).toString()); i++; } }
/**
 * Renders the "path page" listing the anonymous (blank-node) values of a property of the
 * mapped resource, in either the forward or inverse direction.
 *
 * @param resource the resource whose property values are rendered
 * @param property the property to follow
 * @param isInverse true to list subjects pointing at the resource instead of objects
 * @param request the incoming request (unused here but part of the handler contract)
 * @param response target for the rendered XHTML
 * @param config site-wide configuration (prefixes, labels, base URIs)
 * @return true if anonymous values existed and a page was rendered, false otherwise
 * @throws IOException on output errors
 */
public boolean doGet(
    MappedResource resource,
    Property property,
    boolean isInverse,
    HttpServletRequest request,
    HttpServletResponse response,
    Configuration config)
    throws IOException {
  Model descriptions = getAnonymousPropertyValues(resource, property, isInverse);
  if (descriptions.size() == 0) {
    return false;
  }
  Resource r = descriptions.getResource(resource.getWebURI());
  List resourceDescriptions = new ArrayList();
  // Forward: statements (r, property, ?); inverse: statements (?, property, r).
  StmtIterator it =
      isInverse ? descriptions.listStatements(null, property, r) : r.listProperties(property);
  while (it.hasNext()) {
    Statement stmt = it.nextStatement();
    RDFNode value = isInverse ? stmt.getSubject() : stmt.getObject();
    // Only anonymous (blank-node) values belong on this page.
    if (!value.isAnon()) {
      continue;
    }
    resourceDescriptions.add(
        new ResourceDescription((Resource) value.as(Resource.class), descriptions, config));
  }
  Model description = getResourceDescription(resource);
  ResourceDescription resourceDescription = new ResourceDescription(resource, description, config);
  // NOTE(review): the " ? " separators below look like mojibake (possibly arrows such as
  // "→"/"←" in the original encoding) — confirm against the rendered page before changing.
  String title =
      resourceDescription.getLabel()
          + (isInverse ? " ? " : " ? ")
          + config.getPrefixes().getNsURIPrefix(property.getNameSpace())
          + ":"
          + property.getLocalName();
  VelocityHelper template = new VelocityHelper(getServletContext(), response);
  Context context = template.getVelocityContext();
  context.put("project_name", config.getProjectName());
  context.put("project_link", config.getProjectLink());
  context.put("title", title);
  context.put("server_base", config.getWebApplicationBaseURI());
  context.put("sparql_endpoint", resource.getDataset().getDataSource().getEndpointURL());
  context.put("back_uri", resource.getWebURI());
  context.put("back_label", resourceDescription.getLabel());
  context.put(
      "rdf_link",
      isInverse ? resource.getInversePathDataURL(property) : resource.getPathDataURL(property));
  context.put("resources", resourceDescriptions);
  template.renderXHTML("pathpage.vm");
  return true;
}
/**
 * Recursively walks the rdfs:subPropertyOf closure of {@code ancestor}, adding fields
 * for every proper super-property of {@code p} reachable from it.
 */
private void handleSubpropertyOf(OntProperty p, OntProperty ancestor) {
  for (RDFNode node : ancestor.listPropertyValues(RDFS.subPropertyOf).toList()) {
    if (node.canAs(OntProperty.class)) {
      OntProperty superProperty = node.as(OntProperty.class);
      // Skip the reflexive subPropertyOf statement on the ancestor itself.
      if (!superProperty.equals(ancestor)) {
        addFields(p, superProperty);
        handleSubpropertyOf(p, superProperty);
      }
    }
  }
}
/** Appends the SPARQL-JSON node-type keyword ("uri", "bnode" or "literal") for the node. */
private static void doTypeString(StringBuffer json, RDFNode node) {
  if (node.isLiteral()) {
    json.append("literal");
  } else if (node.isAnon()) {
    json.append("bnode");
  } else if (node.isURIResource()) {
    json.append("uri");
  }
}
/**
 * For every property with a declared rdfs:range in the schema, types each resource-valued
 * object of that property in {@code result} with the range class. New statements are
 * buffered in a scratch model so {@code result} is not mutated while being iterated.
 */
protected static void addRangeTypes(Model result, Model schema) {
  Model pending = ModelFactory.createDefaultModel();
  StmtIterator ranges = schema.listStatements(ANY, RDFS.range, ANY);
  while (ranges.hasNext()) {
    Statement rangeStmt = ranges.nextStatement();
    RDFNode type = rangeStmt.getObject();
    Property property = rangeStmt.getSubject().as(Property.class);
    StmtIterator uses = result.listStatements(ANY, property, ANY);
    while (uses.hasNext()) {
      RDFNode obj = uses.nextStatement().getObject();
      if (obj.isResource()) {
        pending.add((Resource) obj, RDF.type, type);
      }
    }
  }
  result.add(pending);
}
/**
 * Checks if a given node is a Shape. Note this is just an approximation based on a couple of
 * hard-coded properties. It should really rely on sh:defaultValueType.
 *
 * @param node the node to test
 * @return true if node is a Shape
 */
public static boolean isShape(RDFNode node) {
  if (!(node instanceof Resource)) {
    return false;
  }
  Resource resource = (Resource) node;
  if (JenaUtil.hasIndirectType(resource, SH.Shape)) {
    return true;
  }
  // Untyped blank nodes count as shapes when referenced via sh:shape or sh:filterShape.
  if (node.isAnon() && !resource.hasProperty(RDF.type)) {
    Model model = node.getModel();
    return model.contains(null, SH.shape, node) || model.contains(null, SH.filterShape, node);
  }
  return false;
}
/** Lists the distinct named graphs in the dataset and prints them. */
@Test(enabled = true)
public void selectAllNamedGraphs() {
  String query = "SELECT DISTINCT(?g) WHERE { GRAPH ?g { ?s ?p ?o } }";
  List<String> namedGraphs = new ArrayList<String>();
  ResultSet results = Util.executeQuery(query, virtDataset);
  while (results.hasNext()) {
    QuerySolution sol = results.next();
    namedGraphs.add(sol.get("g").toString());
  }
  System.out.println(namedGraphs);
}
/**
 * Records a quality problem stating that {@code resource} lacks a back-link to
 * {@code subjectURI}, reifying the offending subject in a blank node.
 */
private void createBackLinkViolation(String subjectURI, String resource) {
  Model m = ModelFactory.createDefaultModel();
  Resource subject = m.createResource(resource);
  RDFNode violatedTriple = Commons.generateRDFBlankNode();
  Resource reified = violatedTriple.asResource();
  m.add(new StatementImpl(subject, QPRO.exceptionDescription, DQM.NoBackLink));
  m.add(new StatementImpl(reified, RDF.subject, m.createResource(subjectURI)));
  m.add(new StatementImpl(subject, DQM.hasViolatingTriple, violatedTriple));
  this._problemList.add(m);
}
/**
 * Attempts to find the most plausible RDF type for a given property: owl:ObjectProperty
 * when any declared rdfs:range is a known owl:Class, otherwise owl:DatatypeProperty.
 *
 * @param property the property to get the type of
 * @return either owl:DatatypeProperty or owl:ObjectProperty
 */
private Resource getPropertyType(Resource property) {
  // FIX: removed the redundant `if (it.hasNext())` wrapper around the while loop, and
  // close the iterator even on the early return inside the loop.
  StmtIterator it = model.listStatements(property, RDFS.range, (RDFNode) null);
  try {
    while (it.hasNext()) {
      RDFNode n = it.nextStatement().getObject();
      if (n.isResource() && model.contains(n.asResource(), RDF.type, OWL.Class)) {
        return OWL.ObjectProperty;
      }
    }
  } finally {
    it.close();
  }
  return OWL.DatatypeProperty;
}
/**
 * True when the node represents a native scope: a resource explicitly typed
 * sh:NativeScope, or an untyped blank node whose default template type is sh:NativeScope.
 */
public static boolean isNativeScope(RDFNode node) {
  if (node == null) {
    return false;
  }
  if (node.isURIResource()) {
    return ((Resource) node).hasProperty(RDF.type, SH.NativeScope);
  }
  if (node.isAnon()) {
    Resource blank = (Resource) node;
    if (blank.hasProperty(RDF.type, SH.NativeScope)) {
      return true;
    }
    // Untyped blank nodes fall back to their default template type.
    if (!blank.hasProperty(RDF.type)) {
      return SH.NativeScope.equals(SHACLUtil.getDefaultTemplateType(blank));
    }
  }
  return false;
}
/**
 * Returns the URIs of all facets (WON.HAS_FACET) declared on the need's content model.
 * An empty list is returned when the need has no stored content.
 */
public Collection<URI> getSupportedFacets(URI needUri) throws NoSuchNeedException {
  List<URI> facets = new LinkedList<URI>();
  Need need = DataAccessUtils.loadNeed(needRepository, needUri);
  Model content = rdfStorageService.loadContent(need);
  if (content == null) {
    return facets;
  }
  Resource baseRes = content.getResource(content.getNsPrefixURI(""));
  StmtIterator it = baseRes.listProperties(WON.HAS_FACET);
  while (it.hasNext()) {
    RDFNode object = it.nextStatement().getObject();
    if (object.isURIResource()) {
      facets.add(URI.create(object.toString()));
    }
  }
  return facets;
}
protected Set<ValueFactory> getValueFactory(RDFNode valueNode, OntModel displayOntModel) { // maybe use jenabean or owl2java for this? if (valueNode.isResource()) { Resource res = (Resource) valueNode.as(Resource.class); Statement stmt = res.getProperty(DisplayVocabulary.JAVA_CLASS_NAME); if (stmt == null || !stmt.getObject().isLiteral()) { log.debug("Cannot build value factory: java class was " + stmt.getObject()); return Collections.emptySet(); } String javaClassName = ((Literal) stmt.getObject().as(Literal.class)).getLexicalForm(); if (javaClassName == null || javaClassName.length() == 0) { log.debug("Cannot build value factory: no java class was set."); return Collections.emptySet(); } Class<?> clazz; Object newObj; try { clazz = Class.forName(javaClassName); } catch (ClassNotFoundException e) { log.debug("Cannot build value factory: no class found for " + javaClassName); return Collections.emptySet(); } try { newObj = clazz.newInstance(); } catch (Exception e) { log.debug( "Cannot build value factory: exception while creating object of java class " + javaClassName + " " + e.getMessage()); return Collections.emptySet(); } if (newObj instanceof ValueFactory) { ValueFactory valueFactory = (ValueFactory) newObj; return Collections.singleton(valueFactory); } else { log.debug( "Cannot build value factory: " + javaClassName + " does not implement " + ValueFactory.class.getName()); return Collections.emptySet(); } } else { log.debug("Cannot build value factory for " + valueNode); return Collections.emptySet(); } }
/**
 * Executes a SPARQL SELECT query against the relations graph under a read lock and
 * materializes all solutions into a QueryResult. Resources (query execution, lock,
 * connection) are released in reverse acquisition order in the finally block.
 *
 * @param queryString the SELECT query text
 * @param language query language hint; anything other than "sparql" is warned about
 *     and ignored (SPARQL is always used)
 * @param baseURI base URI applied to the parsed query
 * @return the fully materialized result rows; never null
 */
@Override
public QueryResult query(String queryString, String language, String baseURI) {
  Model graph = null;
  GraphConnection graphConnection = null;
  QueryResult res = null;
  QueryExecution qe = null;
  try {
    graphConnection = openGraph();
    graph = graphConnection.getGraph();
    graph.enterCriticalSection(Lock.READ);
    log.debug(String.format("Running query %s", queryString));
    // XXX AT: ignore language for now
    if (language != null && !language.equals("sparql")) {
      log.warn(String.format("Unknown language %s for query, using SPARQL", language));
    }
    Query query = QueryFactory.create(queryString);
    query.setBaseURI(baseURI);
    qe = QueryExecutionFactory.create(query, graph);
    // Placeholder result in case execution yields nothing; replaced below.
    res = new QueryResultImpl(0, new ArrayList<String>(), new ArrayList<Map<String, Node>>());
    ResultSet jenaResults = qe.execSelect();
    Integer count = 0;
    List<String> variableNames = jenaResults.getResultVars();
    List<Map<String, Node>> nuxResults = new ArrayList<Map<String, Node>>();
    while (jenaResults.hasNext()) {
      QuerySolution soln = jenaResults.nextSolution();
      Map<String, Node> nuxSol = new HashMap<String, Node>();
      for (String varName : variableNames) {
        RDFNode x = soln.get(varName);
        // NOTE(review): soln.get returns null for unbound variables — x.asNode() would
        // NPE then; presumably all projected vars are always bound here. Confirm.
        nuxSol.put(varName, getNXRelationsNode(x.asNode()));
      }
      nuxResults.add(nuxSol);
      count++;
    }
    res = new QueryResultImpl(count, variableNames, nuxResults);
  } finally {
    if (qe != null) {
      // Important - free up resources used running the query
      qe.close();
    }
    if (graph != null) {
      graph.leaveCriticalSection();
    }
    if (graphConnection != null) {
      graphConnection.close();
    }
  }
  return res;
}
/**
 * Reads the value of the given 1-based column from the current row: literals are
 * converted to their Java value, other nodes are rendered as strings, absent
 * bindings yield null.
 */
@Override
protected Object readObject(final int columnOrdinal) throws SQLException {
  checkPosition();
  checkColumn(columnOrdinal);
  // Column names come from the query's projection list (ordinal is 1-based).
  final String columnName = query.getProjectVars().get(columnOrdinal - 1).getName();
  final RDFNode node = ((QuerySolution) getRowObject()).get(columnName);
  if (node == null) {
    return null;
  }
  return node.isLiteral() ? TypeConverter.getJavaValue(node.asLiteral()) : node.toString();
}
/**
 * Serializes a single RDF node as an XML element: &lt;uri&gt;, &lt;id&gt; (blank node),
 * &lt;typedLiteral&gt; or &lt;plainLiteral&gt; (with optional xml:lang). Throws for any
 * node kind it does not recognize.
 */
private void writeNode(RDFNode node) throws IOException {
  Node n = node.asNode();
  if (n.isURI()) {
    write(" <uri>" + escape(n.getURI()) + "</uri>\n");
  } else if (n.isBlank()) {
    write(" <id>" + escape(n.getBlankNodeId().toString()) + "</id>\n");
  } else if (!n.isLiteral()) {
    throw new JenaException("Don't know how to serialize node " + n);
  } else if (n.getLiteral().getDatatypeURI() != null) {
    write(
        " <typedLiteral datatype=\""
            + escape(n.getLiteral().getDatatypeURI())
            + "\">"
            + escape(n.getLiteral().getLexicalForm())
            + "</typedLiteral>\n");
  } else {
    String lang = n.getLiteral().language();
    if (lang == null || "".equals(lang)) {
      write(" <plainLiteral>" + escape(n.getLiteral().getLexicalForm()) + "</plainLiteral>\n");
    } else {
      write(
          " <plainLiteral xml:lang=\""
              + lang
              + "\">"
              + escape(n.getLiteral().getLexicalForm())
              + "</plainLiteral>\n");
    }
  }
}
/**
 * Records a quality problem for {@code resource}, reifying the violating statement
 * (subject/predicate/object) in a blank node attached via DQM.hasViolatingTriple.
 */
private void createViolatingTriple(Statement stmt, String resource) {
  Model m = ModelFactory.createDefaultModel();
  Resource subject = m.createResource(resource);
  RDFNode violatedTriple = Commons.generateRDFBlankNode();
  Resource reified = violatedTriple.asResource();
  m.add(new StatementImpl(subject, QPRO.exceptionDescription, DQM.ViolatingTriple));
  m.add(new StatementImpl(reified, RDF.type, RDF.Statement));
  m.add(new StatementImpl(reified, RDF.subject, stmt.getSubject()));
  m.add(new StatementImpl(reified, RDF.predicate, stmt.getPredicate()));
  m.add(new StatementImpl(reified, RDF.object, stmt.getObject()));
  m.add(new StatementImpl(subject, DQM.hasViolatingTriple, violatedTriple));
  this._problemList.add(m);
}
/**
 * Parses one N-Triples statement from the input stream into the Hadoop key/value pair:
 * the key is set to the current file name and the value to the parsed triple. The
 * shared {@code inErr} flag (set by the read helpers) aborts parsing after each token.
 *
 * @param key receives the current input file name
 * @param value receives the parsed triple
 * @return true if a complete triple was parsed, false at EOF or on any parse error
 */
protected boolean populateKeyValue(Text key, Triple value) {
  Resource subject;
  Property predicate = null;
  RDFNode object;
  inErr = false;
  skipWhiteSpace();
  // Clean EOF before a statement simply means no more records.
  if (in.eof()) {
    return false;
  }
  subject = readResource();
  if (inErr) return false;
  skipWhiteSpace();
  try {
    predicate = model.createProperty(readResource().getURI());
  } catch (Exception e1) {
    e1.printStackTrace();
    errorHandler.fatalError(e1);
  }
  if (inErr) return false;
  skipWhiteSpace();
  object = readNode();
  if (inErr) return false;
  skipWhiteSpace();
  // A statement must be terminated by '.' before EOF.
  if (badEOF()) return false;
  if (!expect(".")) return false;
  try {
    key.set(getCurrentFileName());
    value.setTriple(subject.asNode(), predicate.asNode(), object.asNode());
    recordCounter++;
    // System.out.println("Triple value:"+value);
    return true;
  } catch (Exception e2) {
    e2.printStackTrace();
    errorHandler.fatalError(e2);
  }
  return false;
}
/** * Create a ResearchObject from given zip as input stream. * * @param id Research Object id * @param input The content of the zip aggreagated ResearchObject * @return a instance of ResearchObject */ @SuppressWarnings("resource") public static ResearchObjectSerializable toResearchObject(URI id, InputStream input) { File tmpZipFile = null; ZipFile zipFile = null; try { tmpZipFile = File.createTempFile("zipInput", ".zip"); IOUtils.copy(input, new FileOutputStream(tmpZipFile)); zipFile = new ZipFile(tmpZipFile); } catch (IOException e) { LOGGER.error("Can't create a tmpFile for a RO " + id + " given from dArceo", e); return null; } Enumeration<? extends ZipEntry> entries = zipFile.entries(); // first get Manifest build Jena and parese it ResearchObject researchObject = new ResearchObject(id); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (entry.getName().equals("content/.ro/manifest.rdf")) { OntModel model = ModelFactory.createOntologyModel(); try { model.read(zipFile.getInputStream(entry), id.toString() + ".ro/"); Individual roIndividual = model.getResource(id.toString()).as(Individual.class); for (RDFNode node : roIndividual.listPropertyValues(ORE.aggregates).toList()) { if (node.isURIResource()) { URI resourceUri = URI.create(node.asResource().getURI()); URI entryName = URI.create("content/") .resolve(id.relativize(URI.create(node.asResource().getURI())).toString()); InputStream entryInput = zipFile.getInputStream(new ZipEntry(entryName.toString())); researchObject.addSerializable(new ResearchObjectComponent(resourceUri, entryInput)); } } // add manifest InputStream entryInput = zipFile.getInputStream(new ZipEntry("content/.ro/manifest.rdf")); researchObject.addSerializable( new ResearchObjectComponent(id.resolve(".ro/manifest.rdf"), entryInput)); break; } catch (IOException e) { LOGGER.error("can't load the manifest from zip for RO " + id, e); tmpZipFile.delete(); return researchObject; } } } tmpZipFile.delete(); return 
researchObject; }
/**
 * Evaluates the query with ?arg1 bound to {@code node}; if it produces a binding for
 * the first result variable, that value is returned. Otherwise the walk recurses along
 * {@code predicate} edges (direction determined by createIterator/getNext), using
 * {@code reached} to cut cycles. Returns null when no node in the reachable tree yields
 * a query result.
 *
 * @param model model providing the nodes being walked
 * @param oldDataset dataset whose named graphs back the query (default model replaced)
 * @param node current tree node; bound to ?arg1 for the query
 * @param predicate edge property to follow
 * @param query SELECT query whose first result variable supplies the answer
 * @param initialBinding bindings shared by every evaluation in this walk
 * @param reached nodes already visited (mutated in place to prevent revisiting)
 * @return the first query result found, or null
 */
private Node walkTree(
    Model model,
    Dataset oldDataset,
    Node node,
    Node predicate,
    Query query,
    QuerySolution initialBinding,
    Set<Node> reached) {
  QuerySolutionMap localBinding = new QuerySolutionMap();
  localBinding.addAll(initialBinding);
  localBinding.add("arg1", model.asRDFNode(node));
  // Query runs against the old dataset but with `model` as the default graph.
  Dataset dataset = new DatasetWithDifferentDefaultModel(model, oldDataset);
  QueryExecution qexec = ARQFactory.get().createQueryExecution(query, dataset, localBinding);
  ResultSet rs = qexec.execSelect();
  try {
    if (rs.hasNext()) {
      List<String> resultVars = rs.getResultVars();
      String varName = resultVars.get(0);
      RDFNode resultNode = rs.next().get(varName);
      if (resultNode != null) {
        return resultNode.asNode();
      }
    }
  } finally {
    qexec.close();
  }
  // Recurse into parents
  ExtendedIterator<Triple> it = createIterator(model.getGraph(), node, predicate);
  try {
    while (it.hasNext()) {
      Node next = getNext(it.next());
      // Only named/blank nodes are walkable; literals are leaves.
      if ((next.isBlank() || next.isURI()) && !reached.contains(next)) {
        reached.add(next);
        Node nextResult =
            walkTree(model, oldDataset, next, predicate, query, initialBinding, reached);
        if (nextResult != null) {
          return nextResult;
        }
      }
    }
  } finally {
    it.close();
  }
  return null;
}
public static void main(String[] args) throws IOException { Model model = ModelFactory.createDefaultModel(); try (InputStream is = new BZip2CompressorInputStream(new URL(DBPEDIA_SCHEMA_DOWNLOAD_URL).openStream())) { // model = FileManager.get().loadModel("input/dbpedia_3.9.owl"); model.read(is, null, "RDF/XML"); } System.out.println(model.size() + " triples loaded."); Set<Resource> classes = subjects(model, RDF.type, OWL.Class); Set<Resource> objectProperties = subjects(model, RDF.type, OWL.ObjectProperty); Set<Resource> dataProperties = subjects(model, RDF.type, OWL.DatatypeProperty); Map<Set<Resource>, String> setToName = new HashMap<>(); setToName.put(classes, "classes"); setToName.put(objectProperties, "objectproperties"); setToName.put(dataProperties, "dataproperties"); FieldType stringType = new FieldType(StringField.TYPE_STORED); stringType.setStoreTermVectors(false); FieldType textType = new FieldType(TextField.TYPE_STORED); textType.setStoreTermVectors(false); for (Set<Resource> set : setToName.keySet()) { IndexWriter writer = createWriter(setToName.get(set)); Set<Document> documents = new HashSet<>(); for (Resource resource : set) { for (RDFNode object : model.listObjectsOfProperty(resource, RDFS.label).toSet()) { String label = object.asLiteral().getLexicalForm(); Document luceneDocument = new Document(); luceneDocument.add(new Field("uri", resource.getURI(), stringType)); // luceneDocument.add(new Field("dbpediaUri", indexDocument.getCanonicalDBpediaUri(), // stringType)); luceneDocument.add(new Field("label", label, textType)); // documents.add(luceneDocument); writer.addDocument(luceneDocument); } } writer.addDocuments(documents); writer.commit(); writer.close(); } }