/**
 * Builds a Method from one row of a SPARQL SELECT result. The hasParameter and
 * throwsException bindings are optional; all other variables are expected to be bound.
 */
public static Method create(QuerySolution qs) throws Exception {
  Resource id = (Resource) qs.get("x");
  Literal fullPath = (Literal) qs.get("fullPath");
  Literal name = (Literal) qs.get("name");
  Literal accessModifier = (Literal) qs.get("accessModifier");
  Literal lineStart = (Literal) qs.get("line_start");
  Literal lineEnd = (Literal) qs.get("line_end");
  Resource returnType = (Resource) qs.get("returnType");
  Resource hasParameter = (Resource) qs.get("hasParameter");
  Resource throwsException = (Resource) qs.get("throwsException");
  Resource belongsTo = (Resource) qs.get("belongsTo");

  Method meth = new Method(id.toString(), name.getValue().toString());
  meth.setAccessModifier(accessModifier.getValue().toString());
  meth.setFullPath(fullPath.getValue().toString());
  meth.setLineStart(lineStart.getInt());
  meth.setLineEnd(lineEnd.getInt());
  meth.setReturnType(returnType.toString());
  if (hasParameter != null) {
    meth.setHasParameter(hasParameter.toString());
  }
  if (throwsException != null) {
    meth.setThrowsException(throwsException.toString());
  }
  meth.setBelongsTo(belongsTo.toString());
  return meth;
}
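/*
 * Usage sketch (an assumption, not part of the original source): run a SPARQL SELECT whose
 * projected variables match the ones read in create(...) above (?x, ?fullPath, ?name,
 * ?accessModifier, ?line_start, ?line_end, ?returnType, ?hasParameter, ?throwsException,
 * ?belongsTo) and turn every row into a Method. The query string is supplied by the caller.
 */
public static List<Method> createAll(Model model, String selectQuery) throws Exception {
  List<Method> methods = new ArrayList<Method>();
  QueryExecution qexec = QueryExecutionFactory.create(selectQuery, model);
  try {
    ResultSet results = qexec.execSelect();
    while (results.hasNext()) {
      methods.add(create(results.next()));
    }
  } finally {
    qexec.close();
  }
  return methods;
}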
/**
 * Builds the JCR mixin name (prefix:localName) for an RDF mixin resource, resolving the
 * namespace prefix first from the stream's namespace mapping and then from the repository's
 * registered namespaces.
 */
protected String jcrMixinNameFromRdfResource(final Resource mixinResource) throws RepositoryException {
  final String namespace = getJcrNamespaceForRDFNamespace(mixinResource.getNameSpace());
  String namespacePrefix = null;
  final Map<String, String> streamNSMap =
      checkNotNull(stream().namespaces(), "Use an empty map of namespaces, not null!");
  if (streamNSMap.containsValue(namespace)) {
    LOGGER.debug("Found namespace: {} in stream namespace mapping.", namespace);
    for (final Map.Entry<String, String> entry : streamNSMap.entrySet()) {
      final String streamNamespace = entry.getValue();
      if (namespace.equals(streamNamespace)) {
        namespacePrefix = entry.getKey();
        LOGGER.debug(
            "Found namespace: {} in stream namespace mapping with prefix: {}.",
            namespace, namespacePrefix);
      }
    }
  } else {
    try {
      namespacePrefix = session().getNamespacePrefix(namespace);
      LOGGER.debug(
          "Found namespace: {} in repository namespace mapping with prefix: {}.",
          namespace, namespacePrefix);
    } catch (final NamespaceException e) {
      throw new MalformedRdfException(
          "Unable to resolve registered namespace for resource " + mixinResource.toString(), e);
    }
  }
  final String mixinName = namespacePrefix + ":" + mixinResource.getLocalName();
  LOGGER.debug("Constructed JCR mixin name: {}", mixinName);
  return mixinName;
}
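/*
 * Simplified sketch of the same lookup (an assumption, not the project's API): it skips the
 * stream namespace map and the RDF-to-JCR namespace translation above and resolves the prefix
 * directly from a plain javax.jcr.Session.
 */
private static String simpleMixinName(final javax.jcr.Session session, final Resource mixinResource)
    throws RepositoryException {
  final String namespace = mixinResource.getNameSpace();
  // Throws javax.jcr.NamespaceException if the namespace is not registered in the repository
  final String prefix = session.getNamespacePrefix(namespace);
  return prefix + ":" + mixinResource.getLocalName();
}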
/**
 * Index all the resources in a Jena Model to ES
 *
 * @param model the model to index
 * @param bulkRequest a BulkRequestBuilder
 * @param getPropLabel if set to true all URI property values will be indexed as their label. The
 *     label is taken as the value of one of the properties set in {@link #uriDescriptionList}.
 */
private void addModelToES(Model model, BulkRequestBuilder bulkRequest, boolean getPropLabel) {
  long startTime = System.currentTimeMillis();
  long bulkLength = 0;
  HashSet<Property> properties = new HashSet<Property>();

  StmtIterator it = model.listStatements();
  while (it.hasNext()) {
    Statement st = it.nextStatement();
    Property prop = st.getPredicate();
    String property = prop.toString();

    if (rdfPropList.isEmpty()
        || (isWhitePropList && rdfPropList.contains(property))
        || (!isWhitePropList && !rdfPropList.contains(property))
        || (normalizeProp.containsKey(property))) {
      properties.add(prop);
    }
  }

  ResIterator resIt = model.listSubjects();
  while (resIt.hasNext()) {
    Resource rs = resIt.nextResource();
    Map<String, ArrayList<String>> jsonMap = getJsonMap(rs, properties, model, getPropLabel);

    bulkRequest.add(
        client.prepareIndex(indexName, typeName, rs.toString()).setSource(mapToString(jsonMap)));
    bulkLength++;

    // We want to execute the bulk for every DEFAULT_BULK_SIZE requests
    if (bulkLength % EEASettings.DEFAULT_BULK_SIZE == 0) {
      BulkResponse bulkResponse = bulkRequest.execute().actionGet();
      // After executing, flush the BulkRequestBuilder.
      bulkRequest = client.prepareBulk();
      if (bulkResponse.hasFailures()) {
        processBulkResponseFailure(bulkResponse);
      }
    }
  }

  // Execute remaining requests
  if (bulkRequest.numberOfActions() > 0) {
    BulkResponse response = bulkRequest.execute().actionGet();
    // Handle failure by iterating through each bulk response item
    if (response.hasFailures()) {
      processBulkResponseFailure(response);
    }
  }

  // Show time taken to index the documents
  logger.info(
      "Indexed {} documents on {}/{} in {} seconds",
      bulkLength, indexName, typeName,
      (System.currentTimeMillis() - startTime) / 1000.0);
}
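/*
 * Usage sketch (an assumption: the file name is illustrative; client, indexName and typeName are
 * the river's existing fields): load an RDF file with Jena and index it through addModelToES
 * using a fresh bulk request.
 */
private void indexRdfFile(String rdfFile) {
  Model model = ModelFactory.createDefaultModel();
  model.read(rdfFile); // RDF/XML is assumed unless the reader detects another syntax
  addModelToES(model, client.prepareBulk(), true);
}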
public static void main(String[] args) {
  List<String> obj = new ArrayList<String>();
  Scanner input = new Scanner(System.in);
  System.out.print("Enter URI: ");
  String userIn = input.nextLine();

  // create an empty Model
  Model model = ModelFactory.createDefaultModel();

  // read the RDF/XML file
  model.read(userIn);

  // write it to standard out
  // model.write(System.out);

  // list the statements in the Model
  StmtIterator iter = model.listStatements();
  System.out.println();

  // print out the predicate, subject and object of each statement
  while (iter.hasNext()) {
    Statement stmt = iter.nextStatement(); // get next statement
    Resource subject = stmt.getSubject(); // get the subject
    Property predicate = stmt.getPredicate(); // get the predicate
    RDFNode object = stmt.getObject(); // get the object

    System.out.print(subject.toString());
    System.out.print(" -> " + predicate.toString() + " -> ");
    if (object instanceof Resource) {
      System.out.print(object.toString() + "\n");
    } else {
      // object is a literal
      System.out.print(" \"" + object.toString() + "\"\n");
    }
  }

  /*
  for (int i = 0; i < obj.size(); i++) {
    String sparqlQueryString1 =
        "SELECT ?s ?o "
            + "WHERE {"
            + "?s ?p ?o ."
            + "?o <bif:contains> \"" + obj.get(i) + "\" ."
            + "}"
            + "limit 10";

    Query query = QueryFactory.create(sparqlQueryString1);
    QueryExecution qexec =
        QueryExecutionFactory.sparqlService("http://pubmed.bio2rdf.org/sparql", query);

    ResultSet results = qexec.execSelect();
    System.out.println("Query: " + obj.get(i));
    ResultSetFormatter.out(System.out, results, query);
    qexec.close();
  }
  */
}
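/*
 * Cleaned-up version of the commented-out block above (assumptions: the keyword list has been
 * filled by the caller and the bio2rdf endpoint is reachable; bif:contains is a Virtuoso-specific
 * extension, so the query only works against Virtuoso-backed endpoints).
 */
private static void queryPubmed(List<String> terms) {
  for (String term : terms) {
    String sparql =
        "SELECT ?s ?o WHERE { ?s ?p ?o . ?o <bif:contains> \"" + term + "\" . } LIMIT 10";
    Query query = QueryFactory.create(sparql);
    QueryExecution qexec =
        QueryExecutionFactory.sparqlService("http://pubmed.bio2rdf.org/sparql", query);
    try {
      System.out.println("Query: " + term);
      ResultSetFormatter.out(System.out, qexec.execSelect(), query);
    } finally {
      qexec.close();
    }
  }
}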
/**
 * Computes the average surface area per class and returns the result as an RDF Data Cube. When
 * inputModel is null, all queries are sent to the given SPARQL endpoint instead.
 */
private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Resource dataset;
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
  dataset = cube.createResource(GK.uri + "Average_Surface", QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Average Surface per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));
  if (endpoint != null) {
    dataset.addProperty(DCTerms.source, endpoint);
  }

  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(GET_CLASSES, inputModel);
  } else {
    qExec = QueryExecutionFactory.sparqlService(endpoint, GET_CLASSES, defaultGraphs, defaultGraphs);
  }

  ResultSet result = qExec.execSelect();
  int obsCount = 0;
  while (result.hasNext()) {
    double area = 0;
    int i = 0;
    Resource owlClass = result.next().get("class").asResource();
    if (!blacklist.contains(owlClass.toString())) {
      System.out.println(owlClass);
      GET_INSTANCES.setIri("class", owlClass.getURI());

      QueryExecution qexecInstances;
      if (inputModel != null) {
        qexecInstances = QueryExecutionFactory.create(GET_INSTANCES.asQuery(), inputModel);
      } else {
        qexecInstances =
            QueryExecutionFactory.sparqlService(
                endpoint, GET_INSTANCES.asQuery(), defaultGraphs, defaultGraphs);
      }

      // Accumulate the area over every instance of the current class
      for (ResultSet instancesResult = qexecInstances.execSelect(); instancesResult.hasNext(); ) {
        QuerySolution next = instancesResult.next();
        String instance = next.get("instance").asResource().getURI();
        if (instance == null) {
          continue;
        }
        POLYGON.setIri("instance", instance);

        QueryExecution qexecMember;
        if (inputModel != null) {
          qexecMember = QueryExecutionFactory.create(POLYGON.asQuery(), inputModel);
        } else {
          qexecMember =
              QueryExecutionFactory.sparqlService(
                  endpoint, POLYGON.asQuery(), defaultGraphs, defaultGraphs);
        }

        StringBuilder polygonBuilder = new StringBuilder();
        firstLat = null;
        firstLong = null;
        for (ResultSet latLong = qexecMember.execSelect(); latLong.hasNext(); ) {
          processPoint(latLong.next(), polygonBuilder);
        }

        if (polygonBuilder.length() > 0) {
          area += calculateArea(polygonBuilder);
        } else {
          // No simple polygon found: fall back to the instance's multi-polygon members
          area = 0;
          polygonBuilder.setLength(0);
          this.firstLat = null;
          this.firstLong = null;
          MULTI_POLYGON.setIri("instance", instance);

          QueryExecution qexecMultiPolygon;
          if (inputModel != null) {
            qexecMultiPolygon = QueryExecutionFactory.create(MULTI_POLYGON.asQuery(), inputModel);
          } else {
            qexecMultiPolygon =
                QueryExecutionFactory.sparqlService(
                    endpoint, MULTI_POLYGON.asQuery(), defaultGraphs, defaultGraphs);
          }

          String polygonName = "";
          for (ResultSet latLong = qexecMultiPolygon.execSelect(); latLong.hasNext(); ) {
            QuerySolution solution = latLong.next();
            // A new blank node label marks the start of the next member polygon
            if (!polygonName.equals(solution.get("polygon").asNode().getBlankNodeLabel())) {
              if (polygonBuilder.length() > 0) {
                area += calculateArea(polygonBuilder);
              }
              this.firstLat = null;
              this.firstLong = null;
              polygonBuilder.setLength(0);
            }
            polygonName = solution.get("polygon").asNode().getBlankNodeLabel();
            processPoint(solution, polygonBuilder);
          }
        }
        i++;
      }
    }

    // One observation per class: the average area over its instances
    Resource obs = cube.createResource(structureUri + "/obs/" + obsCount, QB.Observation);
    double average = i == 0 ? 0 : area / i;
    obs.addProperty(GK.MEASURE.Average, cube.createTypedLiteral(average));
    obs.addProperty(GK.DIM.Class, owlClass);
    obs.addProperty(QB.dataset, dataset);
    obsCount++;
  }
  return cube;
}
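/*
 * Usage sketch (an assumption: the output file name and the TURTLE syntax are illustrative only):
 * compute the cube against a remote endpoint and serialize it to disk.
 */
public void writeCube(String endpoint) throws java.io.IOException {
  Model cube = execute(null, endpoint);
  java.io.OutputStream out = new java.io.FileOutputStream("average-surface-cube.ttl");
  try {
    cube.write(out, "TURTLE");
  } finally {
    out.close();
  }
}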
/** @see java.lang.Object#toString() */
@Override
public String toString() {
  return wrapped.toString();
}
/**
 * Get the JSON map for a given resource by applying the river settings
 *
 * @param rs resource being processed
 * @param properties properties to be indexed
 * @param model model returned by the indexing query
 * @param getPropLabel if set to true all URI property values will be indexed as their label. The
 *     label is taken as the value of one of the properties set in {@link #uriDescriptionList}.
 * @return map of properties to be indexed for the resource rs
 */
private Map<String, ArrayList<String>> getJsonMap(
    Resource rs, Set<Property> properties, Model model, boolean getPropLabel) {
  Map<String, ArrayList<String>> jsonMap = new HashMap<String, ArrayList<String>>();
  ArrayList<String> results = new ArrayList<String>();

  if (addUriForResource) {
    results.add("\"" + rs.toString() + "\"");
    jsonMap.put("http://www.w3.org/1999/02/22-rdf-syntax-ns#about", results);
  }

  Set<String> rdfLanguages = new HashSet<String>();

  for (Property prop : properties) {
    NodeIterator niter = model.listObjectsOfProperty(rs, prop);
    String property = prop.toString();
    results = new ArrayList<String>();

    String lang;
    String currValue;

    while (niter.hasNext()) {
      RDFNode node = niter.next();
      currValue = getStringForResult(node, getPropLabel);
      if (addLanguage) {
        if (node.isLiteral()) {
          lang = node.asLiteral().getLanguage();
          if (!lang.isEmpty()) {
            rdfLanguages.add("\"" + lang + "\"");
          }
        }
      }

      String shortValue = currValue;
      int currLen = currValue.length();
      // Unquote string
      if (currLen > 1) shortValue = currValue.substring(1, currLen - 1);

      // Skip the value if the property's whiteMap does not contain it,
      // or if the property's blackMap does contain it
      boolean whiteMapCond =
          whiteMap.containsKey(property) && !whiteMap.get(property).contains(shortValue);
      boolean blackMapCond =
          blackMap.containsKey(property) && blackMap.get(property).contains(shortValue);
      if (whiteMapCond || blackMapCond) {
        continue;
      }

      if (normalizeObj.containsKey(shortValue)) {
        results.add("\"" + normalizeObj.get(shortValue) + "\"");
      } else {
        results.add(currValue);
      }
    }

    // Do not index empty properties
    if (results.isEmpty()) continue;

    if (normalizeProp.containsKey(property)) {
      property = normalizeProp.get(property);
      if (jsonMap.containsKey(property)) {
        jsonMap.get(property).addAll(results);
      } else {
        jsonMap.put(property, results);
      }
    } else {
      jsonMap.put(property, results);
    }
  }

  if (addLanguage) {
    if (rdfLanguages.isEmpty() && !language.isEmpty()) rdfLanguages.add(language);
    if (!rdfLanguages.isEmpty()) jsonMap.put("language", new ArrayList<String>(rdfLanguages));
  }

  for (Map.Entry<String, String> it : normalizeMissing.entrySet()) {
    if (!jsonMap.containsKey(it.getKey())) {
      ArrayList<String> res = new ArrayList<String>();
      res.add("\"" + it.getValue() + "\"");
      jsonMap.put(it.getKey(), res);
    }
  }

  return jsonMap;
}
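/*
 * Illustration (an assumption: the white/black/normalize maps are empty, addUriForResource and
 * addLanguage are true, and getStringForResult returns literal values wrapped in quotes, as the
 * un-quoting logic above implies; the example resource and property are hypothetical).
 */
private void logExampleJsonMap() {
  Model model = ModelFactory.createDefaultModel();
  Resource doc = model.createResource("http://example.org/doc/1");
  Property title = model.createProperty("http://purl.org/dc/elements/1.1/title");
  doc.addProperty(title, model.createLiteral("Example", "en"));

  Map<String, ArrayList<String>> jsonMap =
      getJsonMap(doc, Collections.singleton(title), model, false);
  // Expected shape, with values kept as quoted JSON strings:
  //   http://www.w3.org/1999/02/22-rdf-syntax-ns#about -> ["\"http://example.org/doc/1\""]
  //   http://purl.org/dc/elements/1.1/title            -> ["\"Example\""]
  //   language                                         -> ["\"en\""]
  logger.info("Example JSON map: {}", jsonMap);
}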