/**
 * Builds a String result for Elastic Search from an RDFNode
 *
 * @param node An RDFNode representing the value of a property for a given resource
 * @return If the RDFNode has a Literal value among Boolean, Byte, Double, Float, Integer, Long,
 *     Short, this value is returned, converted to String
 *     <p>If the RDFNode has a String Literal value, this value will be returned, surrounded by
 *     double quotes
 *     <p>If the RDFNode has a Resource value (URI) and getNodeLabel is set to true, the value of
 *     {@link #getLabelForUri} for the resource is returned, surrounded by double quotes.
 *     Otherwise, the URI will be returned
 */
private String getStringForResult(RDFNode node, boolean getNodeLabel) {
  String result = "";
  boolean quote = false;
  if (node.isLiteral()) {
    Object literalValue = node.asLiteral().getValue();
    // Plain literals have no datatype, so guard against null instead of
    // catching NullPointerException.
    RDFDatatype datatype = node.asLiteral().getDatatype();
    Class<?> literalJavaClass = (datatype == null) ? null : datatype.getJavaClass();
    if (literalJavaClass != null
        && (literalJavaClass.equals(Boolean.class)
            || Number.class.isAssignableFrom(literalJavaClass))) {
      result += literalValue;
    } else {
      result = EEASettings.parseForJson(node.asLiteral().getLexicalForm());
      quote = true;
    }
  } else if (node.isResource()) {
    result = node.asResource().getURI();
    if (getNodeLabel) {
      result = getLabelForUri(result);
    }
    quote = true;
  }
  if (quote) {
    result = "\"" + result + "\"";
  }
  return result;
}
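For context, a minimal, self-contained sketch (not part of the river source; the class name QuotingDemo is made up) of the node kinds this method distinguishes and why the null guard on the datatype matters:

import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;

public class QuotingDemo {
  public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    Literal intLit = m.createTypedLiteral(42); // xsd:int, would be emitted unquoted
    Literal strLit = m.createLiteral("hello"); // plain literal, would be quoted
    Resource res = m.createResource("http://example.org/thing");

    // Numeric XSD datatypes map to Java Number subclasses:
    System.out.println(intLit.getDatatype().getJavaClass()); // class java.lang.Integer
    // Plain literals have no datatype, hence the null guard above:
    System.out.println(strLit.getDatatype()); // null
    System.out.println(res.getURI()); // http://example.org/thing
  }
}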
@Override
public void processResults() {
  String queryBegin =
      "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "
          + "PREFIX mstr: <http://methodo-stat-tutor.com#> "
          + "SELECT ?NOTION "
          + "WHERE {";
  String queryEnd = "}";
  // ask for the notions covered by the current exercise
  String queryStr2 =
      queryBegin
          + " ?EXO mstr:traiteNotion ?NOTION ."
          + "FILTER (?EXO = mstr:" + this.exercise + ")\n"
          + queryEnd;
  log.debug(queryStr2);
  Query query2 = QueryFactory.create(queryStr2);
  ResultSet results = SparqlDLExecutionFactory.create(query2, ontModel).execSelect();
  ArrayList<java.net.URI> needNotion = new ArrayList<java.net.URI>();
  ArrayList<java.net.URI> giveNotion = new ArrayList<java.net.URI>();
  while (results.hasNext()) {
    RDFNode a = results.next().get("?NOTION");
    giveNotion.add(java.net.URI.create(a.asLiteral().getString()));
    needNotion.addAll(getSuperClassFromUri(a.asLiteral().getString()));
  }
  // remove duplicates from needNotion
  Set<java.net.URI> hs = new HashSet<>(needNotion);
  needNotion.clear();
  needNotion.addAll(hs);
  this.needNotion = needNotion;
  this.giveNotion = giveNotion;
  // add the notions to the model
  for (java.net.URI notion : needNotion) {
    log.debug(notion.toString());
    addStatementToOntModel("needNotion", notion.toString());
  }
  for (java.net.URI notion : giveNotion) {
    log.debug(notion.toString());
    addStatementToOntModel("giveNotion", notion.toString());
  }
}
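Since the query above is assembled by string concatenation, here is a hedged alternative sketch using ARQ's ParameterizedSparqlString; buildNotionQuery and exerciseName are hypothetical stand-ins for the surrounding class and this.exercise, which is assumed to hold a bare local name:

import com.hp.hpl.jena.query.ParameterizedSparqlString;
import com.hp.hpl.jena.query.Query;

public class NotionQueryBuilder {
  static Query buildNotionQuery(String exerciseName) {
    ParameterizedSparqlString pss = new ParameterizedSparqlString();
    pss.setNsPrefix("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
    pss.setNsPrefix("mstr", "http://methodo-stat-tutor.com#");
    pss.setCommandText("SELECT ?NOTION WHERE { ?EXO mstr:traiteNotion ?NOTION . }");
    // Bind ?EXO to the exercise IRI instead of concatenating it into a FILTER
    pss.setIri("EXO", "http://methodo-stat-tutor.com#" + exerciseName);
    return pss.asQuery();
  }
}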
/**
 * Create a JCR value from an RDF node with the given JCR type
 *
 * @param valueFactory the JCR ValueFactory used to create the value
 * @param data the RDF node to convert
 * @param type the requested JCR property type (see javax.jcr.PropertyType)
 * @return the JCR value corresponding to the RDF node
 * @throws RepositoryException if the value cannot be created
 */
public Value createValue(final ValueFactory valueFactory, final RDFNode data, final int type)
    throws RepositoryException {
  assert (valueFactory != null);

  if (data.isURIResource() && (type == REFERENCE || type == WEAKREFERENCE)) {
    // reference to another node (by path)
    final Node nodeFromGraphSubject =
        session.getNode(graphSubjects.getPathFromSubject(data.asResource()));
    return valueFactory.createValue(nodeFromGraphSubject, type == WEAKREFERENCE);
  } else if (data.isURIResource() || type == URI) {
    // some random opaque URI
    return valueFactory.createValue(data.toString(), PropertyType.URI);
  } else if (data.isResource()) {
    // a non-URI resource (e.g. a blank node)
    return valueFactory.createValue(data.toString(), UNDEFINED);
  } else if (data.isLiteral() && type == UNDEFINED) {
    // the JCR schema doesn't know what this should be; so introspect
    // the RDF and try to figure it out
    final Literal literal = data.asLiteral();
    final RDFDatatype dataType = literal.getDatatype();
    final Object rdfValue = literal.getValue();

    if (rdfValue instanceof Boolean) {
      return valueFactory.createValue((Boolean) rdfValue);
    } else if (rdfValue instanceof Byte
        || (dataType != null && dataType.getJavaClass() == Byte.class)) {
      return valueFactory.createValue(literal.getByte());
    } else if (rdfValue instanceof Double) {
      return valueFactory.createValue((Double) rdfValue);
    } else if (rdfValue instanceof Float) {
      return valueFactory.createValue((Float) rdfValue);
    } else if (rdfValue instanceof Long
        || (dataType != null && dataType.getJavaClass() == Long.class)) {
      return valueFactory.createValue(literal.getLong());
    } else if (rdfValue instanceof Short
        || (dataType != null && dataType.getJavaClass() == Short.class)) {
      return valueFactory.createValue(literal.getShort());
    } else if (rdfValue instanceof Integer) {
      return valueFactory.createValue((Integer) rdfValue);
    } else if (rdfValue instanceof XSDDateTime) {
      return valueFactory.createValue(((XSDDateTime) rdfValue).asCalendar());
    } else {
      return valueFactory.createValue(literal.getString(), STRING);
    }
  } else {
    LOGGER.debug("Using default JCR value creation for RDF literal: {}", data);
    return valueFactory.createValue(data.asLiteral().getString(), type);
  }
}
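A small sketch (plain Jena only, no JCR repository needed) of the literal introspection this method relies on, including the XSDDateTime-to-Calendar conversion used in the dateTime branch:

import java.util.Calendar;

import com.hp.hpl.jena.datatypes.xsd.XSDDateTime;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class LiteralIntrospection {
  public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    System.out.println(m.createTypedLiteral(true).getValue().getClass()); // Boolean
    System.out.println(m.createTypedLiteral(3.14d).getValue().getClass()); // Double
    // xsd:dateTime values come back wrapped in Jena's XSDDateTime:
    Literal date = m.createTypedLiteral(Calendar.getInstance());
    Object v = date.getValue();
    if (v instanceof XSDDateTime) {
      Calendar cal = ((XSDDateTime) v).asCalendar();
      System.out.println(cal.getTime());
    }
  }
}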
private static void doValueString(StringBuffer json, RDFNode node) {
  if (node.isURIResource()) {
    json.append(node.asResource().getURI());
    return;
  }
  if (node.isAnon()) {
    json.append(node.asResource().getId());
    return;
  }
  if (node.isLiteral()) {
    json.append(node.asLiteral().getString());
    doLang(json, node.asLiteral());
    doDatatype(json, node.asLiteral());
  }
}
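A sketch (the class name NodeKinds is hypothetical) of the three node kinds the branches above distinguish:

import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;

public class NodeKinds {
  public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    RDFNode uri = m.createResource("http://example.org/a");
    RDFNode blank = m.createResource(); // anonymous (blank) node
    RDFNode lit = m.createLiteral("chat", "fr"); // language-tagged literal

    for (RDFNode n : new RDFNode[] {uri, blank, lit}) {
      if (n.isURIResource()) {
        System.out.println(n.asResource().getURI());
      } else if (n.isAnon()) {
        System.out.println(n.asResource().getId()); // blank node id
      } else if (n.isLiteral()) {
        System.out.println(n.asLiteral().getString() + "@" + n.asLiteral().getLanguage());
      }
    }
  }
}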
private int getMaxRank(String objectUri, String subjectUri, VitroRequest vreq) {
  int maxRank = 0; // default value
  if (objectUri == null) { // adding new webpage
    String queryStr = QueryUtils.subUriForQueryVar(MAX_RANK_QUERY, "subject", subjectUri);
    log.debug("Query string is: " + queryStr);
    try {
      ResultSet results = QueryUtils.getQueryResults(queryStr, vreq);
      if (results != null && results.hasNext()) { // there is at most one result
        QuerySolution soln = results.next();
        RDFNode node = soln.get("rank");
        if (node != null && node.isLiteral()) {
          // node.asLiteral().getInt() can't convert an xsd:string, even one
          // whose lexical form is a valid integer, so parse it ourselves.
          int rank = Integer.parseInt(node.asLiteral().getLexicalForm());
          if (rank > maxRank) {
            log.debug("setting maxRank to " + rank);
            maxRank = rank;
          }
        }
      }
    } catch (NumberFormatException e) {
      log.error("Invalid rank returned from query: not an integer value.");
    } catch (Exception e) {
      log.error(e, e);
    }
  }
  return maxRank;
}
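A sketch illustrating the comment above: getInt() typically fails on an xsd:string literal (with a DatatypeFormatException in Jena), so parsing the lexical form is the safe route:

import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class RankParsing {
  public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    Literal rank = m.createTypedLiteral("7", XSDDatatype.XSDstring);
    // rank.getInt() would fail here because the datatype is not numeric;
    // parsing the lexical form works for any literal whose text is an int
    int value = Integer.parseInt(rank.getLexicalForm());
    System.out.println(value); // 7
  }
}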
/**
 * Query SPARQL endpoint with a SELECT query
 *
 * @param qExec QueryExecution encapsulating the query
 * @return model retrieved by querying the endpoint
 */
private Model getSelectModel(QueryExecution qExec) {
  Model model = ModelFactory.createDefaultModel();
  Graph graph = model.getGraph();
  ResultSet results = qExec.execSelect();

  while (results.hasNext()) {
    QuerySolution sol = results.next();
    String subject;
    String predicate;
    RDFNode object;
    try {
      subject = sol.getResource("s").toString();
      predicate = sol.getResource("p").toString();
      object = sol.get("o");
    } catch (NoSuchElementException e) {
      logger.error("SELECT query does not return a (?s ?p ?o) Triple");
      continue;
    }

    Node objNode;
    if (object.isLiteral()) {
      Literal obj = object.asLiteral();
      objNode = NodeFactory.createLiteral(obj.getString(), obj.getDatatype());
    } else {
      // Resources and blank nodes must stay nodes; wrapping their string
      // form in createLiteral would turn URIs into literals.
      objNode = object.asNode();
    }

    graph.add(
        new Triple(NodeFactory.createURI(subject), NodeFactory.createURI(predicate), objNode));
  }
  return model;
}
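A minimal caller sketch (the in-memory model and data are made up; getSelectModel itself is private to its class) showing the kind of QueryExecution the method expects:

import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class SelectDemo {
  public static void main(String[] args) {
    Model source = ModelFactory.createDefaultModel();
    source.add(
        source.createResource("http://example.org/s"),
        source.createProperty("http://example.org/p"),
        "object value");
    QueryExecution qExec =
        QueryExecutionFactory.create("SELECT ?s ?p ?o WHERE { ?s ?p ?o }", source);
    // getSelectModel(qExec) would rebuild each (?s ?p ?o) binding as a triple
    ResultSet rs = qExec.execSelect();
    while (rs.hasNext()) {
      System.out.println(rs.next());
    }
    qExec.close();
  }
}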
public static JSONObject buildAnswerBoxFeatures(String uri) {
  JSONObject document = new JSONObject();
  document.put("URI", new JsonString(uri));
  try {
    // Note the trailing space after ?label: without it the concatenated
    // query reads "?labelwhere" and fails to parse.
    String query =
        "select ?thumbnail ?abstract ?comment ?label "
            + "where {"
            + "<" + uri + "> <http://dbpedia.org/ontology/thumbnail> ?thumbnail;"
            + "<http://dbpedia.org/ontology/abstract> ?abstract;"
            + "<http://www.w3.org/2000/01/rdf-schema#label> ?label;"
            + "<http://www.w3.org/2000/01/rdf-schema#comment> ?comment."
            + "FILTER(langMatches(lang(?abstract), \"EN\") &&"
            + " langMatches(lang(?label), \"EN\") &&"
            + " langMatches(lang(?comment), \"EN\"))"
            + "}";
    QueryExecution qe = qef.createQueryExecution(query);
    if (qe != null) {
      ResultSet results = qe.execSelect();
      while (results.hasNext()) {
        QuerySolution next = results.next();
        RDFNode thumbnail = next.get("thumbnail");
        RDFNode abstractLiteral = next.get("abstract");
        RDFNode commentLiteral = next.get("comment");
        RDFNode labelLiteral = next.get("label");
        if (thumbnail != null) {
          document.put("thumbnail", new JsonString(thumbnail.asResource().getURI()));
        }
        if (abstractLiteral != null) {
          document.put("abstract", new JsonString(abstractLiteral.asLiteral().getString()));
        }
        if (commentLiteral != null) {
          document.put("comment", new JsonString(commentLiteral.asLiteral().getString()));
        }
        if (labelLiteral != null) {
          document.put("label", new JsonString(labelLiteral.asLiteral().getString()));
        }
      }
    }
  } catch (Exception e) {
    log.error("Cannot ask DBpedia for verbose description of " + uri, e);
  }
  return document;
}
@Override
public Country getCountryIdentifier() {
  RDFNode countryIdentifier = getPropertyValue(mdrDatabase.getVocabulary().countryIdentifier);
  if (countryIdentifier == null) {
    logger.debug("LanguageIdentification does not have countryIdentifier");
    return null;
  }
  return Country.getByValue(countryIdentifier.asLiteral().getString());
}
@Override
protected Object readObject(final int columnOrdinal) throws SQLException {
  checkPosition();
  checkColumn(columnOrdinal);
  final QuerySolution soln = (QuerySolution) getRowObject();
  final String colName = query.getProjectVars().get(columnOrdinal - 1).getName();
  final RDFNode node = soln.get(colName);
  if (node == null) {
    return null;
  }
  if (node.isLiteral()) {
    return TypeConverter.getJavaValue(node.asLiteral());
  }
  return node.toString();
}
public static void main(String[] args) throws IOException {
  Model model = ModelFactory.createDefaultModel();
  try (InputStream is =
      new BZip2CompressorInputStream(new URL(DBPEDIA_SCHEMA_DOWNLOAD_URL).openStream())) {
    // model = FileManager.get().loadModel("input/dbpedia_3.9.owl");
    model.read(is, null, "RDF/XML");
  }
  System.out.println(model.size() + " triples loaded.");

  Set<Resource> classes = subjects(model, RDF.type, OWL.Class);
  Set<Resource> objectProperties = subjects(model, RDF.type, OWL.ObjectProperty);
  Set<Resource> dataProperties = subjects(model, RDF.type, OWL.DatatypeProperty);

  Map<Set<Resource>, String> setToName = new HashMap<>();
  setToName.put(classes, "classes");
  setToName.put(objectProperties, "objectproperties");
  setToName.put(dataProperties, "dataproperties");

  FieldType stringType = new FieldType(StringField.TYPE_STORED);
  stringType.setStoreTermVectors(false);
  FieldType textType = new FieldType(TextField.TYPE_STORED);
  textType.setStoreTermVectors(false);

  for (Set<Resource> set : setToName.keySet()) {
    IndexWriter writer = createWriter(setToName.get(set));
    for (Resource resource : set) {
      for (RDFNode object : model.listObjectsOfProperty(resource, RDFS.label).toSet()) {
        String label = object.asLiteral().getLexicalForm();
        Document luceneDocument = new Document();
        luceneDocument.add(new Field("uri", resource.getURI(), stringType));
        luceneDocument.add(new Field("label", label, textType));
        writer.addDocument(luceneDocument);
      }
    }
    writer.commit();
    writer.close();
  }
}
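A hedged sketch of querying one of the indexes written above; it assumes a Lucene 5+ classpath (FSDirectory.open taking a Path) and that the createWriter helper used the set name as the on-disk directory, neither of which is shown in the original:

import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;

public class LabelLookup {
  public static void main(String[] args) throws Exception {
    // "classes" is one of the index names used above; the path is an assumption
    try (DirectoryReader reader =
        DirectoryReader.open(FSDirectory.open(Paths.get("classes")))) {
      IndexSearcher searcher = new IndexSearcher(reader);
      // "label" is analyzed (TextField), so the term should be a single token
      TopDocs hits = searcher.search(new TermQuery(new Term("label", "person")), 10);
      for (ScoreDoc sd : hits.scoreDocs) {
        System.out.println(searcher.doc(sd.doc).get("uri"));
      }
    }
  }
}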
@SuppressWarnings("unchecked") public Set<S> getNegativeExamples(EvaluatedAxiom<T> axiom) { ResultSet rs = executeSelectQuery(negExamplesQueryTemplate.toString()); Set<OWLObject> negExamples = new TreeSet<OWLObject>(); while (rs.hasNext()) { RDFNode node = rs.next().get("s"); if (node.isResource()) { negExamples.add(df.getOWLNamedIndividual(IRI.create(node.asResource().getURI()))); } else if (node.isLiteral()) { negExamples.add(convertLiteral(node.asLiteral())); } } return (Set<S>) negExamples; // throw new UnsupportedOperationException("Getting negative examples is not possible."); }
public int[] getSourceColumns(Resource resource) {
  if (_model.contains(resource, Vertere.source_column)) {
    Statement sourceColumn = _model.getProperty(resource, Vertere.source_column);
    return new int[] {sourceColumn.getInt()};
  } else if (_model.contains(resource, Vertere.source_columns)) {
    Statement sourceColumns = _model.getProperty(resource, Vertere.source_columns);
    Resource listResource = sourceColumns.getResource();
    RDFList list = listResource.as(RDFList.class);
    List<RDFNode> javalist = list.asJavaList();
    int[] sourceColumnNumbers = new int[javalist.size()];
    for (int i = 0; i < javalist.size(); i++) {
      RDFNode node = javalist.get(i);
      Literal value = node.asLiteral();
      sourceColumnNumbers[i] = value.getInt();
    }
    return sourceColumnNumbers;
  } else {
    return new int[0];
  }
}
@Override
public void nextTuple() {
  try {
    if (numOfReports < maxReports) {
      currTime = System.nanoTime();
      LOG.info(
          "startTime: " + startTime + " currTime: " + currTime
              + " interval: " + ((currTime - startTime) * 1e-6));
      if (((currTime - startTime) * 1e-6) >= interval) {
        if (query == null) query = QueryFactory.create(queryString);
        Store store = StoreFactory.getJenaHBaseStore(configFile, iri, isReified);
        int solnCount = 0;
        ResultSet rs = store.executeSelectQuery(query);
        List<Var> listVars = query.getProject().getVars();
        while (rs.hasNext()) {
          solnCount++;
          QuerySolution qs = rs.next();
          Object[] results = new String[listVars.size() + 1];
          results[0] = (solnCount == 1) ? "new" : "cont";
          for (int i = 1; i <= listVars.size(); i++) {
            Var var = listVars.get(i - 1);
            RDFNode value = qs.get(var.toString());
            // isResource() already covers both URI resources and blank nodes
            if (value.isResource()) results[i] = value.asResource().toString();
            else if (value.isLiteral()) results[i] = value.asLiteral().toString();
          }
          collector.emit(new Values(results));
        }
        numOfReports += 1;
        startTime = currTime;
      }
      Thread.sleep(30000);
    }
  } catch (Exception e) {
    throw new TopologyException("Exception in query spout:: ", e);
  }
}
protected String getTextForRow(QuerySolution row, boolean addSpace) {
  if (row == null) return "";
  StringBuffer text = new StringBuffer();
  Iterator<String> iter = row.varNames();
  while (iter.hasNext()) {
    String name = iter.next();
    RDFNode node = row.get(name);
    if (node != null) {
      String value = (node.isLiteral()) ? node.asLiteral().getString() : node.toString();
      if (StringUtils.isNotBlank(value)) {
        if (addSpace) {
          text.append(" ").append(value);
        } else {
          text.append(value);
        }
      }
    } else {
      log.debug(name + " is null");
    }
  }
  return text.toString();
}
/** * Builds up statements like [] rr:template "http://data.example.com/department/{DEPTNO}" or [] * rr:class ex:Department and returns them as a Statement List. * * @param r2rml the target com.hp.hpl.jena.rdf.model.Model * @param mappingData the target that should be mapped to relational structures (subject, * predicate or object) * @param varDefs the construction definition of the target value based in the actual database * value and some additional data like prefix strings or certain functions like uri( ... ) * @return a List<Statement> containing all the subject map statements */ private List<Statement> buildMapStatements(Model r2rml, Node mappingData, VarDefinition varDefs) { List<Statement> results = new ArrayList<Statement>(); // a blank node [] Resource mapSubject = ResourceFactory.createResource(); // rr:template or rr:column or rr:constant Property mapPredicate; // a literal like "http://data.example.com/department/{DEPTNO}" or // simply "DEPTNO" (column name) or a constant "Foo bar!!" // (or in rare cases a URI, which is handled separately) Literal mapObject; // template or column or constant if (mappingData.isVariable()) { Collection<RestrictedExpr> restrictions = varDefs.getDefinitions((Var) mappingData); List<PredicateAndObject> mapPredicateAndObjects = processRestrictions(restrictions); for (PredicateAndObject result : mapPredicateAndObjects) { mapPredicate = result.getPrediacte(); Statement resultStatement; RDFNode rawObject = result.getRawObject(); // object is literal if (rawObject.isLiteral()) { mapObject = rawObject.asLiteral(); resultStatement = r2rml.createStatement(mapSubject, mapPredicate, mapObject); // object is blank node } else if (rawObject.isAnon()) { Resource mapResObject = rawObject.asResource(); resultStatement = r2rml.createStatement(mapSubject, mapPredicate, mapResObject); // object is resource } else { Resource mapResObject = rawObject.asResource(); resultStatement = r2rml.createStatement(mapSubject, mapPredicate, mapResObject); } results.add(resultStatement); } // everything that is not a variable is handled as a constant } else if (mappingData.isConcrete()) { // URIs and Literals have to be handled separately since the methods // to retrieve the respective value are different Statement resultStatement; // URI if (mappingData.isURI()) { /* * This case is somewhat special since the mapObject is not a * Literal. So, this needs some special handling: * - the Literal mapObject will not be used * - a special mapObject_uri Resource will be created * - the result will be created, appended to the List and * returned to not go through any further ordinary processing */ Resource mapObject_uri = ResourceFactory.createResource(mappingData.getURI()); mapPredicate = ResourceFactory.createProperty(rrNamespace, "constant"); resultStatement = r2rml.createStatement(mapSubject, mapPredicate, mapObject_uri); results.add(resultStatement); return results; // Literal } else if (mappingData.isLiteral()) { mapObject = ResourceFactory.createPlainLiteral(mappingData.getLiteral().toString(false)); // else (e.g. blank node) } else { // mapSubject.isBlank() == true /* * Hmm... I think this violates the standard. So lean back and * enjoy the trace... */ mapObject = null; } mapPredicate = ResourceFactory.createProperty(rrPrefix, "constant"); resultStatement = r2rml.createStatement(mapSubject, mapPredicate, mapObject); results.add(resultStatement); } return results; }
/* (non-Javadoc)
 * @see com.hp.hpl.jena.rdf.model.RDFNode#asLiteral()
 */
@Override
public Literal asLiteral() {
  return rdfNode.asLiteral();
}
/**
 * Get JSON map for a given resource by applying the river settings
 *
 * @param rs resource being processed
 * @param properties properties to be indexed
 * @param model model returned by the indexing query
 * @param getPropLabel if set to true all URI property values will be indexed as their label. The
 *     label is taken as the value of one of the properties set in {@link #uriDescriptionList}.
 * @return map of properties to be indexed for rs
 */
private Map<String, ArrayList<String>> getJsonMap(
    Resource rs, Set<Property> properties, Model model, boolean getPropLabel) {
  Map<String, ArrayList<String>> jsonMap = new HashMap<String, ArrayList<String>>();
  ArrayList<String> results = new ArrayList<String>();

  if (addUriForResource) {
    results.add("\"" + rs.toString() + "\"");
    jsonMap.put("http://www.w3.org/1999/02/22-rdf-syntax-ns#about", results);
  }

  Set<String> rdfLanguages = new HashSet<String>();

  for (Property prop : properties) {
    NodeIterator niter = model.listObjectsOfProperty(rs, prop);
    String property = prop.toString();
    results = new ArrayList<String>();

    String lang;
    String currValue;

    while (niter.hasNext()) {
      RDFNode node = niter.next();
      currValue = getStringForResult(node, getPropLabel);
      if (addLanguage) {
        if (node.isLiteral()) {
          lang = node.asLiteral().getLanguage();
          if (!lang.isEmpty()) {
            rdfLanguages.add("\"" + lang + "\"");
          }
        }
      }

      String shortValue = currValue;
      int currLen = currValue.length();
      // Strip the surrounding quotes
      if (currLen > 1) shortValue = currValue.substring(1, currLen - 1);

      // Skip the value if the property has a whitelist that does not
      // contain it, or a blacklist that does contain it
      boolean whiteMapCond =
          whiteMap.containsKey(property) && !whiteMap.get(property).contains(shortValue);
      boolean blackMapCond =
          blackMap.containsKey(property) && blackMap.get(property).contains(shortValue);
      if (whiteMapCond || blackMapCond) {
        continue;
      }
      if (normalizeObj.containsKey(shortValue)) {
        results.add("\"" + normalizeObj.get(shortValue) + "\"");
      } else {
        results.add(currValue);
      }
    }

    // Do not index empty properties
    if (results.isEmpty()) continue;

    if (normalizeProp.containsKey(property)) {
      property = normalizeProp.get(property);
      if (jsonMap.containsKey(property)) {
        jsonMap.get(property).addAll(results);
      } else {
        jsonMap.put(property, results);
      }
    } else {
      jsonMap.put(property, results);
    }
  }

  if (addLanguage) {
    if (rdfLanguages.isEmpty() && !language.isEmpty()) rdfLanguages.add(language);
    if (!rdfLanguages.isEmpty()) jsonMap.put("language", new ArrayList<String>(rdfLanguages));
  }

  for (Map.Entry<String, String> it : normalizeMissing.entrySet()) {
    if (!jsonMap.containsKey(it.getKey())) {
      ArrayList<String> res = new ArrayList<String>();
      res.add("\"" + it.getValue() + "\"");
      jsonMap.put(it.getKey(), res);
    }
  }

  return jsonMap;
}
public PagingLoadResult<Example> getSPARQLQueryResultWithProperties( String query, List<String> properties, PagingLoadConfig config) throws AutoSPARQLException { List<Example> queryResult = new ArrayList<Example>(); // properties.remove("label"); int limit = config.getLimit(); int offset = config.getOffset(); int totalLength = 10; if (currentQueryResultSize == -1) { try { ResultSetRewindable rs = SparqlQuery.convertJSONtoResultSet( selectCache.executeSelectQuery(endpoint, getCountQuery(query))); currentQueryResultSize = rs.next().getLiteral(rs.getResultVars().get(0)).getInt(); } catch (Exception e) { e.printStackTrace(); currentQueryResultSize = 10; } } totalLength = currentQueryResultSize; List<String> propertiesToDo = new ArrayList<String>(properties); for (Map<String, Object> prop2Value : propertiesCache.values()) { propertiesToDo.removeAll(prop2Value.keySet()); } if (propertiesToDo.size() > 0) { String queryTriples = query.substring(18, query.length() - 1); StringBuilder newQuery = new StringBuilder(); Map<String, String> var2URIMap = new HashMap<String, String>(propertiesToDo.size()); if (propertiesToDo.size() == 1 && propertiesToDo.get(0).equals(RDFS.label.getURI())) { newQuery.append("SELECT DISTINCT ?x0 ?label ?imageURL{"); newQuery.append(queryTriples); newQuery.append("?x0 <").append(RDFS.label).append("> ?label.\n"); newQuery .append("OPTIONAL{?x0 <") .append("http://dbpedia.org/ontology/thumbnail") .append("> ?imageURL.}\n"); newQuery.append("FILTER(LANGMATCHES(LANG(?label),'en'))"); newQuery.append("}"); } else { for (String property : propertiesToDo) { var2URIMap.put( property2LabelMap.get(property).replace(" ", "_").replace("(", "").replace(")", ""), property); } newQuery.append("SELECT DISTINCT ?x0 ?label ?imageURL "); for (String var : var2URIMap.keySet()) { newQuery.append("?").append(var).append(" "); newQuery.append("?").append(var).append("_label "); } newQuery.append("{"); newQuery.append(queryTriples); newQuery.append("?x0 <").append(RDFS.label).append("> ?label.\n"); newQuery .append("OPTIONAL{?x0 <") .append("http://dbpedia.org/ontology/thumbnail") .append("> ?imageURL.}\n"); for (Entry<String, String> entry : var2URIMap.entrySet()) { newQuery .append("OPTIONAL{?x0 <") .append(entry.getValue()) .append("> ?") .append(entry.getKey()) .append(".}\n"); } for (Entry<String, String> entry : var2URIMap.entrySet()) { newQuery .append("OPTIONAL{?") .append(entry.getKey()) .append(" <") .append(RDFS.label) .append("> ?") .append(entry.getKey()) .append("_label.\n"); newQuery.append("FILTER(LANGMATCHES(LANG(?" + entry.getKey() + "_label),'en'))}\n"); } newQuery.append("FILTER(LANGMATCHES(LANG(?label),'en'))"); newQuery.append("}"); } logger.debug("Query with properties:\n" + newQuery.toString()); try { ResultSetRewindable rs = SparqlQuery.convertJSONtoResultSet( selectCache.executeSelectQuery(endpoint, modifyQuery(newQuery + " LIMIT 1000"))); String uri; String label = ""; String imageURL = ""; QuerySolution qs; RDFNode object; while (rs.hasNext()) { qs = rs.next(); uri = qs.getResource("x0").getURI(); label = qs.getLiteral("label").getLexicalForm(); imageURL = qs.getResource("imageURL") != null ? 
qs.getResource("imageURL").getURI() : ""; Map<String, Object> properties2Value = propertiesCache.get(uri); if (properties2Value == null) { properties2Value = new HashMap<String, Object>(); properties2Value.put(RDFS.label.getURI(), label); properties2Value.put("http://dbpedia.org/ontology/thumbnail", imageURL); propertiesCache.put(uri, properties2Value); } Object value; String property; for (Entry<String, String> entry : var2URIMap.entrySet()) { value = ""; property = entry.getValue(); object = qs.get(entry.getKey() + "_label"); if (object == null) { object = qs.get(entry.getKey()); } if (object != null) { if (object.isURIResource()) { value = object.asResource().getURI(); } else if (object.isLiteral()) { Literal lit = object.asLiteral(); // if(lit.getDatatypeURI().equals(XSD.BOOLEAN)){ // property2DatatypeMap.put(property, Boolean.class); // value = lit.getBoolean(); // } else if(lit.getDatatypeURI().equals(XSD.INT)){ // property2DatatypeMap.put(property, Integer.class); // value = lit.getInt(); // } else if(lit.getDatatypeURI().equals(XSD.DOUBLE)){ // property2DatatypeMap.put(property, Double.class); // value = lit.getDouble(); // } else if(lit.getDatatypeURI().equals(XSD.FLOAT)){ // property2DatatypeMap.put(property, Float.class); // value = lit.getFloat(); // } else { // property2DatatypeMap.put(property, String.class); // value = object.asLiteral().getLexicalForm(); // } value = object.asLiteral().getLexicalForm(); } } Object oldValue = properties2Value.get(property); if (oldValue != null && value != null) { value = oldValue + ", " + value; } properties2Value.put(property, value); } } } catch (Exception e) { logger.error("Error while getting result for query \n" + newQuery, e); } } Example example; int cnt = 0; for (Entry<String, Map<String, Object>> uri2PropertyValues : propertiesCache.entrySet()) { // if(cnt++ == limit+offset){ // break; // } // if(cnt > offset){ example = new Example(); example.setAllowNestedValues(false); example.set("uri", uri2PropertyValues.getKey()); Object value; String property; for (Entry<String, Object> property2Value : uri2PropertyValues.getValue().entrySet()) { property = property2Value.getKey(); value = property2Value.getValue(); // if(value == null){ // Class cls = property2DatatypeMap.get(property); // if(cls == String.class){ // value = ""; // } else if(cls == Integer.class){ // value = Integer.valueOf(-1); // } else if(cls == Double.class){ // value = Double.valueOf(-1); // } else if(cls == Float.class){ // value = Float.valueOf(-1); // } else if(cls == Boolean.class){ // value = Boolean.FALSE; // } // } example.set(property, value); } queryResult.add(example); // } } if (config.getSortInfo().getSortField() != null) { final String sortField = config.getSortInfo().getSortField(); Collections.sort( queryResult, config .getSortInfo() .getSortDir() .comparator( new Comparator<Example>() { @Override public int compare(Example o1, Example o2) { return ((String) o1.get(sortField)).compareTo((String) o2.get(sortField)); } })); } int start = config.getOffset(); int end = queryResult.size(); if (limit > 0) { end = Math.min(start + limit, end); } // queryResult = queryResult.subList(start, end); ArrayList<Example> tmp = new ArrayList<Example>(); for (int i = start; i < end; i++) { tmp.add(queryResult.get(i)); } PagingLoadResult<Example> result = new BasePagingLoadResult<Example>(tmp); result.setOffset(offset); result.setTotalLength(totalLength); return result; }
public void add(Statement stmt, DataSet datastore) {
  RDFNode object = stmt.getObject();
  if (object.isLiteral()) {
    if (object.asLiteral().getDatatype() != null) {
      if (object.asLiteral().getDatatype() instanceof XSDBaseNumericType) {
        integers.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDDouble) {
        doubles.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDFloat) {
        floats.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDYearType) {
        gYears.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDDateType) {
        dates.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDYearMonthType) {
        gYearMonths.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDMonthDayType) {
        gMonthDays.add(stmt, datastore);
      } else if (object.asLiteral().getDatatype() instanceof XSDTimeType) {
        times.add(stmt, datastore);
      } else if (object.asLiteral().getDatatypeURI().contains("dbpedia.org/datatype")) {
        // DBpedia-specific datatypes (currencies etc.) are treated as doubles
        doubles.add(stmt, datastore);
      } else {
        unknowns.add(stmt, datastore);
        Evaluator.printStatement(stmt, "LiteralSwitch - unable to add an unknown data type:");
        System.out.println("datatype:" + object.asLiteral().getDatatype());
        System.out.println("datatype uri:" + object.asLiteral().getDatatypeURI());
      }
    } else {
      // No datatype: a plain literal, with or without a language tag, is a
      // string. (Jena's getLanguage() returns "" rather than null, so the
      // original "getLanguage() != null" test always succeeded anyway.)
      strings.add(stmt, datastore);
    }
  }
}
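A small sketch of the datatype test driving the dispatch above; XSDBaseNumericType is the Jena implementation class behind the numeric XSD types, as the snippet itself assumes:

import com.hp.hpl.jena.datatypes.RDFDatatype;
import com.hp.hpl.jena.datatypes.xsd.impl.XSDBaseNumericType;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class DatatypeDispatchDemo {
  public static void main(String[] args) {
    Model m = ModelFactory.createDefaultModel();
    RDFDatatype dt = m.createTypedLiteral(42).getDatatype();
    System.out.println(dt instanceof XSDBaseNumericType); // true: xsd:int is numeric
    // Plain literals have no datatype and fall through to the string branch:
    System.out.println(m.createLiteral("plain").getDatatype()); // null
  }
}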
@Test(enabled = false) public void countTriplesFromVirtuoso() throws Exception { StringBuilder sb = new StringBuilder(); sb.append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> "); sb.append("PREFIX owl: <http://www.w3.org/2002/07/owl#> "); sb.append("PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> "); sb.append("PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> "); sb.append("PREFIX fem: <http://www.sifemontologies.com/ontologies/FiniteElementModel.owl#> "); sb.append("PREFIX pak: <http://www.sifemontologies.com/ontologies/FEMSettingsPAK.owl#> "); sb.append("PREFIX sim: <http://www.sifemontologies.com/ontologies/Simulation.owl#> "); sb.append( "SELECT ?material ?materialType ?node ?xCoord ?yCoord ?zCoord ?translationX ?translationY ?translationZ ?subDomain ?id "); sb.append( "FROM NAMED <http://www.sifemontologies.com/ontologies/Simulation/DisplacementInstance_0> "); sb.append("WHERE { "); sb.append("GRAPH ?g { "); sb.append( "?material rdf:type fem:Material . ?material fem:hasMaterialNumber ?y . ?material pak:hasMaterialSettings ?z . ?z pak:MATTYPE ?materialType. FILTER (?y=1) . ?node rdf:type fem:Node . "); sb.append( "?node fem:isNodeOf ?subDomain. ?node fem:hasNodeID ?id. ?subDomain fem:makesUp ?subDomainGroup. ?subDomainGroup fem:hasMaterial ?material. ?node fem:hasXCoordinate ?xCoord . ?node fem:hasYCoordinate ?yCoord . ?node fem:hasZCoordinate ?zCoord . ?node fem:holdsValueFor ?b . ?b rdf:type fem:Translation. ?b sim:hasVectorValue ?a . ?a sim:isReal true . ?a sim:hasVectorXValue ?translationX . ?a sim:hasVectorYValue ?translationY . ?a sim:hasVectorZValue ?translationZ . "); sb.append("} "); sb.append("}"); try { List<String> queryVariables = new ArrayList<String>(); queryVariables.add("material"); queryVariables.add("materialType"); queryVariables.add("node"); queryVariables.add("xCoord"); queryVariables.add("yCoord"); queryVariables.add("zCoord"); queryVariables.add("translationX"); queryVariables.add("translationY"); queryVariables.add("translationZ"); queryVariables.add("subDomain"); queryVariables.add("id"); Map<String, List<RDFNode>> queryModel = queryModel(sb.toString(), queryVariables, virtDataset); // List<RDFNode> materials = queryModel.get(queryVariables.get(0)); // List<RDFNode> materialTypes = queryModel.get(queryVariables.get(1)); List<RDFNode> nodes = queryModel.get(queryVariables.get(2)); List<RDFNode> xCoords = queryModel.get(queryVariables.get(3)); // List<RDFNode> yCoords = queryModel.get(queryVariables.get(4)); // List<RDFNode> zCoords = queryModel.get(queryVariables.get(5)); List<RDFNode> translationXs = queryModel.get(queryVariables.get(6)); // List<RDFNode> translationYs = queryModel.get(queryVariables.get(7)); // List<RDFNode> translationZs = queryModel.get(queryVariables.get(8)); List<RDFNode> subDomains = queryModel.get(queryVariables.get(9)); List<RDFNode> ids = queryModel.get(queryVariables.get(10)); System.out.println("Nodes number: " + nodes.size()); // Set<RDFNode> nodesCovered = new HashSet<RDFNode>(); Map<RDFNode, Float> nodeXDisplacement = new HashMap<RDFNode, Float>(); Map<Integer, RDFNode> idNodeMap = new HashMap<Integer, RDFNode>(); Map<RDFNode, Float> nodeXCoord = new HashMap<RDFNode, Float>(); int size = subDomains.size(); for (int i = 0; i < size; i++) { Float displacementX = translationXs.get(i).asLiteral().getFloat(); Float currentXCoord = xCoords.get(i).asLiteral().getFloat(); System.out.println(currentXCoord); RDFNode node = nodes.get(i); RDFNode id = ids.get(i); int nodeNumber = id.asLiteral().getInt(); 
nodeXDisplacement.put(node, displacementX); nodeXCoord.put(node, currentXCoord); // nodesCovered.add(node); idNodeMap.put(nodeNumber, node); } System.out.println("Final Nodes Remaining: " + idNodeMap.size()); List<Double> datax = new ArrayList<Double>(); List<Double> datay = new ArrayList<Double>(); List<Integer> nodeSortedIds = new ArrayList<Integer>(); for (int i : idNodeMap.keySet()) { nodeSortedIds.add(i); } Collections.sort(nodeSortedIds); Float previousXCoord = Float.MIN_VALUE; for (Integer id : nodeSortedIds) { RDFNode node = idNodeMap.get(id); Float displacement = nodeXDisplacement.get(node); Float currentXCoord = nodeXCoord.get(node); if (currentXCoord > previousXCoord) { datax.add(currentXCoord.doubleValue()); datay.add(displacement.doubleValue()); previousXCoord = currentXCoord; } } System.out.println(nodeSortedIds.size()); System.out.println(datax); System.out.println(datax.size()); System.out.println(datay); System.out.println(datay.size()); FeatureExtractor f = new FeatureExtractor(); // f.plot2dGFeat(datax, "NodesInIncreasingOrder", datay, "XDisplacement", // "DisplacementGraph","C:/Users/JoaoBoscoJares/Desktop/tmp/sifem2bkp/SifemCore/src/test/resources/" ); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } finally { sb = new StringBuilder(); } }
@Test(enabled = false) public void generateWithTTLFromSource() { String datTurtle = "src/test/resources/dat_input.ttl"; String unvTurtle = "src/test/resources/unv_output.ttl"; JenaModel model = new JenaModel(); model.importDataService(datTurtle); model.importDataService(unvTurtle); // String query = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n" + // "PREFIX owl: <http://www.w3.org/2002/07/owl#>\n"+ // "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>\n"+ // "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"+ // "PREFIX fem: <http://www.sifemontologies.com/ontologies/FiniteElementModel.owl#>\n"+ // "PREFIX pak: <http://www.sifemontologies.com/ontologies/FEMSettingsPAK.owl#>\n"+ // "PREFIX sim: <http://www.sifemontologies.com/ontologies/Simulation.owl#>\n"+ // "SELECT ?material ?materialType ?node ?xCoord ?yCoord ?zCoord ?translationX ?translationY // ?translationZ ?subDomain ?id WHERE { ?material rdf:type fem:Material . ?material // fem:hasMaterialNumber ?y . ?material pak:hasMaterialSettings ?z . ?z pak:MATTYPE // ?materialType. FILTER (?y=1) . ?node rdf:type fem:Node . " + // "?node fem:isNodeOf ?subDomain. ?node fem:hasNodeID ?id. ?subDomain fem:makesUp // ?subDomainGroup. ?subDomainGroup fem:hasMaterial ?material. ?node fem:hasXCoordinate ?xCoord // . ?node fem:hasYCoordinate ?yCoord . ?node fem:hasZCoordinate ?zCoord . ?node // fem:holdsValueFor ?b . ?b rdf:type fem:Translation. ?b sim:hasVectorValue ?a . ?a sim:isReal // true . ?a sim:hasVectorXValue ?translationX . ?a sim:hasVectorYValue ?translationY . ?a // sim:hasVectorZValue ?translationZ . } " + // ""; String query2 = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " + "PREFIX owl: <http://www.w3.org/2002/07/owl#> " + "PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " + "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " + "PREFIX fem: <http://www.sifemontologies.com/ontologies/FiniteElementModel.owl#> " + "PREFIX pak: <http://www.sifemontologies.com/ontologies/FEMSettingsPAK.owl#> " + "PREFIX sim: <http://www.sifemontologies.com/ontologies/Simulation.owl#> " + "SELECT ?material ?materialType ?node ?xCoord ?yCoord ?zCoord ?translationX ?translationY ?translationZ ?subDomain ?id " + "WHERE { " + "?material rdf:type fem:Material . ?material fem:hasMaterialNumber ?y . ?material pak:hasMaterialSettings ?z . ?z pak:MATTYPE ?materialType. FILTER (?y=1) . ?node rdf:type fem:Node . " + "?node fem:isNodeOf ?subDomain. ?node fem:hasNodeID ?id. ?subDomain fem:makesUp ?subDomainGroup. ?subDomainGroup fem:hasMaterial ?material. ?node fem:hasXCoordinate ?xCoord . ?node fem:hasYCoordinate ?yCoord . ?node fem:hasZCoordinate ?zCoord . ?node fem:holdsValueFor ?b . ?b rdf:type fem:Translation. ?b sim:hasVectorValue ?a . ?a sim:isReal true . ?a sim:hasVectorXValue ?translationX . ?a sim:hasVectorYValue ?translationY . ?a sim:hasVectorZValue ?translationZ . 
" + "} "; List<String> queryVariables = new ArrayList<String>(); queryVariables.add("material"); queryVariables.add("materialType"); queryVariables.add("node"); queryVariables.add("xCoord"); queryVariables.add("yCoord"); queryVariables.add("zCoord"); queryVariables.add("translationX"); queryVariables.add("translationY"); queryVariables.add("translationZ"); queryVariables.add("subDomain"); queryVariables.add("id"); Map<String, List<RDFNode>> queryModel = model.queryModelService(query2, queryVariables); // List<RDFNode> materials = queryModel.get(queryVariables.get(0)); // List<RDFNode> materialTypes = queryModel.get(queryVariables.get(1)); List<RDFNode> nodes = queryModel.get(queryVariables.get(2)); List<RDFNode> xCoords = queryModel.get(queryVariables.get(3)); // List<RDFNode> yCoords = queryModel.get(queryVariables.get(4)); // List<RDFNode> zCoords = queryModel.get(queryVariables.get(5)); List<RDFNode> translationXs = queryModel.get(queryVariables.get(6)); // List<RDFNode> translationYs = queryModel.get(queryVariables.get(7)); // List<RDFNode> translationZs = queryModel.get(queryVariables.get(8)); List<RDFNode> subDomains = queryModel.get(queryVariables.get(9)); List<RDFNode> ids = queryModel.get(queryVariables.get(10)); System.out.println("Nodes number: " + nodes.size()); // Set<RDFNode> nodesCovered = new HashSet<RDFNode>(); Map<RDFNode, Float> nodeXDisplacement = new HashMap<RDFNode, Float>(); Map<Integer, RDFNode> idNodeMap = new HashMap<Integer, RDFNode>(); Map<RDFNode, Float> nodeXCoord = new HashMap<RDFNode, Float>(); int size = subDomains.size(); for (int i = 0; i < size; i++) { Float displacementX = translationXs.get(i).asLiteral().getFloat(); Float currentXCoord = xCoords.get(i).asLiteral().getFloat(); System.out.println(currentXCoord); RDFNode node = nodes.get(i); RDFNode id = ids.get(i); int nodeNumber = id.asLiteral().getInt(); nodeXDisplacement.put(node, displacementX); nodeXCoord.put(node, currentXCoord); // nodesCovered.add(node); idNodeMap.put(nodeNumber, node); } System.out.println("Final Nodes Remaining: " + idNodeMap.size()); List<Double> datax = new ArrayList<Double>(); List<Double> datay = new ArrayList<Double>(); List<Integer> nodeSortedIds = new ArrayList<Integer>(); for (int i : idNodeMap.keySet()) nodeSortedIds.add(i); Collections.sort(nodeSortedIds); Float previousXCoord = Float.MIN_VALUE; for (Integer id : nodeSortedIds) { RDFNode node = idNodeMap.get(id); Float displacement = nodeXDisplacement.get(node); Float currentXCoord = nodeXCoord.get(node); if (currentXCoord > previousXCoord) { datax.add(currentXCoord.doubleValue()); datay.add(displacement.doubleValue()); previousXCoord = currentXCoord; } } System.out.println(nodeSortedIds.size()); System.out.println(datax); System.out.println(datax.size()); System.out.println(datay); System.out.println(datay.size()); FeatureExtractor f = new FeatureExtractor(); // f.plot2dGFeat(datax, "NodesInIncreasingOrder", datay, "XDisplacement", // "DisplacementGraph","C:/Users/JoaoBoscoJares/Desktop/tmp/sifem2bkp/SifemCore/src/test/resources/" ); }
private static void addAdditionalProperties() {
  // Map<predicate, values>: the interesting predicates of one URI object
  Map<Property, List<RDFNode>> resourceInterestingInfoExtension =
      new HashMap<Property, List<RDFNode>>();
  // Map<object, predicate-value data>: each URI object with its retrieved data,
  // looked up whenever that object's data needs to be added for extension
  Map<RDFNode, Map<Property, List<RDFNode>>> objectWithInfoAttached =
      new HashMap<RDFNode, Map<Property, List<RDFNode>>>();
  // Get the list of unique URI objects in the data source, e.g.
  // http://dbpedia.org/resource/XXXX
  List<RDFNode> urisObjects = getURIObjects();
  logger.info("Number of unique URI objects to find extension: " + urisObjects.size());
  if (urisObjects.size() > 0) {
    // For each unique URI object, retrieve its predicate-value pairs and
    // store them in the map, keyed by the object
    for (RDFNode uriObject : urisObjects) {
      resourceInterestingInfoExtension = getURIInfo(uriObject);
      objectWithInfoAttached.put(uriObject, resourceInterestingInfoExtension);
    }
  } else {
    // No URI objects to extend
    return;
  }
  List<Statement> triplesWithURIsObjects = getTriplesWithURIObjects();
  logger.info("Starting model enriching");
  if (triplesWithURIsObjects.size() > 0) {
    // Iterate over the triples, attaching each URI object's information to it
    for (Statement triple : triplesWithURIsObjects) {
      // The URI object itself is the resource that gets enriched
      Resource enrichedResource = (Resource) triple.getObject();
      // Look up the predicate-value pairs retrieved for this object
      Map<Property, List<RDFNode>> objectPredicateValuePairs =
          objectWithInfoAttached.get(triple.getObject());
      int i = 0;
      for (Property predicate : objectPredicateValuePairs.keySet()) {
        for (RDFNode value : objectPredicateValuePairs.get(predicate)) {
          Property outputProperty =
              (i < outputProperties.size()) ? outputProperties.get(i) : defaultOutputProperty;
          if (value.isLiteral()) {
            localModel.add(enrichedResource, outputProperty, value.asLiteral().getString());
            logger.info(
                "Triple found: <" + enrichedResource + "> <" + outputProperty + "> \""
                    + value.toString() + "\"");
          } else {
            localModel.add(enrichedResource, outputProperty, value);
            logger.info(
                "Triple found: <" + enrichedResource + "> <" + outputProperty + "> <" + value
                    + ">");
          }
        }
        i++;
      }
    }
  }
}
/**
 * @param predicates list of targeted predicates used to enrich the model.
 *     <p>Calls getTriplesWithURIObjects() to retrieve the triples in the model that have
 *     URI-typed objects. Each such object is checked against DBpedia (this can be extended
 *     later); getURIInfo() then dereferences the URI-typed object and retrieves the values of
 *     the targeted predicates, if they exist. The resulting map is iterated over and its
 *     entries are attached to the resources in the model via blank nodes.
 */
private static void addAdditionalPropertiesUsingBlankNode(Map<String, String> predicates) {
  // Map<predicate, values>: the interesting predicates of one URI object
  Map<Property, List<RDFNode>> resourceInterestingInfoExtension =
      new HashMap<Property, List<RDFNode>>();
  // Map<object, resource>: each URI object with the blank-node resource holding
  // its data, looked up whenever that object's data needs to be added
  Map<RDFNode, Resource> objectFilledResource = new HashMap<RDFNode, Resource>();
  // Get the list of unique URI objects in the data source, e.g.
  // http://dbpedia.org/resource/XXXX
  List<RDFNode> urisObjects = getURIObjects();
  logger.info("Number of unique URI objects to find extension: " + urisObjects.size());
  if (urisObjects.size() > 0) {
    // The blank-node resource holding each URI object's extended data
    Resource object = null;
    int count = 1;
    // For each URI object, create a resource, fill it with information, and
    // record the object together with its data resource in the map
    for (RDFNode uriObject : urisObjects) {
      logger.info("Object " + count++ + " of " + urisObjects.size() + ": " + uriObject);
      // Create a resource backed by a blank node
      object = localModel.createResource();
      // Retrieve all interesting <predicate, value> info for this URI object
      resourceInterestingInfoExtension = DereferencingModule.getURIInfo(uriObject);
      // Add the retrieved information to the resource
      for (Property key : resourceInterestingInfoExtension.keySet()) {
        List<RDFNode> values = resourceInterestingInfoExtension.get(key);
        for (RDFNode value : values) {
          if (value.isLiteral()) {
            object.addProperty(key, value.asLiteral().toString());
          } else {
            object.addProperty(key, value);
          }
        }
      }
      // Record the object together with its filled data resource
      objectFilledResource.put(uriObject, object);
    }
  } else {
    // No URI objects to extend
    return;
  }
  List<Statement> triplesWithURIsObjects = getTriplesWithURIObjects();
  logger.info("Starting model enriching");
  if (triplesWithURIsObjects.size() > 0) {
    Resource object = null;
    // Iterate over the triples, attaching each dereferenced URI object's
    // blank-node resource to the triple's subject
    for (Statement triple : triplesWithURIsObjects) {
      if (!objectFilledResource.containsKey(triple.getSubject())) {
        object = objectFilledResource.get(triple.getObject());
        objectsDerefModelAdded.put(triple.getObject(), object);
        // Attach the object's data resource to this subject
        Resource resource = localModel.getResource(triple.getSubject().getURI());
        resource.addProperty(defaultOutputProperty, object);
        resourceInterestingInfoExtension = null;
      }
    }
  }
}
/**
 * @param uri the URI to be dereferenced
 * @param predicates targeted predicates whose values will be added to enrich the model
 * @return a map from each targeted predicate to the list of values found for it.
 *     <p>For the given URI-typed object, an HTTP connection is opened using content
 *     negotiation and its predicate/value pairs are retrieved. For each targeted predicate,
 *     the statements carrying it are collected and their values extracted into the returned
 *     map.
 */
@SuppressWarnings("unchecked")
private static HashMap<Property, List<RDFNode>> getURIInfo(RDFNode p) {
  String uri = p.asResource().getURI();
  // stores each predicate and its values
  HashMap<Property, List<RDFNode>> resourceFocusedInfo = new HashMap<Property, List<RDFNode>>();
  if (demo) {
    // Deserialize cached results if they exist (for demo purposes)
    if (useCache) {
      try {
        HashMap<String, List<String>> ser = new HashMap<String, List<String>>();
        File file = new File("resourceFocusedInfo.ser");
        if (file.exists()) {
          ObjectInputStream in = new ObjectInputStream(new FileInputStream(file));
          ser = (HashMap<String, List<String>>) in.readObject();
          in.close();
          // convert every entry back from its string form
          for (String prop : ser.keySet()) {
            List<RDFNode> nodes = new ArrayList<RDFNode>();
            for (String n : ser.get(prop)) {
              nodes.add(ResourceFactory.createResource(n));
            }
            resourceFocusedInfo.put(ResourceFactory.createProperty(prop), nodes);
          }
          return resourceFocusedInfo;
        }
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }
  // Load the URI's data into a local model and extract the focused info
  List<RDFNode> values = new ArrayList<RDFNode>();
  try {
    URLConnection conn = new URL(uri).openConnection();
    conn.setRequestProperty("Accept", "application/rdf+xml");
    conn.setRequestProperty("Accept-Language", "en");
    Model model = ModelFactory.createDefaultModel();
    InputStream in = conn.getInputStream();
    model.read(in, null);
    for (Property inputProperty : inputProperties) {
      for (Statement st :
          model.listStatements(model.getResource(uri), inputProperty, (RDFNode) null).toList()) {
        RDFNode value = st.getObject();
        if (value.isLiteral()) {
          // keep only English or untagged literals
          String lang = value.asLiteral().getLanguage().toLowerCase();
          if (lang.equals("en") || lang.equals("")) {
            values.add(value);
          }
        } else {
          values.add(value);
        }
      }
      resourceFocusedInfo.put(inputProperty, values);
      values = new ArrayList<RDFNode>(); // fresh list for the next predicate
    }
  } catch (Exception e) {
    e.printStackTrace();
  }
  if (demo) {
    // Serialize the output (for demo purposes)
    try {
      HashMap<String, List<String>> ser = new HashMap<String, List<String>>();
      FileOutputStream fileOut = new FileOutputStream("resourceFocusedInfo.ser");
      ObjectOutputStream out = new ObjectOutputStream(fileOut);
      // convert to serializable strings
      for (Property prop : resourceFocusedInfo.keySet()) {
        List<String> l = new ArrayList<String>();
        for (RDFNode n : resourceFocusedInfo.get(prop)) {
          l.add(n.toString());
        }
        ser.put(prop.toString(), l);
      }
      out.writeObject(ser);
      out.close();
    } catch (Exception e2) {
      e2.printStackTrace();
    }
  }
  return resourceFocusedInfo;
}
Literal getObject() {
  return object.asLiteral();
}
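Calling asLiteral() on a resource throws Jena's LiteralRequiredException, so a getter like this is only safe when the node is known to be a literal. A defensive variant might look like the following sketch (NodeText and lexicalOrUri are hypothetical names):

import com.hp.hpl.jena.rdf.model.RDFNode;

public class NodeText {
  static String lexicalOrUri(RDFNode node) {
    // Guard the conversion when the node kind is not known in advance
    return node.isLiteral() ? node.asLiteral().getLexicalForm() : node.toString();
  }
}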