/** * If the {@link ConfigurationProperties} has a name for the initial admin user, create the user * and add it to the model. */ protected void createInitialAdminUser(Model model) { String initialAdminUsername = ConfigurationProperties.getProperty("initialAdminUser"); if (initialAdminUsername == null) { return; } // A hard-coded MD5 encryption of "defaultAdmin" String initialAdminPassword = "******"; String vitroDefaultNs = DEFAULT_DEFAULT_NAMESPACE; Resource user = model.createResource(vitroDefaultNs + "defaultAdminUser"); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.RDF_TYPE), model.getResource(VitroVocabulary.USER))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_USERNAME), model.createTypedLiteral(initialAdminUsername))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_MD5PASSWORD), model.createTypedLiteral(initialAdminPassword))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_ROLE), model.createTypedLiteral("role:/50"))); }
/**
 * Builds a Data Cube model with one observation per class, measuring the number of distinct
 * properties used by instances of that class.
 *
 * <p>When {@code inputModel} is non-null the queries run locally against it; otherwise they are
 * sent to the given SPARQL {@code endpoint}.
 *
 * @param inputModel local model to query, or null to use the endpoint
 * @param endpoint SPARQL endpoint URL, used only when inputModel is null
 * @return the populated cube model
 */
private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
  // Timestamp in the dataset URI keeps successive runs distinct.
  Resource dataset =
      cube.createResource(
          GK.uri + "Properties_per_Class" + calendar.getTimeInMillis(), QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Properties per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));

  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(INSTANCES, inputModel);
  } else {
    qExec =
        QueryExecutionFactory.sparqlService(endpoint, INSTANCES, defaultGraphs, defaultGraphs);
  }
  try {
    ResultSet result = qExec.execSelect();
    int i = 0;
    while (result.hasNext()) {
      Resource owlClass = result.next().getResource("class");
      NUMBER_OF_PROPERTIES.setIri("class", owlClass.getURI());
      QueryExecution propertiesQexec;
      if (inputModel != null) {
        propertiesQexec =
            QueryExecutionFactory.create(NUMBER_OF_PROPERTIES.asQuery(), inputModel);
      } else {
        propertiesQexec =
            QueryExecutionFactory.sparqlService(
                endpoint, NUMBER_OF_PROPERTIES.asQuery(), defaultGraphs, defaultGraphs);
      }
      try {
        ResultSet propertiesResult = propertiesQexec.execSelect();
        if (propertiesResult.hasNext()) {
          Resource obs =
              cube.createResource(
                  "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
          obs.addProperty(QB.dataset, dataset);
          obs.addProperty(GK.DIM.Class, owlClass);
          obs.addLiteral(GK.MEASURE.PropertyCount, propertiesResult.next().getLiteral("count"));
          i++;
        }
      } catch (Exception e) {
        // Record an error observation (count -1) so the failing class is still visible
        // in the cube instead of being silently dropped.
        Resource obs =
            cube.createResource(
                "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
        obs.addProperty(QB.dataset, dataset);
        obs.addProperty(GK.DIM.Class, owlClass);
        obs.addLiteral(GK.MEASURE.PropertyCount, -1);
        // getMessage() can be null (e.g. bare NPE); fall back to toString() to avoid
        // a secondary NPE inside addLiteral.
        String message = e.getMessage();
        obs.addLiteral(RDFS.comment, message == null ? e.toString() : message);
        i++;
      } finally {
        // Close the per-class execution; the original leaked one per iteration.
        propertiesQexec.close();
      }
    }
  } finally {
    qExec.close();
  }
  return cube;
}
/**
 * Scans every literal in the model and strips characters that are control codes (code points
 * 0-31) or not defined in Unicode. Statements whose literal contained such a character are
 * retracted and re-added with the cleaned lexical form, preserving the language tag or
 * datatype of the original literal.
 *
 * @param model the model to clean; modified in place under a WRITE critical section
 */
public void doCleanLiterals(Model model) {
  Model retractionsModel = ModelFactory.createDefaultModel();
  Model additionsModel = ModelFactory.createDefaultModel();
  model.enterCriticalSection(Lock.WRITE);
  try {
    ClosableIterator<Statement> closeIt = model.listStatements();
    try {
      while (closeIt.hasNext()) {
        Statement stmt = closeIt.next();
        if (!stmt.getObject().isLiteral()) {
          continue;
        }
        Literal lit = (Literal) stmt.getObject();
        String lex = lit.getLexicalForm();
        char[] chars = lex.toCharArray();
        char[] cleanChars = new char[chars.length];
        int cleanPos = 0;
        boolean badChar = false;
        for (int i = 0; i < chars.length; i++) {
          // Keep only defined, non-control characters. The original compared
          // Character.getNumericValue(c) > 31, but getNumericValue() reports digit
          // values ('a' -> 10, not its code point), which would have flagged ordinary
          // letters as bad; the code-point comparison matches the stated intent.
          if (chars[i] > 31 && java.lang.Character.isDefined(chars[i])) {
            cleanChars[cleanPos] = chars[i];
            cleanPos++;
          } else {
            log.error("Bad char in " + lex);
            log.error("Character code " + (int) chars[i]);
            badChar = true;
          }
        }
        // Use only the filled prefix: building the String from the whole buffer (as the
        // original did) appended NUL padding to every cleaned literal.
        String cleanLex = new String(cleanChars, 0, cleanPos);
        if (badChar) {
          retractionsModel.add(stmt);
          // Recreate the literal with the same language tag or datatype as the original.
          Literal newLit = null;
          if (lit.getLanguage() != null && lit.getLanguage().length() > 0) {
            newLit = additionsModel.createLiteral(cleanLex, lit.getLanguage());
          } else if (lit.getDatatype() != null) {
            newLit = additionsModel.createTypedLiteral(cleanLex, lit.getDatatype());
          } else {
            newLit = additionsModel.createLiteral(cleanLex);
          }
          additionsModel.add(stmt.getSubject(), stmt.getPredicate(), newLit);
        }
      }
    } finally {
      closeIt.close();
    }
    // Apply the delta only after iteration has finished.
    model.remove(retractionsModel);
    model.add(additionsModel);
    log.debug("Cleaned " + additionsModel.size() + " literals");
  } finally {
    model.leaveCriticalSection();
  }
}
/**
 * Converts an ORE triple into a Jena {@link Statement} backed by a fresh default model.
 *
 * <p>The object becomes a typed literal when the triple is a literal, otherwise a resource
 * built from the object URI.
 *
 * @param triple the ORE triple to convert
 * @return the equivalent Jena statement
 * @throws OREException propagated from the triple accessors
 */
public static Statement createStatement(Triple triple) throws OREException {
  Model model = ModelFactory.createDefaultModel();
  Resource subject = model.createResource(triple.getSubjectURI().toString());
  Property predicate = model.createProperty(triple.getPredicate().getURI().toString());
  RDFNode object =
      triple.isLiteral()
          ? model.createTypedLiteral(triple.getObjectLiteral())
          : model.createResource(triple.getObjectURI().toString());
  return model.createStatement(subject, predicate, object);
}
private boolean checkDialog() { if (Contract.askUser()) { if (user != null) { model .createResource(Vocab.Agreement) .addProperty(Vocab.licensee, user) .addProperty(Vocab.software, software) .addProperty(Vocab.licensor, "Hewlett-Packard Development Company, LP") .addProperty(Vocab.agreementDate, model.createTypedLiteral(Calendar.getInstance())); try { FileOutputStream fos = new FileOutputStream(license); model.write(fos, "RDF/XML-ABBREV"); fos.close(); } catch (IOException e) { // ignore } } return true; } return false; }
public void doProcessStrings(VitroRequest vreq) { try { String className = vreq.getParameter("className"); String methodName = vreq.getParameter("methodName"); String propertyName = vreq.getParameter("propertyName"); String newPropertyName = vreq.getParameter("newPropertyName"); // for now, we'll make the destination and source models the same Model destination = getModel(vreq.getParameter("destinationModelName"), vreq); String processModel = vreq.getParameter("processModel"); Model savedAdditionsModel = null; Model savedRetractionsModel = null; String additionsModelStr = vreq.getParameter("additionsModel"); if ((additionsModelStr != null) && (additionsModelStr.length() > 0)) { savedAdditionsModel = getModel(additionsModelStr, vreq); } String retractionsModelStr = vreq.getParameter("retractionsModel"); if ((retractionsModelStr != null) && (retractionsModelStr.length() > 0)) { savedRetractionsModel = getModel(retractionsModelStr, vreq); } Model additionsModel = ModelFactory.createDefaultModel(); Model retractionsModel = ModelFactory.createDefaultModel(); Class<?> stringProcessorClass = Class.forName(className); Object processor = stringProcessorClass.newInstance(); Class<?>[] methArgs = {String.class}; Method meth = stringProcessorClass.getMethod(methodName, methArgs); Property prop = ResourceFactory.createProperty(propertyName); Property newProp = ResourceFactory.createProperty(newPropertyName); destination.enterCriticalSection(Lock.READ); try { ClosableIterator<Statement> closeIt = destination.listStatements((Resource) null, prop, (RDFNode) null); for (Iterator<Statement> stmtIt = closeIt; stmtIt.hasNext(); ) { Statement stmt = stmtIt.next(); if (stmt.getObject().isLiteral()) { Literal lit = (Literal) stmt.getObject(); String lex = lit.getLexicalForm(); Object[] args = {lex}; String newLex = ""; try { newLex = (String) meth.invoke(processor, args); } catch (InvocationTargetException e) { throw new RuntimeException(e); } if (!newLex.equals(lex)) { 
retractionsModel.add(stmt); if (newLex.length() > 0) { Literal newLit = null; if (lit.getLanguage() != null && lit.getLanguage().length() > 0) { newLit = additionsModel.createLiteral(newLex, lit.getLanguage()); } else if (lit.getDatatype() != null) { newLit = additionsModel.createTypedLiteral(newLex, lit.getDatatype()); } else { newLit = additionsModel.createLiteral(newLex); } additionsModel.add(stmt.getSubject(), newProp, newLit); } } } } if (processModel != null) { destination.add(additionsModel); destination.remove(retractionsModel); } if (savedAdditionsModel != null) { savedAdditionsModel.add(additionsModel); } if (savedRetractionsModel != null) { savedRetractionsModel.add(retractionsModel); } } finally { destination.leaveCriticalSection(); } } catch (Exception e) { throw new RuntimeException(e); } }
/**
 * Creates the main resource for the observed value and attaches its basic properties.
 *
 * <p>String-valued fields are linked as resources when they look like http URIs and stored as
 * typed literals otherwise; observed property and unit of measurement delegate their
 * non-URI cases to dedicated helpers that may replace the resource.
 *
 * @return the created (possibly re-wrapped) resource
 * @throws Exception propagated from the delegated annotation helpers
 */
protected Resource createOVResource() throws Exception {
  // Prefer the locally minted URI when a resource id exists; otherwise reuse the remote URI.
  String subjuri = (resourceId != null) ? this.uristr : ov.getRemote_uri();
  Resource resource = rdfData.createResource(subjuri);

  // Simple fields share one pattern: skip blanks, link URIs, otherwise typed literal.
  addLinkOrTypedLiteral(resource, CorelfVocab.BASE_NAME, ov.getBase_name());
  addLinkOrTypedLiteral(resource, CorelfVocab.BASE_OV_NAME, ov.getBase_ov_name());

  String item = ov.getObserved_property();
  if (isPresent(item)) {
    if (item.startsWith("http://")) {
      resource.addProperty(SptVocab.OBSERVED_PROPERTY, rdfData.createResource(item));
    } else {
      // Non-URI observed properties go through the dedicated helper (may swap the resource).
      resource = addObservedProperty(resource, item, SptVocab.OBSERVED_PROPERTY);
    }
  }

  item = ov.getUnit_of_measurement();
  if (isPresent(item)) {
    if (item.startsWith("http://")) {
      resource.addProperty(SptVocab.UOM, rdfData.createResource(item));
    } else {
      resource = addUom(resource, item);
    }
  }

  String[] tprops = ov.getTsproperties();
  if (tprops != null) {
    for (int i = 0; i < tprops.length; i++) {
      // Unlike the simple fields, temporal entries are added even when blank
      // (matches the original behavior).
      if (tprops[i].startsWith("http://")) {
        resource.addProperty(SptVocab.TEMPORAL, rdfData.createResource(tprops[i]));
      } else {
        resource.addProperty(SptVocab.TEMPORAL, rdfData.createTypedLiteral(tprops[i]));
      }
    }
  }

  String[] vals = ov.getValues();
  if (vals != null) {
    for (int i = 0; i < vals.length; i++) {
      if (vals[i] != null) {
        if (vals[i].startsWith("http://")) {
          resource.addProperty(SptVocab.OUT, rdfData.createResource(vals[i]));
        } else {
          // Plain (untyped) literal here, matching the original behavior.
          resource.addProperty(SptVocab.OUT, vals[i]);
        }
      }
    }
  }

  resource = crossResourcesAnnotation(ov, resource);
  return resource;
}

/** True when the value is non-null and not blank after trimming. */
private static boolean isPresent(String value) {
  return value != null && value.trim().compareTo("") != 0;
}

/**
 * Adds {@code value} to {@code resource} under {@code property}: as a resource link when it is
 * an http URI, otherwise as a typed literal. Null or blank values are skipped.
 */
private void addLinkOrTypedLiteral(Resource resource, Property property, String value) {
  if (!isPresent(value)) {
    return;
  }
  if (value.startsWith("http://")) {
    resource.addProperty(property, rdfData.createResource(value));
  } else {
    resource.addProperty(property, rdfData.createTypedLiteral(value));
  }
}
/**
 * Builds a Data Cube model with one observation per class, measuring the average polygon
 * surface area over that class's instances.
 *
 * When inputModel is non-null all queries run locally against it; otherwise they are sent to
 * the given SPARQL endpoint. Classes whose URI appears in the blacklist are skipped (they
 * still produce an observation with average 0, since the observation is created outside the
 * blacklist check).
 */
private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Resource dataset;
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
  dataset = cube.createResource(GK.uri + "Average_Surface", QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Average Surface per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));
  if (endpoint != null) {
    dataset.addProperty(DCTerms.source, endpoint);
  }
  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(GET_CLASSES, inputModel);
  } else {
    qExec = QueryExecutionFactory.sparqlService(endpoint, GET_CLASSES, defaultGraphs, defaultGraphs);
  }
  // NOTE(review): none of the QueryExecutions in this method are ever close()d — confirm
  // whether that leak matters for the execution environment.
  ResultSet result = qExec.execSelect();
  int obsCount = 0;
  while (result.hasNext()) {
    // area accumulates surface over this class's instances; i counts the instances seen.
    double area = 0;
    int i = 0;
    Resource owlClass = result.next().get("class").asResource();
    if (!blacklist.contains(owlClass.toString())) {
      System.out.println(owlClass);
      GET_INSTANCES.setIri("class", owlClass.getURI());
      QueryExecution qexecInstances;
      if (inputModel != null) {
        qexecInstances = QueryExecutionFactory.create(GET_INSTANCES.asQuery(), inputModel);
      } else {
        qexecInstances =
            QueryExecutionFactory.sparqlService(
                endpoint, GET_INSTANCES.asQuery(), defaultGraphs, defaultGraphs);
      }
      for (ResultSet instancesResult = qexecInstances.execSelect(); instancesResult.hasNext(); ) {
        QuerySolution next = instancesResult.next();
        String instance = next.get("instance").asResource().getURI();
        if (instance == null) {
          // Blank-node instances have no URI and cannot be parameterized into the queries.
          continue;
        }
        // First attempt: treat the instance geometry as a single polygon.
        POLYGON.setIri("instance", instance);
        QueryExecution qexecMember;
        if (inputModel != null) {
          qexecMember = QueryExecutionFactory.create(POLYGON.asQuery(), inputModel);
        } else {
          qexecMember =
              QueryExecutionFactory.sparqlService(
                  endpoint, POLYGON.asQuery(), defaultGraphs, defaultGraphs);
        }
        StringBuilder polygonBuilder = new StringBuilder();
        // firstLat/firstLong are instance fields used by processPoint to close the ring.
        firstLat = null;
        firstLong = null;
        for (ResultSet latLong = qexecMember.execSelect(); latLong.hasNext(); ) {
          processPoint(latLong.next(), polygonBuilder);
        }
        if (polygonBuilder.length() > 0) {
          area += calculateArea(polygonBuilder);
        } else {
          // Fallback: no single-polygon points found, so query for a multi-polygon.
          // NOTE(review): this resets area to 0, discarding surface already accumulated
          // from earlier instances of the same class — confirm this is intended.
          area = 0;
          polygonBuilder.setLength(0);
          this.firstLat = null;
          this.firstLong = null;
          MULTI_POLYGON.setIri("instance", instance);
          QueryExecution qexecMultiPolygon;
          if (inputModel != null) {
            qexecMultiPolygon = QueryExecutionFactory.create(MULTI_POLYGON.asQuery(), inputModel);
          } else {
            qexecMultiPolygon =
                QueryExecutionFactory.sparqlService(
                    endpoint, MULTI_POLYGON.asQuery(), defaultGraphs, defaultGraphs);
          }
          // Points are grouped by the blank-node label of their polygon; whenever the label
          // changes, the completed ring is measured and the builder reset for the next one.
          String polygonName = "";
          for (ResultSet latLong = qexecMultiPolygon.execSelect(); latLong.hasNext(); ) {
            QuerySolution solution = latLong.next();
            if (!polygonName.equals(solution.get("polygon").asNode().getBlankNodeLabel())) {
              if (polygonBuilder.length() > 0) {
                area += calculateArea(polygonBuilder);
              }
              this.firstLat = null;
              this.firstLong = null;
              polygonBuilder.setLength(0);
            }
            polygonName = solution.get("polygon").asNode().getBlankNodeLabel();
            processPoint(solution, polygonBuilder);
          }
          // NOTE(review): the last ring of the multi-polygon is never passed to
          // calculateArea after this loop — confirm whether that is a deliberate omission.
        }
        i++;
      }
    }
    // One observation per class: average area over the instances counted above
    // (0 when the class was blacklisted or had no instances).
    Resource obs = cube.createResource(structureUri + "/obs/" + obsCount, QB.Observation);
    double average = i == 0 ? 0 : area / i;
    obs.addProperty(GK.MEASURE.Average, cube.createTypedLiteral(average));
    obs.addProperty(GK.DIM.Class, owlClass);
    obs.addProperty(QB.dataset, dataset);
    obsCount++;
  }
  return cube;
}
/**
 * Reads a model from the request body, applies {@code modelUpdater} to the named graph
 * identified by the request, and logs the statement-level delta as a ChangeSet file under
 * {@code logDirectory}.
 *
 * <p>Responds 400/404 when no Jena serializer matches the content type (depending on whether
 * the content type was negotiated), otherwise 200 after logging the change set.
 *
 * @param req the incoming request whose body holds the serialized model
 * @param resp the response to set status codes on
 * @param permissions permissions required for this update
 * @param modelUpdater callback that performs the actual graph mutation
 * @throws ServletException propagated from request processing
 * @throws IOException on request-body or log-file I/O failure
 */
private void updateModel(
    HttpServletRequest req,
    HttpServletResponse resp,
    Property[] permissions,
    ModelUpdater modelUpdater)
    throws ServletException, IOException {
  Query query = getQuery(req, resp);
  ServletContext context = getServletContext();
  if (query == null) {
    return;
  }
  if (performPermissionsCheck(context, query, resp, permissions)) {
    return;
  }
  String uri = query.getURI();
  AbstractSerializer as = serializer.get(query.getContentType());
  // instanceof is false for null, so this also covers the missing-serializer case.
  if (!(as instanceof JenaSerializer)) {
    resp.setStatus(
        query.negotiatedContentType()
            ? HttpServletResponse.SC_BAD_REQUEST
            : HttpServletResponse.SC_NOT_FOUND);
    return;
  }
  Model model = ModelFactory.createDefaultModel();
  model.read(req.getInputStream(), query.getURI(), ((JenaSerializer) as).getSerialization());
  Model target = dataset.getNamedModel(uri);
  // Snapshot the graph before and after the update so the exact delta can be computed.
  Set<Statement> before = target.listStatements().toSet();
  modelUpdater.updateModel(model, uri);
  Set<Statement> after = target.listStatements().toSet();
  Set<Statement> intersection = new HashSet<Statement>(before);
  intersection.retainAll(after);
  before.removeAll(intersection); // statements removed by the update
  after.removeAll(intersection); // statements added by the update
  Model changesetModel = ModelFactory.createDefaultModel();
  Resource changeset = changesetModel.createResource();
  Literal date =
      changesetModel.createTypedLiteral(
          dateFormat.format(new Date()), new BaseDatatype(XSD.date.getURI()));
  changeset
      .addProperty(Namespaces.rdf.p("type"), CS.ChangeSet)
      .addProperty(Namespaces.dc.p("date"), date)
      .addProperty(CS.subjectOfChange, changesetModel.createResource(query.getURI()))
      .addProperty(DC.creator, query.getUser());
  // Record removals and additions as reified statements on the change set.
  for (Statement statement : before) {
    changeset.addProperty(Namespaces.cs.p("removal"), statement.createReifiedStatement());
  }
  for (Statement statement : after) {
    changeset.addProperty(Namespaces.cs.p("addition"), statement.createReifiedStatement());
  }
  String filename = query.getURL().getPath() + "/" + date.getLexicalForm() + ".n3";
  File file = new File(logDirectory, filename);
  file.getParentFile().mkdirs();
  // Reuse the File computed above (the original built an equivalent second File) and make
  // sure the writer is closed even when serialization fails (the original leaked it).
  // NOTE(review): FileWriter uses the platform default charset — confirm UTF-8 is not required
  // for the N3 log files.
  FileWriter ow = new FileWriter(file);
  try {
    changesetModel.write(ow, "N3");
  } finally {
    ow.close();
  }
  resp.setStatus(HttpServletResponse.SC_OK);
}