@Test
public void shouldSortTriplesForDisplay() {
  final Model model = createDefaultModel();
  model.setNsPrefix("prefix", "namespace");
  final Property propertyA = model.createProperty("namespace", "a");
  final Property propertyB = model.createProperty("namespace", "b");
  final Property propertyC = model.createProperty("c");
  final Literal literalA = model.createLiteral("a");
  final Literal literalB = model.createLiteral("b");
  final Resource resourceB = model.createResource("b");
  model.add(resourceB, propertyA, literalA);
  final Resource a = model.createResource("a");
  model.add(a, propertyC, literalA);
  model.add(a, propertyB, literalA);
  model.add(a, propertyA, literalA);
  model.add(a, propertyA, literalB);
  final Iterator<Quad> iterator = DatasetFactory.create(model).asDatasetGraph().find();
  final List<Quad> sortedTriples = testObj.getSortedTriples(model, iterator);
  // BUG FIX: the original called matches(...) and discarded the boolean result, so this
  // test could never fail. Each position is now asserted explicitly.
  org.junit.Assert.assertTrue(
      sortedTriples.get(0).matches(ANY, a.asNode(), propertyA.asNode(), literalA.asNode()));
  org.junit.Assert.assertTrue(
      sortedTriples.get(1).matches(ANY, a.asNode(), propertyA.asNode(), literalB.asNode()));
  org.junit.Assert.assertTrue(
      sortedTriples.get(2).matches(ANY, a.asNode(), propertyB.asNode(), literalA.asNode()));
  org.junit.Assert.assertTrue(
      sortedTriples.get(3).matches(ANY, a.asNode(), propertyC.asNode(), literalA.asNode()));
  // BUG FIX: resourceB's only triple was added with propertyA, not propertyC, so the
  // original expectation at index 4 was wrong (it was masked by the missing assert).
  org.junit.Assert.assertTrue(
      sortedTriples.get(4).matches(ANY, resourceB.asNode(), propertyA.asNode(), literalA.asNode()));
}
private Model createModel() { Model cubeData = ModelFactory.createDefaultModel(); Resource structure = cubeData.createResource(STRUCTURE, QB.DataStructureDefinition); Resource c1 = cubeData.createResource(STRUCTURE + "/c1", QB.ComponentSpecification); c1.addProperty(RDFS.label, cubeData.createLiteral("Component Specification of Class", "en")); c1.addProperty(QB.dimension, GK.DIM.Class); Resource c2 = cubeData.createResource(STRUCTURE + "/c2", QB.ComponentSpecification); c2.addProperty(QB.measure, GK.MEASURE.PropertyCount); c2.addProperty( RDFS.label, cubeData.createLiteral("Component Specification of Number of Properties", "en")); // Resource c3 = cubeData.createResource(STRUCTURE+"/c3",QB.ComponentSpecification); // c3.addProperty(RDFS.label, cubeData.createLiteral("Component Specification of Instance", // "en")); // c3.addProperty(QB.dimension, GK.DIM.Instance); structure.addProperty(QB.component, c1); structure.addProperty(QB.component, c2); // structure.addProperty(QB.component, c3); cubeData.add(GK.DIM.ClassStatements); cubeData.add(GK.DIM.PropertyStatements); cubeData.add(GK.DIM.InstanceStatements); cubeData.add(GK.MEASURE.PropertyCountStatements); return cubeData; }
/**
 * Computes the "properties per class" metric and returns it as an RDF Data Cube.
 *
 * <p>When {@code inputModel} is non-null the queries run locally against it; otherwise they are
 * sent to the SPARQL {@code endpoint} (restricted to {@code defaultGraphs}). One qb:Observation
 * is emitted per class; when the per-class property-count query fails, a sentinel observation
 * with count -1 and the exception message is emitted instead.
 *
 * @param inputModel local model to query, or null to use the remote endpoint
 * @param endpoint SPARQL endpoint URL (only used when inputModel is null)
 * @return the cube model containing the dataset and its observations
 */
private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());
  // Timestamped dataset URI so repeated runs do not collide.
  Resource dataset =
      cube.createResource(
          GK.uri + "Properties_per_Class" + calendar.getTimeInMillis(), QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Properties per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));

  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(INSTANCES, inputModel);
  } else {
    qExec = QueryExecutionFactory.sparqlService(endpoint, INSTANCES, defaultGraphs, defaultGraphs);
  }
  // BUG FIX: the original never closed its QueryExecutions, leaking HTTP/iterator
  // resources; both executions are now closed in finally blocks. Stray System.out
  // debug prints were removed.
  try {
    ResultSet result = qExec.execSelect();
    int i = 0;
    while (result.hasNext()) {
      Resource owlClass = result.next().getResource("class");
      NUMBER_OF_PROPERTIES.setIri("class", owlClass.getURI());
      QueryExecution propertiesQexec;
      if (inputModel != null) {
        propertiesQexec =
            QueryExecutionFactory.create(NUMBER_OF_PROPERTIES.asQuery(), inputModel);
      } else {
        propertiesQexec =
            QueryExecutionFactory.sparqlService(
                endpoint, NUMBER_OF_PROPERTIES.asQuery(), defaultGraphs, defaultGraphs);
      }
      try {
        ResultSet propertiesResult = propertiesQexec.execSelect();
        if (propertiesResult.hasNext()) {
          Resource obs =
              cube.createResource(
                  "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
          obs.addProperty(QB.dataset, dataset);
          obs.addProperty(GK.DIM.Class, owlClass);
          obs.addLiteral(GK.MEASURE.PropertyCount, propertiesResult.next().getLiteral("count"));
          i++;
        }
      } catch (Exception e) {
        // Best-effort fallback: record a sentinel observation so the failure is visible
        // in the cube itself rather than aborting the whole export.
        Resource obs =
            cube.createResource(
                "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
        obs.addProperty(QB.dataset, dataset);
        obs.addProperty(GK.DIM.Class, owlClass);
        obs.addLiteral(GK.MEASURE.PropertyCount, -1);
        obs.addLiteral(RDFS.comment, e.getMessage());
        i++;
      } finally {
        propertiesQexec.close();
      }
    }
  } finally {
    qExec.close();
  }
  return cube;
}
/**
 * Creates a GLIF step resource of the type mapped from {@code bp_type}, labels it from the
 * attribute group's "label" value, attaches type-specific properties, and registers the step
 * under {@code id} in {@code stepSet}.
 *
 * @param bp_type BitPathways step kind (TASK, DECISION, START, END, SUBPATH)
 * @param id key under which the created step is stored in stepSet
 * @param ags attribute group set supplying the label and type-specific attributes
 * @return the newly created step resource
 */
public Resource createGLIFinstance(String bp_type, String id, AttribGroupSet ags) {
  Resource r = rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  String type = getGLIFtype(bp_type);
  Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type);
  r.addProperty(RDF.type, r_type);
  // BUG FIX: the original called toString() on the first "label" value BEFORE its null
  // check, so a missing label threw NullPointerException instead of falling back to "".
  // The lookup is now fully null-safe (null array, empty array, or null element).
  Object[] labelValues = ags.getSpecialValue("label");
  String label = "";
  if (labelValues != null && labelValues.length > 0 && labelValues[0] != null) {
    label = labelValues[0].toString();
  }
  r.addProperty(RDFS.label, label);
  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  if (type.equalsIgnoreCase("Patient_State")) {
    r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "display_name"), label);
  }
  // Dispatch to the per-kind property filler.
  if (bp_type.equalsIgnoreCase("TASK")) {
    addTaskProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("DECISION")) {
    addDecisionProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("START")) {
    addStartProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("END")) {
    addEndProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("SUBPATH")) {
    addSubpathProperties(r, ags);
  }
  stepSet.put(id, r);
  return r;
}
public static void main(String args[]) { // some definitions String personURI = "http://somewhere/JohnSmith"; String givenName = "John"; String familyName = "Smith"; String fullName = givenName + " " + familyName; // create an empty model Model model = ModelFactory.createDefaultModel(); // create the resource // and add the properties cascading style Resource johnSmith = model .createResource(personURI) .addProperty(VCARD.FN, fullName) .addProperty( VCARD.N, model .createResource() .addProperty(VCARD.Given, givenName) .addProperty(VCARD.Family, familyName)); // now write the model in XML form to a file model.write(System.out); }
private void initializeGraph() { mGraph = new SimpleMGraph(); com.hp.hpl.jena.graph.Graph graph = new JenaGraph(mGraph); Model model = ModelFactory.createModelForGraph(graph); // create the resource // and add the properties cascading style String URI = "http://example.org/"; model .createResource(URI + "A") .addProperty(model.createProperty(URI + "B"), "C") .addProperty( model.createProperty(URI + "D"), model .createResource() .addProperty(model.createProperty(URI + "E"), "F") .addProperty(model.createProperty(URI + "G"), "H")); mGraph.add( new TripleImpl( new UriRef("http://foo/bar"), new UriRef("http://foo/bar"), LiteralFactory.getInstance().createTypedLiteral("foo"))); mGraph.add( new TripleImpl( new UriRef("http://foo/bar"), new UriRef("http://foo/bar"), LiteralFactory.getInstance().createTypedLiteral(54675))); mGraph.add( new TripleImpl(new BNode(), new UriRef("http://foo/bar"), new UriRef("http://foo/bar"))); }
/** Creates a vertex backed by a Jena resource; a null id yields an anonymous (blank) node. */
@Override
public Vertex addVertex(final Object id) {
  final Resource resource =
      (id == null) ? model.createResource() : model.createResource(id.toString());
  return new JenaVertex(model, resource);
}
/** Initializes the model plus the S/P/O fixtures and the two statements under test. */
public void setUp() {
  model = getModel();
  S = model.createResource(anchor + "subject");
  P = model.createProperty(anchor + "predicate");
  O = model.createLiteral(anchor + "object");
  SPO = model.createStatement(S, P, O);
  // SPO2 differs from SPO only in its subject.
  SPO2 = model.createStatement(model.createResource(anchor + "subject2"), P, O);
}
/**
 * Adds an edge as an RDF statement: (outVertex, label-property, inVertex).
 *
 * <p>Note the id parameter is not used by this RDF-backed implementation.
 */
@Override
public Edge addEdge(
    final Object id, final Vertex outVertex, final Vertex inVertex, final String label) {
  final Resource target = model.createResource(inVertex.getId().toString());
  final Resource source = model.createResource(outVertex.getId().toString());
  final Property predicate = model.createProperty(label);
  model.add(model.createStatement(source, predicate, target));
  return new JenaEdge(model, predicate, target, source);
}
/** Builds the default test model: a single Bug resource with severity, title and description. */
protected Model getDefaultModel() {
  final Model model = ModelFactory.createDefaultModel();
  // Relative URI "" — the resource is resolved against the request base when posted.
  final Resource bug =
      model.createResource("", model.createResource("http://example.com/ns#Bug"));
  bug.addProperty(RDF.type, model.createResource(LDP.RDFSource.stringValue()))
      .addProperty(model.createProperty("http://example.com/ns#severity"), "High")
      .addProperty(DCTerms.title, "Another bug to test.")
      .addProperty(DCTerms.description, "Issues that need to be fixed.");
  return model;
}
/** * Remove a Resource from the specified model. If property is null, the entire resource will be * removed. Otherwise only the specified property will be removed from the resource. * * @param path2db (location of the TDB store). * @param uri (base URI of the resource): * @param id (resource id). * @param property (property of the resource). can be null. */ public static void removeResource(String path2db, String uri, String id, Property property) { // TDB.setExecutionLogging(InfoLevel.INFO); TDB.getContext().set(TDB.symLogExec, true); rdf_model = TDBFactory.createModel(path2db); if (property == null) { rdf_model.removeAll(rdf_model.createResource(uri + id), null, null); } else { rdf_model.removeAll(rdf_model.createResource(uri + id), property, null); } }
/**
 * Creates a GLIF instance of the given type with a fresh sequential URI, labeling it with both
 * rdfs:label and the KB "name" property.
 *
 * @param type GLIF class local name (appended to the KB namespace prefix)
 * @param label human-readable label for the instance
 * @return the newly created resource
 */
public Resource createGLIFinstance(String type, String label) {
  final String uri = KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex;
  classIndex++;
  final Resource instance = rdfModel.createResource(uri);
  instance.addProperty(RDF.type, rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type));
  instance.addProperty(RDFS.label, label);
  instance.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  return instance;
}
/**
 * Materializes this item as a fresh model: one resource at {@code uri} carrying each Prop in
 * {@code values} as either a literal or a resource-valued property.
 */
public Model getModel() {
  final Model model = ModelFactory.createDefaultModel();
  final Resource item = model.createResource(uri);
  for (final Prop p : values) {
    final Property type = p.getType();
    if (p.isLiteral()) {
      item.addLiteral(type, model.createLiteral(p.getValue()));
    } else {
      item.addProperty(type, model.createResource(p.getValue()));
    }
  }
  return model;
}
/** Reification quadlets plus an extra rdf:object must never make a statement self-referential. */
public void testQuintetOfQuadlets() {
  final Resource reified = model.createResource();
  reified.addProperty(RDF.type, RDF.Statement);
  // An unrelated resource pointing at the reification node.
  model.createResource().addProperty(RDF.value, reified);
  reified
      .addProperty(RDF.subject, model.createResource())
      .addProperty(RDF.predicate, model.createProperty("http://example.org/foo"))
      .addProperty(RDF.object, model.createResource())
      .addProperty(RDF.object, model.createResource());
  // No statement in the model may have its subject equal to its object.
  for (final StmtIterator it = model.listStatements(); it.hasNext(); ) {
    final Statement s = it.nextStatement();
    assertFalse(s.getObject().equals(s.getSubject()));
  }
}
/**
 * Records a "no back-link" quality problem: a small problem model describing the offending
 * resource plus a blank node reifying the subject of the violated triple.
 */
private void createBackLinkViolation(String subjectURI, String resource) {
  final Model m = ModelFactory.createDefaultModel();
  final Resource offender = m.createResource(resource);
  final RDFNode violatedTriple = Commons.generateRDFBlankNode();
  m.add(new StatementImpl(offender, QPRO.exceptionDescription, DQM.NoBackLink));
  m.add(
      new StatementImpl(violatedTriple.asResource(), RDF.subject, m.createResource(subjectURI)));
  m.add(new StatementImpl(offender, DQM.hasViolatingTriple, violatedTriple));
  this._problemList.add(m);
}
/** Opens a clean test DB connection and seeds two anonymous labelled resources. */
protected void setUp() throws java.lang.Exception {
  conn = TestConnection.makeAndCleanTestConnection();
  model = ModelRDB.createModel(conn, TestPackage.M_DB);
  // Both fixtures share type and value "123" and differ only in their label.
  for (final String label : new String[] {"foo", "bar"}) {
    model
        .createResource()
        .addProperty(RDF.type, RDFS.Resource)
        .addProperty(RDFS.label, label)
        .addProperty(RDF.value, "123");
  }
}
/** * If the {@link ConfigurationProperties} has a name for the initial admin user, create the user * and add it to the model. */ protected void createInitialAdminUser(Model model) { String initialAdminUsername = ConfigurationProperties.getProperty("initialAdminUser"); if (initialAdminUsername == null) { return; } // A hard-coded MD5 encryption of "defaultAdmin" String initialAdminPassword = "******"; String vitroDefaultNs = DEFAULT_DEFAULT_NAMESPACE; Resource user = model.createResource(vitroDefaultNs + "defaultAdminUser"); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.RDF_TYPE), model.getResource(VitroVocabulary.USER))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_USERNAME), model.createTypedLiteral(initialAdminUsername))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_MD5PASSWORD), model.createTypedLiteral(initialAdminPassword))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_ROLE), model.createTypedLiteral("role:/50"))); }
/**
 * Renders the named Velocity template against a minimal API result set and returns the output
 * as a UTF-8 string.
 *
 * @param b Velocity bindings made available to the template
 * @param templateName template to load and merge
 * @return the rendered page
 * @throws IOException if the writer cannot be flushed/closed
 */
public static String RenderTemplatePage(Bindings b, String templateName) throws IOException {
  MediaType mt = MediaType.TEXT_HTML;
  Resource config = model.createResource("eh:/root");
  Mode prefixMode = Mode.PreferPrefixes;
  ShortnameService sns = new StandardShortnameService();
  List<Resource> noResults = CollectionUtils.list(root.inModel(model));
  Graph resultGraph = graphModel.getGraph();
  resultGraph.getPrefixMapping().setNsPrefix("api", API.NS);
  // Mark the (empty) item list on the root resource.
  resultGraph.add(Triple.create(root.asNode(), API.items.asNode(), RDF.nil.asNode()));
  APIResultSet rs = new APIResultSet(resultGraph, noResults, true, true, "details", View.ALL);
  VelocityRenderer vr = new VelocityRenderer(mt, null, config, prefixMode, sns);
  VelocityRendering vx = new VelocityRendering(b, rs, vr);
  VelocityEngine ve = vx.createVelocityEngine();
  VelocityContext vc = vx.createVelocityContext(b);
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  // BUG FIX: the original only closed the writer on the success path, leaking/underflushing
  // it when getTemplate/merge threw. try-with-resources guarantees close on every path.
  try (Writer w = new OutputStreamWriter(bos, "UTF-8")) {
    Template t = ve.getTemplate(templateName);
    t.merge(vc, w);
  }
  return bos.toString();
}
/**
 * Creates an Observable and includes it in the model.
 *
 * @param uri URI of the Observable to create
 * @return the new resource, typed as RNRM.Observable
 */
public Resource createObservable(String uri) {
  final Model model = process.getModel();
  final Resource observable = model.createResource(uri);
  model.add(observable, RDF.type, RNRM.Observable);
  return observable;
}
/** "dirty" reifications - those with conflicting quadlets - should fail. */
public void testDirtyReification() {
  final Resource reification = model.createResource(aURI);
  model.add(reification, RDF.type, RDF.Statement);
  // Two distinct rdf:subject quadlets conflict, so this node must not reify.
  model.add(reification, RDF.subject, S);
  model.add(reification, RDF.subject, P);
  testDoesNotReify("boo", reification);
}
/* (non-Javadoc)
 * @see org.spdx.rdfparser.SPDXLicenseInfo#_createResource(com.hp.hpl.jena.rdf.model.Model)
 */
@Override
protected Resource _createResource(Model model) {
  // Type the resource as a conjunctive (AND) license set and let the superclass
  // populate the rest.
  return super._createResource(
      model,
      model.createResource(
          SPDXAnalysis.SPDX_NAMESPACE + SPDXAnalysis.CLASS_SPDX_CONJUNCTIVE_LICENSE_SET));
}
@Override public void processInput(Resource input, Resource output) { Printing.print(input.getModel()); // Extract the link to the JSON specification Literal jsonSpecificationURL = input.getProperty(Vocabulary.hasJSONExperimentSpecificationURL).getLiteral(); JSONSpecification jsonSpec = new JSONSpecification(getURL(jsonSpecificationURL)); String jsonSpecID = jsonSpec.getSpecID(); // Create output model Model outputModel = ModelFactory.createDefaultModel(); // Create the UnpostedScenario ResourceURI resourceURI = new ResourceURI(); Resource unpostedScenario = outputModel.createResource( resourceURI.getURI("unpublishedScenario", jsonSpecID).toASCIIString(), Vocabulary.UnpostedScenario); unpostedScenario.addProperty(Vocabulary.hasScenarioLayer, input); unpostedScenario.addLiteral(Vocabulary.hasJSONExperimentSpecificationURL, jsonSpecificationURL); // Merge our temporary output model output.getModel().add(outputModel); // Set output scenario layer to reference parent scenario output.addProperty(Vocabulary.isScenarioLayerOf, unpostedScenario); Printing.print(output.getModel()); }
/**
 * Recursively mirrors the rdfs:subClassOf hierarchy under {@code node} into the output model.
 * Chains with exactly one child are collapsed; every other node becomes a blank tree node
 * linked to its parent (or registered as the root when parent is null).
 */
private void processNode(
    Resource node, Model queryModel, Model outputModel, Resource parent, Resource tree) {
  final Set<Resource> children =
      queryModel.listResourcesWithProperty(RDFS.subClassOf, node).toSet();

  // Single-child chains are collapsed: skip this node and descend directly.
  if (children.size() == 1) {
    this.processNode(children.iterator().next(), queryModel, outputModel, parent, tree);
    return;
  }

  final Resource treeNode = outputModel.createResource();
  if (parent == null) {
    outputModel.add(tree, Vocab.has_Root, treeNode);
  } else {
    outputModel.add(treeNode, Vocab.has_Parent, parent);
  }
  this.addLabel(treeNode, node, outputModel);
  outputModel.add(treeNode, RDF.type, Vocab.Node);
  outputModel.add(treeNode, DC.subject, node);

  // Recurse into every child; iterating an empty set is a no-op for leaves.
  for (final Resource child : children) {
    this.processNode(child, queryModel, outputModel, treeNode, tree);
  }
}
/**
 * Exports every split (compound) non-preferred term of the thesaurus into the given model.
 *
 * <p>For each complex concept this emits an iso-thes SplitNonPreferredTerm resource (literal
 * form, created/modified timestamps, status, optional source) plus a CompoundEquivalence node
 * linking the non-preferred term (plusUF) to its preferred terms (plusUSE).
 *
 * @param thesaurus thesaurus whose complex concepts are exported
 * @param model RDF model the statements are added to
 * @return the same model, with the complex-concept statements added
 */
public Model exportComplexConcept(Thesaurus thesaurus, Model model) {
  // 0 / -1 — presumably "no paging, fetch all"; TODO confirm against the service contract.
  List<SplitNonPreferredTerm> complexConcepts =
      splitNonPreferredTermService.getSplitNonPreferredTermList(
          0, -1, thesaurus.getThesaurusId());
  if (!complexConcepts.isEmpty()) {
    for (SplitNonPreferredTerm complexConcept : complexConcepts) {
      // Scheme the exported resources belong to (skos:inScheme target).
      Resource inScheme = model.createResource(complexConcept.getThesaurus().getIdentifier());

      // Add splitNonPreferredTerm resource
      Resource complexConceptRes =
          model.createResource(complexConcept.getIdentifier(), ISOTHES.SPLIT_NON_PREFERRED_TERM);
      model.add(complexConceptRes, SKOS.IN_SCHEME, inScheme);
      // Literal form carries the XML-unescaped lexical value with its language tag.
      model.add(
          complexConceptRes,
          SKOSXL.LITERAL_FORM,
          StringEscapeUtils.unescapeXml(complexConcept.getLexicalValue()),
          complexConcept.getLanguage().getId());
      model.add(
          complexConceptRes,
          DCTerms.created,
          DateUtil.toISO8601String(complexConcept.getCreated()));
      model.add(
          complexConceptRes,
          DCTerms.modified,
          DateUtil.toISO8601String(complexConcept.getModified()));
      model.add(complexConceptRes, ISOTHES.STATUS, complexConcept.getStatus().toString());
      if (StringUtils.isNotEmpty(complexConcept.getSource())) {
        model.add(complexConceptRes, DC.source, complexConcept.getSource());
      }

      // Add compoundEquivalence resource
      // NOTE(review): createResource with a type argument appears to create one anonymous
      // CompoundEquivalence node per complex concept — confirm against the Jena overload used.
      Resource compoundEquivalenceRes = model.createResource(ISOTHES.COMPOUND_EQUIVALENCE);
      model.add(compoundEquivalenceRes, SKOS.IN_SCHEME, inScheme);
      model.add(compoundEquivalenceRes, ISOTHES.PLUS_UF, complexConcept.getIdentifier());
      // One plusUSE link per preferred term the compound splits into.
      for (ThesaurusTerm term : complexConcept.getPreferredTerms()) {
        model.add(compoundEquivalenceRes, ISOTHES.PLUS_USE, term.getIdentifier());
      }
    }
  }
  return model;
}
/**
 * Records a "not valid forward link" quality problem for the given resource as a one-statement
 * problem model.
 */
private void createNotValidForwardLink(String resource) {
  final Model m = ModelFactory.createDefaultModel();
  m.add(
      new StatementImpl(
          m.createResource(resource), QPRO.exceptionDescription, DQM.NotValidForwardLink));
  this._problemList.add(m);
}
/** Asserts that viewing {@code uri} as a ReifiedStatement throws DoesNotReifyException. */
private void testNotReifying(Model m, String uri) {
  try {
    m.createResource(uri).as(ReifiedStatement.class);
  } catch (DoesNotReifyException expected) {
    return; // that's what we require
  }
  fail("there should be no reifiedStatement for " + uri);
}
/**
 * Processes the downloaded USGS data file and materializes each measurement site into the
 * ontology/PML models.
 *
 * @return true on success; false if processing threw (best-effort — the trace is printed)
 */
public boolean getData(
    File tmp, String stateCode, String countyCode, OntModel owlModel, Model pmlModel) {
  try {
    final Resource usgs =
        pmlModel.createResource(
            Ontology.EPA.NS + "USGS", pmlModel.createResource(Ontology.PMLP.Organization));
    usgs.addLiteral(RDFS.label, "United States Geological Survey");
    for (final MeasurementSite site : process(tmp, stateCode, countyCode)) {
      site.asIndividual(owlModel, pmlModel);
    }
    return true;
  } catch (Exception e) {
    // Best-effort contract: signal failure via the return value, keep the trace visible.
    e.printStackTrace();
    return false;
  }
}
/**
 * Converts an ORE Triple into a Jena Statement built on a throwaway model; the object becomes
 * a typed literal or a resource depending on the triple's kind.
 */
public static Statement createStatement(Triple triple) throws OREException {
  final Model model = ModelFactory.createDefaultModel();
  final Resource subject = model.createResource(triple.getSubjectURI().toString());
  final Property predicate = model.createProperty(triple.getPredicate().getURI().toString());
  final RDFNode object =
      triple.isLiteral()
          ? model.createTypedLiteral(triple.getObjectLiteral())
          : model.createResource(triple.getObjectURI().toString());
  return model.createStatement(subject, predicate, object);
}
/**
 * Builds an SPDXChecksum view over an existing checksum node in the model, reading the
 * algorithm and value properties from the underlying graph.
 *
 * @param spdxModel model containing the checksum triples
 * @param checksumNode graph node (blank or URI) representing the checksum
 * @throws InvalidSPDXAnalysisException if the node is a literal or the algorithm is malformed
 */
public SPDXChecksum(Model spdxModel, Node checksumNode) throws InvalidSPDXAnalysisException {
  this.model = spdxModel;
  this.checksumNode = checksumNode;
  // Materialize the node as a model-level Resource; only blank and URI nodes are legal.
  if (checksumNode.isBlank()) {
    checksumResource = model.createResource(checksumNode.getBlankNodeId());
  } else if (checksumNode.isURI()) {
    checksumResource = model.createResource(checksumNode.getURI());
  } else {
    throw (new InvalidSPDXAnalysisException("Checksum node can not be a literal"));
  }
  // Algorithm
  Node p =
      spdxModel
          .getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM)
          .asNode();
  // Find all (checksumNode, algorithm, ?) triples; the last match wins.
  Triple m = Triple.createMatch(checksumNode, p, null);
  ExtendedIterator<Triple> tripleIter = spdxModel.getGraph().find(m);
  while (tripleIter.hasNext()) {
    Triple t = tripleIter.next();
    if (t.getObject().isLiteral()) {
      // The following is for compatibility with rdf generated with older
      // versions of the tool
      this.algorithm = t.getObject().toString(false);
    } else if (t.getObject().isURI()) {
      // Current format: the algorithm is a URI mapped back to its name.
      this.algorithm = URI_TO_ALGORITHM.get(t.getObject().getURI());
      if (this.algorithm == null) {
        this.algorithm = "UNKNOWN";
      }
    } else {
      throw (new InvalidSPDXAnalysisException(
          "Invalid checksum algorithm - must be one of the defined algorithms supported by SPDX."));
    }
  }
  // value
  p =
      spdxModel
          .getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE)
          .asNode();
  m = Triple.createMatch(checksumNode, p, null);
  tripleIter = spdxModel.getGraph().find(m);
  // As above, the last matching value triple wins.
  while (tripleIter.hasNext()) {
    Triple t = tripleIter.next();
    this.value = t.getObject().toString(false);
  }
}
/**
 * Builds a PML SourceUsage resource pointing at a single-cell document fragment (this.row x
 * col) of the source dataset, with EPA recorded as the publisher.
 *
 * @param col column index used for both hasFromCol and hasToCol (single-column fragment)
 * @param pmlModel model the provenance resources are created in
 * @return the SourceUsage resource linking to the fragment
 */
public Resource rowColRef(int col, Model pmlModel) {
  Resource epa = pmlModel.createResource(Ontology.EPA.NS + "EPA");
  // NOTE(review): the nested createResource calls appear to create anonymous resources
  // typed as SourceUsage / DocumentFragmentByRowCol — confirm against the Jena overload.
  Resource source = pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.SourceUsage));
  Resource frag =
      pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.DocumentFragmentByRowCol));
  // The dataset itself is identified by the field `src`.
  Resource document =
      pmlModel.createResource(src, pmlModel.createResource(Ontology.PMLP.Dataset));
  Property prop;
  // Relate source to fragment
  prop = pmlModel.createProperty(Ontology.PMLP.hasSource);
  source.addProperty(prop, frag);
  // Relate row/col information; from == to, so the fragment is a single cell.
  prop = pmlModel.createProperty(Ontology.PMLP.hasFromCol);
  frag.addLiteral(prop, col);
  prop = pmlModel.createProperty(Ontology.PMLP.hasToCol);
  frag.addLiteral(prop, col);
  prop = pmlModel.createProperty(Ontology.PMLP.hasFromRow);
  frag.addLiteral(prop, row);
  prop = pmlModel.createProperty(Ontology.PMLP.hasToRow);
  frag.addLiteral(prop, row);
  // Relate fragment to document
  prop = pmlModel.createProperty(Ontology.PMLP.hasDocument);
  frag.addProperty(prop, document);
  // Relate document to publisher
  prop = pmlModel.createProperty(Ontology.PMLP.hasPublisher);
  document.addProperty(prop, epa);
  return source;
}