/**
 * Creates a GLIF instance resource for the given BitPathways step type, wires up its
 * label/name properties, dispatches to the type-specific property builder, and registers
 * the resource in {@code stepSet} under {@code id}.
 *
 * @param bp_type BitPathways step type (TASK, DECISION, START, END, SUBPATH)
 * @param id key under which the new resource is stored in {@code stepSet}
 * @param ags attribute groups supplying the special "label" value
 * @return the newly created resource
 */
public Resource createGLIFinstance(String bp_type, String id, AttribGroupSet ags) {
  Resource r = rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  String type = getGLIFtype(bp_type);
  Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type);
  r.addProperty(RDF.type, r_type);
  // BUG FIX: the original called toString() on the first "label" value and only
  // checked for null afterwards, so a missing label blew up with an NPE before
  // the guard could ever run. Guard the array and the element first.
  String label = "";
  Object[] labelValues = ags.getSpecialValue("label");
  if (labelValues != null && labelValues.length > 0 && labelValues[0] != null) {
    label = labelValues[0].toString();
  }
  r.addProperty(RDFS.label, label);
  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  if (type.equalsIgnoreCase("Patient_State")) {
    // Patient states additionally carry a display name.
    r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "display_name"), label);
  }
  // Dispatch to the builder for the concrete step type.
  if (bp_type.equalsIgnoreCase("TASK")) {
    addTaskProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("DECISION")) {
    addDecisionProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("START")) {
    addStartProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("END")) {
    addEndProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("SUBPATH")) {
    addSubpathProperties(r, ags);
  }
  stepSet.put(id, r);
  return r;
}
/** * If the {@link ConfigurationProperties} has a name for the initial admin user, create the user * and add it to the model. */ protected void createInitialAdminUser(Model model) { String initialAdminUsername = ConfigurationProperties.getProperty("initialAdminUser"); if (initialAdminUsername == null) { return; } // A hard-coded MD5 encryption of "defaultAdmin" String initialAdminPassword = "******"; String vitroDefaultNs = DEFAULT_DEFAULT_NAMESPACE; Resource user = model.createResource(vitroDefaultNs + "defaultAdminUser"); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.RDF_TYPE), model.getResource(VitroVocabulary.USER))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_USERNAME), model.createTypedLiteral(initialAdminUsername))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_MD5PASSWORD), model.createTypedLiteral(initialAdminPassword))); model.add( model.createStatement( user, model.createProperty(VitroVocabulary.USER_ROLE), model.createTypedLiteral("role:/50"))); }
@Test
public void shouldSortTriplesForDisplay() {
  final Model model = createDefaultModel();
  model.setNsPrefix("prefix", "namespace");
  final Property propertyA = model.createProperty("namespace", "a");
  final Property propertyB = model.createProperty("namespace", "b");
  final Property propertyC = model.createProperty("c");
  final Literal literalA = model.createLiteral("a");
  final Literal literalB = model.createLiteral("b");
  final Resource resourceB = model.createResource("b");
  model.add(resourceB, propertyA, literalA);
  final Resource a = model.createResource("a");
  // Inserted out of order on purpose; the sorter must order by subject,
  // then predicate, then object.
  model.add(a, propertyC, literalA);
  model.add(a, propertyB, literalA);
  model.add(a, propertyA, literalA);
  model.add(a, propertyA, literalB);
  final Iterator<Quad> iterator = DatasetFactory.create(model).asDatasetGraph().find();
  final List<Quad> sortedTriples = testObj.getSortedTriples(model, iterator);
  // BUG FIX: the original discarded the boolean results of matches(), so the
  // test could never fail. Assert each expected position explicitly.
  assertTrue(sortedTriples.get(0).matches(ANY, a.asNode(), propertyA.asNode(), literalA.asNode()));
  assertTrue(sortedTriples.get(1).matches(ANY, a.asNode(), propertyA.asNode(), literalB.asNode()));
  assertTrue(sortedTriples.get(2).matches(ANY, a.asNode(), propertyB.asNode(), literalA.asNode()));
  assertTrue(sortedTriples.get(3).matches(ANY, a.asNode(), propertyC.asNode(), literalA.asNode()));
  // BUG FIX: resourceB was inserted with propertyA (not propertyC as the
  // original, never-checked expectation claimed).
  assertTrue(
      sortedTriples.get(4).matches(ANY, resourceB.asNode(), propertyA.asNode(), literalA.asNode()));
}
private void initializeGraph() { mGraph = new SimpleMGraph(); com.hp.hpl.jena.graph.Graph graph = new JenaGraph(mGraph); Model model = ModelFactory.createModelForGraph(graph); // create the resource // and add the properties cascading style String URI = "http://example.org/"; model .createResource(URI + "A") .addProperty(model.createProperty(URI + "B"), "C") .addProperty( model.createProperty(URI + "D"), model .createResource() .addProperty(model.createProperty(URI + "E"), "F") .addProperty(model.createProperty(URI + "G"), "H")); mGraph.add( new TripleImpl( new UriRef("http://foo/bar"), new UriRef("http://foo/bar"), LiteralFactory.getInstance().createTypedLiteral("foo"))); mGraph.add( new TripleImpl( new UriRef("http://foo/bar"), new UriRef("http://foo/bar"), LiteralFactory.getInstance().createTypedLiteral(54675))); mGraph.add( new TripleImpl(new BNode(), new UriRef("http://foo/bar"), new UriRef("http://foo/bar"))); }
public Configuration(Model configurationModel) { model = configurationModel; StmtIterator it = model.listStatements(null, RDF.type, CONF.Configuration); if (!it.hasNext()) { throw new IllegalArgumentException("No conf:Configuration found in configuration model"); } config = it.nextStatement().getSubject(); datasets = new ArrayList(); it = model.listStatements(config, CONF.dataset, (RDFNode) null); while (it.hasNext()) { datasets.add(new Dataset(it.nextStatement().getResource())); } labelProperties = new ArrayList(); it = model.listStatements(config, CONF.labelProperty, (RDFNode) null); while (it.hasNext()) { labelProperties.add(it.nextStatement().getObject().as(Property.class)); } if (labelProperties.isEmpty()) { labelProperties.add(RDFS.label); labelProperties.add(DC.title); labelProperties.add(model.createProperty("http://xmlns.com/foaf/0.1/name")); } commentProperties = new ArrayList(); it = model.listStatements(config, CONF.commentProperty, (RDFNode) null); while (it.hasNext()) { commentProperties.add(it.nextStatement().getObject().as(Property.class)); } if (commentProperties.isEmpty()) { commentProperties.add(RDFS.comment); commentProperties.add(DC.description); } imageProperties = new ArrayList(); it = model.listStatements(config, CONF.imageProperty, (RDFNode) null); while (it.hasNext()) { imageProperties.add(it.nextStatement().getObject().as(Property.class)); } if (imageProperties.isEmpty()) { imageProperties.add(model.createProperty("http://xmlns.com/foaf/0.1/depiction")); } prefixes = new PrefixMappingImpl(); if (config.hasProperty(CONF.usePrefixesFrom)) { it = config.listProperties(CONF.usePrefixesFrom); while (it.hasNext()) { Statement stmt = it.nextStatement(); prefixes.setNsPrefixes(FileManager.get().loadModel(stmt.getResource().getURI())); } } else { prefixes.setNsPrefixes(model); } if (prefixes.getNsURIPrefix(CONF.NS) != null) { prefixes.removeNsPrefix(prefixes.getNsURIPrefix(CONF.NS)); } }
/** Reifying SPO via createReifiedStatement must mark it reified; fresh statements must not be. */
public void testIsReified() {
  ReifiedStatement rs = model.createReifiedStatement(aURI, SPO);
  Resource freshSubject = model.createResource(anchor + "BS");
  Property freshPredicate = model.createProperty(anchor + "BP");
  // A Property is also an RDFNode, so it can serve as the object here.
  RDFNode freshObject = model.createProperty(anchor + "BO");
  model.add(rs, P, O);
  assertTrue("st should be reified now", SPO.isReified());
  assertTrue("m should have st reified now", model.isReified(SPO));
  assertFalse(
      "this new statement should not be reified",
      model.createStatement(freshSubject, freshPredicate, freshObject).isReified());
}
/**
 * Attaches a didactics entry to the given step: a Supplemental_Material_List holding a single
 * Text_Material with the supplied MIME type and (Polish-character-restored) text.
 */
private void addDidacticsTextMaterial(
    Resource step, String purpose, String mime, String text, int index) {
  // Note: createGLIFinstance bumps classIndex, so the second id below reflects the new value.
  Resource materialList =
      createGLIFinstance("Supplemental_Material_List", "info_" + (classIndex + 1));
  materialList.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "purpose"), purpose);
  Resource textMaterial =
      createGLIFinstance("Text_Material", "info_text_" + (classIndex + 1) + "_" + index);
  textMaterial.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "MIME_type_format"), mime);
  textMaterial.addProperty(
      rdfModel.createProperty(KB_NS_PREFIX + "material"), revivePolishCharacters(text));
  materialList.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "items"), textMaterial);
  step.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "didactics"), materialList);
}
public Resource rowColRef(int col, Model pmlModel) { Resource epa = pmlModel.createResource(Ontology.EPA.NS + "EPA"); Resource source = pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.SourceUsage)); Resource frag = pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.DocumentFragmentByRowCol)); Resource document = pmlModel.createResource(src, pmlModel.createResource(Ontology.PMLP.Dataset)); Property prop; // Relate source to fragment prop = pmlModel.createProperty(Ontology.PMLP.hasSource); source.addProperty(prop, frag); // Relate row/col information prop = pmlModel.createProperty(Ontology.PMLP.hasFromCol); frag.addLiteral(prop, col); prop = pmlModel.createProperty(Ontology.PMLP.hasToCol); frag.addLiteral(prop, col); prop = pmlModel.createProperty(Ontology.PMLP.hasFromRow); frag.addLiteral(prop, row); prop = pmlModel.createProperty(Ontology.PMLP.hasToRow); frag.addLiteral(prop, row); // Relate fragment to document prop = pmlModel.createProperty(Ontology.PMLP.hasDocument); frag.addProperty(prop, document); // Relate document to publisher prop = pmlModel.createProperty(Ontology.PMLP.hasPublisher); document.addProperty(prop, epa); return source; }
/** Prepares two statements sharing predicate and object but with distinct subjects. */
public void setUp() {
  model = getModel();
  Resource secondSubject = model.createResource(anchor + "subject2");
  S = model.createResource(anchor + "subject");
  P = model.createProperty(anchor + "predicate");
  O = model.createLiteral(anchor + "object");
  SPO = model.createStatement(S, P, O);
  SPO2 = model.createStatement(secondSubject, P, O);
}
public Resource createGLIFinstanceMaintenance(Metadata m) { Resource r = rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex); classIndex++; Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + "Maintenance_Info"); r.addProperty(RDF.type, r_type); r.addProperty( rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX + "title"), m.getTitleMetadata().getName()); // r.addProperty(rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX+"authoring_date"), // m.getLastModifiedMetadata().getDate()); r.addProperty( rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX + "author"), m.getAuthorsMetadata()[0].getName()); return r; }
@SuppressWarnings("unused")
private static final class Vocab {
  // FIX: the CDAO namespace was repeated in every term URI; hoist it into one constant.
  private static final String CDAO_NS = "http://www.evolutionaryontology.org/cdao/1.0/cdao.owl#";

  private static Model m_model = ModelFactory.createDefaultModel();

  // CDAO object properties.
  public static final Property part_of = m_model.createProperty(CDAO_NS + "part_of");
  public static final Property belongs_to_Edge_as_Child =
      m_model.createProperty(CDAO_NS + "belongs_to_Edge_as_Child");
  public static final Property has_Root = m_model.createProperty(CDAO_NS + "has_Root");
  public static final Property has_Node = m_model.createProperty(CDAO_NS + "has_Node");
  public static final Property has_Child_Node = m_model.createProperty(CDAO_NS + "has_Child_Node");
  public static final Property has_Annotation = m_model.createProperty(CDAO_NS + "has_Annotation");
  public static final Property subtree_of = m_model.createProperty(CDAO_NS + "subtree_of");
  public static final Property has_Parent_Node =
      m_model.createProperty(CDAO_NS + "has_Parent_Node");
  public static final Property has = m_model.createProperty(CDAO_NS + "has");
  public static final Property has_Parent = m_model.createProperty(CDAO_NS + "has_Parent");

  // CDAO / OWL classes.
  public static final Resource RootedTree = m_model.createResource(CDAO_NS + "RootedTree");
  public static final Resource EdgeAnnotation =
      m_model.createResource(CDAO_NS + "EdgeAnnotation");
  public static final Resource Node = m_model.createResource(CDAO_NS + "Node");
  public static final Resource Thing =
      m_model.createResource("http://www.w3.org/2002/07/owl#Thing");

  // Phylotastic SADI terms (different namespaces, kept verbatim).
  public static final Resource TipCollection =
      m_model.createResource(
          "http://cdao.svn.sourceforge.net/svnroot/cdao/trunk/ontology/phylotastic/phylotastic_sadi.owl#TipCollection");
  public static final Resource TaxonomyRoot =
      m_model.createResource("http://purl.obolibrary.org/obo/NCBITaxon_1");
}
@Override
public Edge addEdge(
    final Object id, final Vertex outVertex, final Vertex inVertex, final String label) {
  // Map the vertices to resources and the label to a predicate, then record
  // source --label--> target as one statement.
  final Resource target = model.createResource(inVertex.getId().toString());
  final Resource source = model.createResource(outVertex.getId().toString());
  final Property predicate = model.createProperty(label);
  model.add(model.createStatement(source, predicate, target));
  return new JenaEdge(model, predicate, target, source);
}
/** Builds the default test model: one ex:Bug resource with LDP type and sample DC metadata. */
protected Model getDefaultModel() {
  Model model = ModelFactory.createDefaultModel();
  // Resource with relative URI "" typed ex:Bug at creation time.
  Resource bug = model.createResource("", model.createResource("http://example.com/ns#Bug"));
  bug.addProperty(RDF.type, model.createResource(LDP.RDFSource.stringValue()))
      .addProperty(model.createProperty("http://example.com/ns#severity"), "High")
      .addProperty(DCTerms.title, "Another bug to test.")
      .addProperty(DCTerms.description, "Issues that need to be fixed.");
  return model;
}
/**
 * Creates a GLIF instance resource of the given type with the given label; the label is
 * stored both as rdfs:label and as the instance's "name" property.
 */
public Resource createGLIFinstance(String type, String label) {
  Resource instance =
      rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  instance.addProperty(RDF.type, rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type));
  instance.addProperty(RDFS.label, label);
  instance.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  return instance;
}
/**
 * Vocabulary definitions from
 * /home/reto/workspace/wymiwyg-commons-jena/src/org/wymiwyg/commons/vocabulary/foaf-extensions.rdf
 *
 * @author Auto-generated by schemagen on 31 Mai 2007 01:33
 */
public class FOAFEX {
  /** The RDF model that holds the vocabulary terms */
  private static Model m_model = ModelFactory.createDefaultModel();

  /** The namespace of the vocabulary as a string */
  public static final String NS = "http://wymiwyg.org/ontologies/foaf/extensions#";

  /**
   * The namespace of the vocabulary as a string
   *
   * @see #NS
   */
  public static String getURI() {
    return NS;
  }

  /** The namespace of the vocabulary as a resource */
  public static final Resource NAMESPACE = m_model.createResource(NS);

  // FIX: term URIs previously repeated the namespace string literally; they now
  // derive from the NS constant (identical resulting URIs).

  /**
   * Indicates a document which has the subject as primary-topic and is a
   * foaf:PersonalProfileDocument
   */
  public static final Property personalProfileDocument =
      m_model.createProperty(NS + "personalProfileDocument");

  /** Indicates a document which has the subject as primary-topic */
  public static final Property agentDescriptionDocument =
      m_model.createProperty(NS + "agentDescriptionDocument");

  /** inverse of foaf:knows */
  public static final Property knownBy = m_model.createProperty(NS + "knownBy");

  /** A document with a foaf:Agent as foaf:primarySubject */
  public static final Resource AgentProfileDocument =
      m_model.createResource(NS + "AgentProfileDocument");
}
/**
 * Create a triple for a new node being created.
 *
 * @param oMeetingEvent the event whose data to upload.
 * @param model the model to add the data to.
 */
private synchronized void addNode(
    MeetingEvent oMeetingEvent, com.hp.hpl.jena.rdf.model.Model model) {
  NodeSummary oNode = oMeetingEvent.getNode();
  if (oNode == null) {
    // Events without an attached node carry nothing to record.
    return;
  }
  String sNodeID = oNode.getId();
  // Node URI is "<meetingID>-<nodeID>".
  Resource oResNode = model.createResource(oMeetingEvent.getMeetingID() + "-" + sNodeID); // $NON-NLS-1$
  Property type = model.createProperty(RDF_NS, "type"); // $NON-NLS-1$
  // Every node gets the generic Compendium-Node type plus its specific type below.
  oResNode.addProperty(type, model.createResource(MEMETIC_NS + "Compendium-Node")); // $NON-NLS-1$
  int nNodeType = oNode.getType();
  String sTripleStoreString = UINodeTypeManager.getTripleStoreDescription(nNodeType);
  oResNode.addProperty(type, model.createResource(MEMETIC_NS + sTripleStoreString));
  if (nNodeType == ICoreConstants.REFERENCE_SHORTCUT) {
    // Reference shortcuts additionally record their source link.
    oResNode.addProperty(
        model.createProperty(MEMETIC_NS, "has-reference"), oNode.getSource()); // $NON-NLS-1$
  }
  // ADD LABEL
  oResNode.addProperty(
      model.createProperty(MEMETIC_NS, "has-label"), oNode.getLabel()); // $NON-NLS-1$
  // ADD IF HAS TRIPLESTORE ID
  String sOriginalID = oNode.getOriginalID();
  // NOTE(review): both operands of the || below compare against
  // ICoreConstants.REFERENCE, so the second is redundant — it was probably
  // meant to be a different constant (REFERENCE_SHORTCUT?). Confirm the
  // original intent before changing.
  if (sOriginalID.startsWith("TS:")
      && !(nNodeType == ICoreConstants.REFERENCE
          || nNodeType == ICoreConstants.REFERENCE)) { // $NON-NLS-1$
    // Strip the "TS:" prefix to recover the raw triplestore id.
    int ind = sOriginalID.indexOf(":"); // $NON-NLS-1$
    sOriginalID = sOriginalID.substring(ind + 1);
    Property has_original_id = model.createProperty(MEMETIC_NS, "has-original-id"); // $NON-NLS-1$
    Resource original_id = model.createResource(sOriginalID);
    oResNode.addProperty(has_original_id, original_id);
  }
}
/**
 * Builds a small model recording a deletion: the deleted model's id and the deletion
 * timestamp, both attached to the subject "mo:&lt;url-encoded id&gt;".
 */
public static com.hp.hpl.jena.rdf.model.Model getDeleteModel(String id, long timestamp) {
  com.hp.hpl.jena.rdf.model.Model deleteModel = ModelFactory.createDefaultModel();
  // Both statements share the same subject resource.
  com.hp.hpl.jena.rdf.model.Resource subject =
      deleteModel.createResource("mo:" + DataStore.encodeURL(id));
  deleteModel.add(
      deleteModel.createLiteralStatement(
          subject,
          deleteModel.createProperty("http://www.modaclouds.eu/rdfs/1.0/deletedmodel#id"),
          id));
  deleteModel.add(
      deleteModel.createLiteralStatement(
          subject,
          deleteModel.createProperty("http://www.modaclouds.eu/rdfs/1.0/deletedmodel#timestamp"),
          timestamp));
  return deleteModel;
}
/** * Create the model and the meeting data for the given meeting id. * * @param oMeeting the object holding the meeting data. * @param model the model to add the data to. */ public synchronized void addMeetingData(Meeting oMeeting, com.hp.hpl.jena.rdf.model.Model model) { Resource meeting = model.createResource(oMeeting.getMeetingID()); meeting.addProperty( model.createProperty(MEETING_NS, "has-transcription"), model.createResource( oMeeting.getMeetingID() + "-" + oMeeting.getMeetingMapID())); // $NON-NLS-1$ //$NON-NLS-2$ // Define the map owner, 'person', Reource and add their type, name, and if Compendium created // (always 'true'). UserProfile oUser = oMeeting.getUser(); if (oUser == null) { oUser = ProjectCompendium.APP.getModel().getUserProfile(); } Resource person = model.createResource(MEMETIC_STUB + oUser.getId()); person.addProperty( model.createProperty(RDF_NS, "type"), model.createResource(PORTAL_NS + "Person")); // $NON-NLS-1$ //$NON-NLS-2$ person.addProperty( model.createProperty(PORTAL_NS, "full-name"), oUser.getUserName()); // $NON-NLS-1$ person.addProperty( model.createProperty(MEMETIC_NS, "is-compendium-created"), "true"); //$NON-NLS-1$ //$NON-NLS-2$ // UPLOAD THE DATA ABOUT THE MEETING MAP NODE ITSELF MeetingEvent oMeetingEvent = new MeetingEvent( oMeeting.getMeetingID(), false, MeetingEvent.NODE_ADDED_EVENT, (View) oMeeting.getMapNode(), oMeeting.getMapNode()); addNode(oMeetingEvent, model); }
/**
 * Creates a resource from this SPDX Checksum
 *
 * @param model
 * @return
 */
public Resource createResource(Model model) {
  this.model = model;
  // Anonymous node typed spdx:Checksum.
  Resource checksumType =
      model.createResource(
          SpdxRdfConstants.SPDX_NAMESPACE + SpdxRdfConstants.CLASS_SPDX_CHECKSUM);
  Resource result = model.createResource(checksumType);
  if (algorithm != null) {
    // The algorithm is stored as a resource looked up from its URI mapping.
    result.addProperty(
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM),
        model.createResource(ALGORITHM_TO_URI.get(algorithm)));
  }
  if (this.value != null) {
    result.addProperty(
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE),
        this.value);
  }
  // Remember the node/resource so later setters can update the model in place.
  this.checksumNode = result.asNode();
  this.checksumResource = result;
  return result;
}
/**
 * Builds a reification quintet (rdf:Statement node with subject/predicate/object arcs, the
 * object arc twice, referenced via rdf:value) and checks no statement is self-referential.
 */
public void testQuintetOfQuadlets() {
  Resource reified = model.createResource();
  reified.addProperty(RDF.type, RDF.Statement);
  model.createResource().addProperty(RDF.value, reified);
  reified.addProperty(RDF.subject, model.createResource());
  reified.addProperty(RDF.predicate, model.createProperty("http://example.org/foo"));
  reified.addProperty(RDF.object, model.createResource());
  reified.addProperty(RDF.object, model.createResource());
  // No statement in the model may have equal subject and object.
  for (StmtIterator it = model.listStatements(); it.hasNext(); ) {
    Statement s = it.nextStatement();
    assertFalse(s.getObject().equals(s.getSubject()));
  }
}
/** @param value the value to set */ public void setValue(String value) { this.value = value; if (this.model != null && this.checksumNode != null) { // delete any previous value Property p = model.getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE); model.removeAll(checksumResource, p, null); // add the property p = model.createProperty( SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE); checksumResource.addProperty(p, value); } }
/** Builds a one-statement model stamping the given graph URI with a "mo:timestamp" literal. */
public static com.hp.hpl.jena.rdf.model.Model getNameModel(String graphUri, long timestamp) {
  com.hp.hpl.jena.rdf.model.Model nameModel = ModelFactory.createDefaultModel();
  nameModel.add(
      nameModel.createLiteralStatement(
          nameModel.createResource(graphUri),
          nameModel.createProperty("mo:timestamp"),
          timestamp));
  return nameModel;
}
/** * @param algorithm the algorithm to set * @throws InvalidSPDXAnalysisException */ public void setAlgorithm(String algorithm) throws InvalidSPDXAnalysisException { this.algorithm = algorithm; if (this.model != null && this.checksumNode != null) { Resource algResource = algorithmStringToResource(algorithm, this.model); // delete any previous algorithm Property p = model.getProperty( SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM); model.removeAll(checksumResource, p, null); // add the property p = model.createProperty( SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM); checksumResource.addProperty(p, algResource); } }
/**
 * Converts an ORE Triple into a Jena Statement, mapping a literal object to a typed literal
 * and a resource object to a resource.
 */
public static Statement createStatement(Triple triple) throws OREException {
  Model model = ModelFactory.createDefaultModel();
  Resource subject = model.createResource(triple.getSubjectURI().toString());
  Property predicate = model.createProperty(triple.getPredicate().getURI().toString());
  RDFNode object =
      triple.isLiteral()
          ? model.createTypedLiteral(triple.getObjectLiteral())
          : model.createResource(triple.getObjectURI().toString());
  return model.createStatement(subject, predicate, object);
}
/**
 * Serializes a document description (DC metadata, FOAF authors, ACM classifications) as
 * RDF/XML-ABBREV into the given file.
 *
 * @param date publication date (xsd:date), or null to omit
 * @param title document title, or null to omit
 * @param id document identifier, or null to omit
 * @param cls ACM classification strings, or null to omit
 * @param DURI the document's URI
 * @param auth list of authors (forename/surname)
 * @param abst abstract text, or null to omit
 * @param doi document DOI (NOTE: currently unused, kept for interface compatibility)
 * @param file output file path
 */
void writeRDF(
    String date,
    String title,
    String id,
    List<String> cls,
    String DURI,
    List<Person> auth,
    String abst,
    String doi,
    String file) {
  Model model = ModelFactory.createDefaultModel();
  model.setNsPrefix("acm", "http://acm.rkbexplorer.com/ontologies/acm#");
  model.setNsPrefix("foaf", FOAF.NS);
  model.setNsPrefix("dcterms", DCTerms.NS);
  Property Class = model.createProperty(acm + "class");
  Resource doc1 = model.createResource(DURI);
  if (id != null) doc1.addProperty(DCTerms.identifier, id);
  if (title != null) doc1.addProperty(DCTerms.title, title);
  if (abst != null) doc1.addProperty(DCTerms.abstract_, abst);
  if (date != null) doc1.addProperty(DCTerms.date, date, XSDDateType.XSDdate);
  // Each author becomes an anonymous foaf:Person with first/family name.
  for (Person person : auth) {
    doc1.addProperty(
        DCTerms.creator,
        model
            .createResource(FOAF.Person)
            .addProperty(FOAF.firstName, person.getForename())
            .addProperty(FOAF.family_name, person.getSurname()));
  }
  if (cls != null) {
    for (String classification : cls) {
      doc1.addProperty(Class, classification);
    }
  }
  // FIX: the original never closed the FileOutputStream (resource leak);
  // try-with-resources guarantees the stream is closed.
  try (FileOutputStream out = new FileOutputStream(file)) {
    model.write(out, "RDF/XML-ABBREV");
  } catch (IOException e) {
    System.out.println("Exception caught" + e.getMessage());
  }
}
/**
 * Check that a predicate for which no shortnames are defined in name map still gets a term
 * binding in the metadata.
 */
@Test
public void testTermBindingsCoverAllPredicates() throws URISyntaxException {
  Resource thisPage = ResourceFactory.createResource("elda:thisPage");
  String pageNumber = "1";
  Bindings cc = new Bindings();
  URI reqURI = new URI("");
  //
  // Minimal endpoint details: a list endpoint with no parameter-based content negotiation.
  EndpointDetails spec =
      new EndpointDetails() {
        @Override
        public boolean isListEndpoint() {
          return true;
        }

        @Override
        public boolean hasParameterBasedContentNegotiation() {
          return false;
        }
      };
  EndpointMetadata em = new EndpointMetadata(spec, thisPage, pageNumber, cc, reqURI);
  //
  // A model containing one triple whose predicate has NO configured shortname;
  // only the "this" prefix mapping is available to derive one.
  PrefixMapping pm =
      PrefixMapping.Factory.create().setNsPrefix("this", "http://example.com/root#");
  Model toScan = ModelIOUtils.modelFromTurtle(":a <http://example.com/root#predicate> :b.");
  toScan.setNsPrefixes(pm);
  // NOTE(review): declared as Resource although createProperty returns a Property
  // (a Property is-a Resource, so this compiles); kept as in the original.
  Resource predicate = toScan.createProperty("http://example.com/root#predicate");
  Model meta = ModelFactory.createDefaultModel();
  Resource exec = meta.createResource("fake:exec");
  ShortnameService sns = new StandardShortnameService();
  //
  APIEndpoint.Request r = new APIEndpoint.Request(new Controls(), reqURI, cc);
  CompleteContext c =
      new CompleteContext(CompleteContext.Mode.PreferPrefixes, sns.asContext(), pm)
          .include(toScan);
  // Exercise the code under test: term bindings are added into 'meta'.
  em.addTermBindings(toScan, meta, exec, c);

  @SuppressWarnings("unused")
  Map<String, String> termBindings = c.Do();
  Resource tb = meta.listStatements(null, API.termBinding, Any).nextStatement().getResource();
  // The unknown predicate must still get a binding, with a prefix-derived
  // shortname ("this" prefix + local name).
  assertTrue(meta.contains(tb, API.label, "this_predicate"));
  assertTrue(meta.contains(tb, API.property, predicate));
}
/**
 * Builds a one-statement model stamping the given graph URL with a "mo:timestamp" literal;
 * returns null when no method is supplied.
 */
private static com.hp.hpl.jena.rdf.model.Model defaultGraphStatement(
    String graphUrl, long timestamp, String method) {
  // NOTE(review): 'method' only gates this null check and is otherwise unused —
  // confirm whether it was meant to be recorded in the model as well.
  if (method == null) {
    return null;
  }
  com.hp.hpl.jena.rdf.model.Model graphModel = ModelFactory.createDefaultModel();
  graphModel.add(
      graphModel.createLiteralStatement(
          graphModel.createResource(graphUrl),
          graphModel.createProperty("mo:timestamp"),
          timestamp));
  return graphModel;
}
/** * Upload the given model to the triplestore * * @param model, the model to upload. * @param sMeetingID the id of the meeting whose data is being uploaded. * @throws MalformedURLException if the urls used to create the HttpRemove and HttpAdd is * malformed. */ public void uploadModel(com.hp.hpl.jena.rdf.model.Model oModel, String sMeetingID) throws MalformedURLException { // System.out.println("About to try and upload: "+oModel.toString()); com.hp.hpl.jena.rdf.model.Model oInnerModel = ModelFactory.createDefaultModel(); Resource meeting = oInnerModel.createResource(sMeetingID); Property comp_is_proc = oInnerModel.createProperty(MEMETIC_NS, "compendium-is-processed"); // $NON-NLS-1$ meeting.addProperty(comp_is_proc, "true"); // $NON-NLS-1$ HttpRemove removeOp = new HttpRemove(sUrl); removeOp.setModel(oInnerModel); removeOp.exec(); oInnerModel.close(); HttpAdd addOp = new HttpAdd(sUrl); addOp.setModel(oModel); addOp.exec(); }
/**
 * Executes an ASK query and compares the outcome against the expected results, which may be
 * either a plain boolean or an RDF result-set graph carrying a single boolean property.
 */
void runTestAsk(Query query, QueryExecution qe) throws Exception {
  boolean actual = qe.execAsk();
  if (results == null) {
    return; // no expected results recorded; nothing to verify
  }
  if (results.isBoolean()) {
    assertEquals("ASK test results do not match", results.getBooleanResult(), actual);
    return;
  }
  // Expected result encoded as a graph: exactly one rs:ResultSet with a boolean property.
  Model resultsAsModel = results.getModel();
  StmtIterator sIter =
      results.getModel().listStatements(null, RDF.type, ResultSetGraphVocab.ResultSet);
  if (!sIter.hasNext()) throw new QueryTestException("Can't find the ASK result");
  Statement s = sIter.nextStatement();
  if (sIter.hasNext()) throw new QueryTestException("Too many result sets in ASK result");
  Property booleanProp =
      resultsAsModel.createProperty(ResultSetGraphVocab.getURI() + "boolean");
  boolean expected = s.getSubject().getRequiredProperty(booleanProp).getBoolean();
  if (expected != actual) assertEquals("ASK test results do not match", expected, actual);
}
/**
 * Writes the model to a file whose name is derived from the record identifier found under the
 * configured property; models without an identifier are skipped with a warning.
 *
 * @param model the RDF model to serialize
 */
@Override
public void process(final Model model) {
  String identifier;
  try {
    identifier =
        model.listObjectsOfProperty(model.createProperty(filenameUtil.property)).next().toString();
    LOG.debug("Going to store identifier=" + identifier);
  } catch (NoSuchElementException e) {
    // Without an identifier no filename can be derived; skip this model.
    LOG.warn("No identifier => cannot derive a filename for " + model.toString());
    return;
  }
  // Directory name is a configured slice of the identifier (when long enough).
  String directory = identifier;
  if (directory.length() >= filenameUtil.endIndex) {
    directory = directory.substring(filenameUtil.startIndex, filenameUtil.endIndex);
  }
  final String file =
      FilenameUtils.concat(
          filenameUtil.target,
          FilenameUtils.concat(
              directory + File.separator, identifier + "." + filenameUtil.fileSuffix));
  LOG.debug("Write to " + file);
  filenameUtil.ensurePathExists(file);
  try (final Writer writer =
      new OutputStreamWriter(new FileOutputStream(file), filenameUtil.encoding)) {
    // Serialize to a string first, then write with the configured encoding.
    final StringWriter tripleWriter = new StringWriter();
    RDFDataMgr.write(tripleWriter, model, this.serialization);
    IOUtils.write(tripleWriter.toString(), writer);
    // FIX: removed the redundant writer.close() — try-with-resources closes it.
  } catch (IOException e) {
    // FIX: dropped e.printStackTrace(); the cause is preserved in the thrown
    // exception instead of being duplicated on stderr.
    throw new MetafactureException(e);
  }
}