/**
 * Create a new RDF document resource.
 *
 * @param model the ontology model in which to create the resource
 * @param uri the URI of the new document
 * @param title the document title
 * @param description the document description
 * @param creator the resource representing the document creator
 * @param license the document license
 * @param dateSubmitted the submission date
 * @param dateAccepted the acceptance date
 * @param subject the document subject
 * @param language the document language
 * @param contributors the resources representing the contributors
 * @param publishers the resources representing the publishers
 * @return the newly created resource
 */
public static Resource createDocument(
    OntModel model,
    String uri,
    String title,
    String description,
    Resource creator,
    String license,
    String dateSubmitted,
    String dateAccepted,
    String subject,
    String language,
    Resource[] contributors,
    Resource[] publishers) {
  Resource doc = model.createResource(uri);
  doc.addProperty(RDF.type, QualiPSoDOC.TextDocument);
  doc.addProperty(QualiPSoDOC.title, title);
  doc.addProperty(QualiPSoDOC.description, description);
  doc.addProperty(QualiPSoDOC.creator, creator);
  doc.addProperty(QualiPSoDOC.license, license);
  doc.addProperty(QualiPSoDOC.dateSubmitted, dateSubmitted);
  doc.addProperty(QualiPSoDOC.dateAccepted, dateAccepted);
  doc.addProperty(QualiPSoDOC.subject, subject);
  doc.addProperty(QualiPSoDOC.language, language);
  for (Resource contributor : contributors) {
    doc.addProperty(QualiPSoDOC.contributor, contributor);
  }
  for (Resource publisher : publishers) {
    doc.addProperty(QualiPSoDOC.publisher, publisher);
  }
  return doc;
}
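// Hedged usage sketch (not part of the original source): shows how createDocument
// might be called. Assumes the QualiPSoDOC vocabulary class and Jena are on the
// classpath; all URIs and literal values below are illustrative only.
public static void createDocumentExample() {
  OntModel model = ModelFactory.createOntologyModel();
  Resource creator = model.createResource("http://example.org/people#alice");
  Resource[] contributors = {model.createResource("http://example.org/people#bob")};
  Resource[] publishers = {model.createResource("http://example.org/orgs#acme")};
  Resource doc =
      createDocument(
          model,
          "http://example.org/docs#spec-1",
          "Specification",
          "A sample document",
          creator,
          "CC-BY-4.0",
          "2010-01-01",
          "2010-02-01",
          "testing",
          "en",
          contributors,
          publishers);
  // Print the resulting statements to check the created structure
  doc.getModel().write(System.out, "RDF/XML-ABBREV");
}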
@Override
public void processInput(Resource input, Resource output) {
  Printing.print(input.getModel());

  // Extract the link to the JSON specification
  Literal jsonSpecificationURL =
      input.getProperty(Vocabulary.hasJSONExperimentSpecificationURL).getLiteral();
  JSONSpecification jsonSpec = new JSONSpecification(getURL(jsonSpecificationURL));
  String jsonSpecID = jsonSpec.getSpecID();

  // Create the output model
  Model outputModel = ModelFactory.createDefaultModel();

  // Create the UnpostedScenario
  ResourceURI resourceURI = new ResourceURI();
  Resource unpostedScenario =
      outputModel.createResource(
          resourceURI.getURI("unpublishedScenario", jsonSpecID).toASCIIString(),
          Vocabulary.UnpostedScenario);
  unpostedScenario.addProperty(Vocabulary.hasScenarioLayer, input);
  unpostedScenario.addLiteral(Vocabulary.hasJSONExperimentSpecificationURL, jsonSpecificationURL);

  // Merge the temporary output model into the output resource's model
  output.getModel().add(outputModel);

  // Point the output scenario layer back at its parent scenario
  output.addProperty(Vocabulary.isScenarioLayerOf, unpostedScenario);
  Printing.print(output.getModel());
}
private Model createModel() {
  Model cubeData = ModelFactory.createDefaultModel();
  Resource structure = cubeData.createResource(STRUCTURE, QB.DataStructureDefinition);

  Resource c1 = cubeData.createResource(STRUCTURE + "/c1", QB.ComponentSpecification);
  c1.addProperty(RDFS.label, cubeData.createLiteral("Component Specification of Class", "en"));
  c1.addProperty(QB.dimension, GK.DIM.Class);

  Resource c2 = cubeData.createResource(STRUCTURE + "/c2", QB.ComponentSpecification);
  c2.addProperty(QB.measure, GK.MEASURE.PropertyCount);
  c2.addProperty(
      RDFS.label,
      cubeData.createLiteral("Component Specification of Number of Properties", "en"));

  // Resource c3 = cubeData.createResource(STRUCTURE + "/c3", QB.ComponentSpecification);
  // c3.addProperty(RDFS.label,
  //     cubeData.createLiteral("Component Specification of Instance", "en"));
  // c3.addProperty(QB.dimension, GK.DIM.Instance);

  structure.addProperty(QB.component, c1);
  structure.addProperty(QB.component, c2);
  // structure.addProperty(QB.component, c3);

  cubeData.add(GK.DIM.ClassStatements);
  cubeData.add(GK.DIM.PropertyStatements);
  cubeData.add(GK.DIM.InstanceStatements);
  cubeData.add(GK.MEASURE.PropertyCountStatements);
  return cubeData;
}
private Model execute(Model inputModel, String endpoint) {
  Model cube = createModel();
  Calendar calendar = Calendar.getInstance(TimeZone.getDefault());

  // Create the data set that all observations will belong to
  Resource dataset =
      cube.createResource(
          GK.uri + "Properties_per_Class" + calendar.getTimeInMillis(), QB.Dataset);
  dataset.addLiteral(RDFS.comment, "Properties per class");
  dataset.addLiteral(DCTerms.date, cube.createTypedLiteral(calendar));
  dataset.addLiteral(DCTerms.publisher, "R & D, Unister GmbH, Geoknow");
  dataset.addProperty(QB.structure, cube.createResource(STRUCTURE));

  // Query either the local model or the remote SPARQL endpoint for all classes
  QueryExecution qExec;
  if (inputModel != null) {
    qExec = QueryExecutionFactory.create(INSTANCES, inputModel);
  } else {
    qExec = QueryExecutionFactory.sparqlService(endpoint, INSTANCES, defaultGraphs, defaultGraphs);
  }

  ResultSet result = qExec.execSelect();
  int i = 0;
  while (result.hasNext()) {
    Resource owlClass = result.next().getResource("class");
    NUMBER_OF_PROPERTIES.setIri("class", owlClass.getURI());

    // Count the properties of the current class
    QueryExecution propertiesQexec;
    if (inputModel != null) {
      propertiesQexec = QueryExecutionFactory.create(NUMBER_OF_PROPERTIES.asQuery(), inputModel);
    } else {
      propertiesQexec =
          QueryExecutionFactory.sparqlService(
              endpoint, NUMBER_OF_PROPERTIES.asQuery(), defaultGraphs, defaultGraphs);
    }

    try {
      ResultSet propertiesResult = propertiesQexec.execSelect();
      if (propertiesResult.hasNext()) {
        Resource obs =
            cube.createResource(
                "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
        obs.addProperty(QB.dataset, dataset);
        obs.addProperty(GK.DIM.Class, owlClass);
        obs.addLiteral(GK.MEASURE.PropertyCount, propertiesResult.next().getLiteral("count"));
        i++;
      }
    } catch (Exception e) {
      // Record a failed measurement as an observation with a count of -1
      Resource obs =
          cube.createResource(
              "http://www.geoknow.eu/data-cube/metric2/observation" + i, QB.Observation);
      obs.addProperty(QB.dataset, dataset);
      obs.addProperty(GK.DIM.Class, owlClass);
      obs.addLiteral(GK.MEASURE.PropertyCount, -1);
      obs.addLiteral(RDFS.comment, e.getMessage());
      i++;
    }
  }
  return cube;
}
public Resource createGLIFinstance(String bp_type, String id, AttribGroupSet ags) {
  Resource r =
      rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  String type = getGLIFtype(bp_type);
  Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type);
  r.addProperty(RDF.type, r_type);

  // Fall back to an empty label when none is present; the null check must run
  // before toString() is called on the value
  Object[] labelValues = ags.getSpecialValue("label");
  String label =
      (labelValues != null && labelValues.length > 0 && labelValues[0] != null)
          ? labelValues[0].toString()
          : "";
  r.addProperty(RDFS.label, label);
  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  if (type.equalsIgnoreCase("Patient_State")) {
    r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "display_name"), label);
  }

  if (bp_type.equalsIgnoreCase("TASK")) {
    addTaskProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("DECISION")) {
    addDecisionProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("START")) {
    addStartProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("END")) {
    addEndProperties(r, ags);
  } else if (bp_type.equalsIgnoreCase("SUBPATH")) {
    addSubpathProperties(r, ags);
  }

  stepSet.put(id, r);
  return r;
}
public Resource rowColRef(int col, Model pmlModel) {
  Resource epa = pmlModel.createResource(Ontology.EPA.NS + "EPA");
  Resource source = pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.SourceUsage));
  Resource frag =
      pmlModel.createResource(pmlModel.createResource(Ontology.PMLP.DocumentFragmentByRowCol));
  Resource document =
      pmlModel.createResource(src, pmlModel.createResource(Ontology.PMLP.Dataset));
  Property prop;

  // Relate source to fragment
  prop = pmlModel.createProperty(Ontology.PMLP.hasSource);
  source.addProperty(prop, frag);

  // Relate row/col information
  prop = pmlModel.createProperty(Ontology.PMLP.hasFromCol);
  frag.addLiteral(prop, col);
  prop = pmlModel.createProperty(Ontology.PMLP.hasToCol);
  frag.addLiteral(prop, col);
  prop = pmlModel.createProperty(Ontology.PMLP.hasFromRow);
  frag.addLiteral(prop, row);
  prop = pmlModel.createProperty(Ontology.PMLP.hasToRow);
  frag.addLiteral(prop, row);

  // Relate fragment to document
  prop = pmlModel.createProperty(Ontology.PMLP.hasDocument);
  frag.addProperty(prop, document);

  // Relate document to publisher
  prop = pmlModel.createProperty(Ontology.PMLP.hasPublisher);
  document.addProperty(prop, epa);

  return source;
}
public Resource getRDFResource() {
  OntModel vocab = OntologyFactory.getTalenticaOntology();
  Resource price = vocab.createResource(getUri());
  price.addProperty(vocab.getProperty(AMNT_URI), String.valueOf(this.amount));
  price.addProperty(vocab.getProperty(CURRENCY_URI), this.currency);
  return price;
}
public static Resource createFigure(OntModel model, Resource creator, String title) {
  // Anonymous figure node
  Resource figure = model.createResource();
  figure.addProperty(RDF.type, QualiPSoDOC.Figure);
  figure.addProperty(QualiPSoDOC.title, title);
  figure.addProperty(QualiPSoDOC.creator, creator);
  return figure;
}
/**
 * Create a new RDF folder resource.
 *
 * @param model the ontology model in which to create the resource
 * @param uri the URI of the new folder
 * @param title the folder title
 * @param description the folder description
 * @param creator the resource representing the folder creator
 * @return the newly created resource
 */
public static Resource createFolder(
    OntModel model, String uri, String title, String description, Resource creator) {
  Resource folder = model.createResource(uri);
  folder.addProperty(RDF.type, QualiPSoDOC.Folder);
  folder.addProperty(QualiPSoDOC.title, title);
  folder.addProperty(QualiPSoDOC.description, description);
  folder.addProperty(QualiPSoDOC.creator, creator);
  return folder;
}
protected Model getDefaultModel() {
  Model model = ModelFactory.createDefaultModel();
  Resource resource =
      model.createResource("", model.createResource("http://example.com/ns#Bug"));
  resource.addProperty(RDF.type, model.createResource(LDP.RDFSource.stringValue()));
  resource.addProperty(model.createProperty("http://example.com/ns#severity"), "High");
  resource.addProperty(DCTerms.title, "Another bug to test.");
  resource.addProperty(DCTerms.description, "Issues that need to be fixed.");
  return model;
}
public OntClass asOntClass(OntModel owlModel, Model pmlModel) {
  Individual elem =
      owlModel.createIndividual(Ontology.EPA.NS + elementName, Ontology.Element(owlModel));
  OntClass elemRestrict =
      owlModel.createHasValueRestriction(null, Ontology.hasElement(owlModel), elem);

  Literal test = owlModel.createTypedLiteral(testNumber);
  OntClass testRestrict =
      owlModel.createHasValueRestriction(null, Ontology.hasTestNumber(owlModel), test);

  Resource x =
      owlModel.createResource(Ontology.EPA.NS + "EPA-" + elementName + "-Threshold-" + testNumber);

  // Map the comparison type onto the matching XSD facet name; an unrecognized
  // type must not fall through, or the facet property URI would end in "null"
  String op;
  switch (cmpType) {
    case 0:
      op = "minInclusive";
      break;
    case 1:
      op = "minExclusive";
      break;
    case 3:
      op = "maxExclusive";
      break;
    case 4:
      op = "maxInclusive";
      break;
    default:
      throw new IllegalArgumentException("unknown cmp type: " + cmpType);
  }
  x.addLiteral(owlModel.createProperty(XSD.getURI() + op), Double.parseDouble(cmpValue));

  // Build the restricted datatype: xsd:double constrained by the facet above
  RDFList withRestrict = owlModel.createList(new RDFNode[] {x});
  Resource y = owlModel.createResource(RDFS.Datatype);
  y.addProperty(OWL2.withRestrictions, withRestrict);
  y.addProperty(OWL2.onDatatype, XSD.xdouble);
  OntClass valueRestrict =
      owlModel.createSomeValuesFromRestriction(null, Ontology.hasValue(owlModel), y);

  // Intersection of FacilityMeasurement, the test restriction, the value
  // restriction, and the element restriction
  RDFNode[] components = new RDFNode[4];
  components[0] = Ontology.FacilityMeasurement(owlModel);
  components[1] = testRestrict;
  components[2] = valueRestrict;
  components[3] = elemRestrict;
  RDFList intersect = owlModel.createList(components);

  OntClass self =
      owlModel.createIntersectionClass(
          Ontology.EPA.NS + "EPA-Excessive-" + elementName + "-Measurement-" + testNumber,
          intersect);
  self.addProperty(RDFS.subClassOf, Ontology.Violation(owlModel));
  return self;
}
public Resource createGLIFinstance(String type, String label) {
  Resource r =
      rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + type);
  r.addProperty(RDF.type, r_type);
  r.addProperty(RDFS.label, label);
  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "name"), label);
  return r;
}
public static Model createModel() {
  Model m = ModelFactory.createDefaultModel();
  Resource r1 = m.createResource("http://example.org/book#1");
  Resource r2 = m.createResource("http://example.org/book#2");
  r1.addProperty(DC.title, "SPARQL - the book")
      .addProperty(DC.description, "A book about SPARQL");
  r2.addProperty(DC.title, "Advanced techniques for SPARQL");
  return m;
}
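// Hedged usage sketch (not part of the original source): iterate over the
// statements of the model built by createModel() above and print them. Uses
// only standard Jena API calls.
public static void listBookStatements() {
  Model m = createModel();
  StmtIterator it = m.listStatements();
  while (it.hasNext()) {
    System.out.println(it.nextStatement());
  }
}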
public void testQuintetOfQuadlets() {
  Resource rs = model.createResource();
  rs.addProperty(RDF.type, RDF.Statement);
  model.createResource().addProperty(RDF.value, rs);
  rs.addProperty(RDF.subject, model.createResource());
  rs.addProperty(RDF.predicate, model.createProperty("http://example.org/foo"));
  rs.addProperty(RDF.object, model.createResource());
  rs.addProperty(RDF.object, model.createResource());

  StmtIterator it = model.listStatements();
  while (it.hasNext()) {
    Statement s = it.nextStatement();
    assertFalse(s.getObject().equals(s.getSubject()));
  }
}
private void addDidacticsTextMaterial(
    Resource step, String purpose, String mime, String text, int index) {
  Resource r = createGLIFinstance("Supplemental_Material_List", "info_" + (classIndex + 1));
  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "purpose"), purpose);

  Resource r2 =
      createGLIFinstance("Text_Material", "info_text_" + (classIndex + 1) + "_" + index);
  r2.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "MIME_type_format"), mime);
  r2.addProperty(
      rdfModel.createProperty(KB_NS_PREFIX + "material"), revivePolishCharacters(text));

  r.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "items"), r2);
  step.addProperty(rdfModel.createProperty(KB_NS_PREFIX + "didactics"), r);
}
private static void addTypeToAll(Resource type, Set<Resource> candidates) {
  List<Resource> types = equivalentTypes(type);
  for (Resource candidate : candidates) {
    for (Resource t : types) {
      candidate.addProperty(RDF.type, t);
    }
  }
}
public static void main(String[] args) {
  // Create an empty model
  Model model = ModelFactory.createDefaultModel();

  // Create the resource
  Resource johnSmith = model.createResource(personURI);

  // Add the property
  johnSmith.addProperty(VCARD.FN, fullName);
}
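// Hedged follow-on sketch (not part of the original source): the same model as
// in main above, written to standard output. personURI and fullName are the
// constants assumed by main; "N-TRIPLE" is one of Jena's built-in
// serialization names.
public static void writeModelExample() {
  Model model = ModelFactory.createDefaultModel();
  Resource johnSmith = model.createResource(personURI);
  johnSmith.addProperty(VCARD.FN, fullName);
  model.write(System.out, "N-TRIPLE");
}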
public void testLinking() throws Exception {
  // Test source model
  String sourceNS = "http://bla.com/";
  Model targetModel = ModelFactory.createDefaultModel();
  Resource item1 = targetModel.createResource(sourceNS + "1");
  item1.addProperty(RDF.type, OAI.Item);
  item1.addProperty(DC.title, "TestTitle");

  // Test target model
  String targetNS = "http://blub.com/";
  Model remoteModel = ModelFactory.createDefaultModel();
  Resource item2 = remoteModel.createResource(targetNS + "1");
  item2.addProperty(RDF.type, OAI.Item);
  item2.addProperty(DC.title, "TestTitle");

  // Create a dummy config
  OAI2LODConfig config = new OAI2LODConfig();
  OAI2LODConfig.LinkedSparqlEndpoint endpointConfig =
      config
          .createServerConfig("http://bla.com")
          .createLinkedSparqlEndpoint("http://blub.com", 100);
  endpointConfig.createLinkingRule(
      OAI.Item.toString(),
      DC.title.toString(),
      OAI.Item.toString(),
      DC.title.toString(),
      RDFS.seeAlso.toString(),
      "uk.ac.shef.wit.simmetrics.similaritymetrics.Levenshtein",
      1.0f);

  LinkingJob linkingJob = new LinkingJobMock(config, targetModel, remoteModel);
  linkingJob.linkData();

  Model result = linkingJob.getTargetModel();
  result.write(System.out);
}
public Resource createGLIFinstanceMaintenance(Metadata m) {
  Resource r =
      rdfModel.createResource(KB_NS_PREFIX + "GLIF3_5_BitPathways_Class_" + classIndex);
  classIndex++;
  Resource r_type = rdfModel.createResource(GLIF_Factory.KB_NS_PREFIX + "Maintenance_Info");
  r.addProperty(RDF.type, r_type);
  r.addProperty(
      rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX + "title"),
      m.getTitleMetadata().getName());
  // r.addProperty(rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX + "authoring_date"),
  //     m.getLastModifiedMetadata().getDate());
  r.addProperty(
      rdfModel.createProperty(GLIF_Factory.KB_NS_PREFIX + "author"),
      m.getAuthorsMetadata()[0].getName());
  return r;
}
private Resource handleTelephone(Model m, String telNumber) {
  // Create a tel: URI in a format that can be dialed directly by phones
  String telNumberUri = "tel:" + extractNumber(telNumber);
  Resource r = m.createResource(telNumberUri);
  r.addProperty(RDFS.label, telNumber);
  return r;
}
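// Hedged usage sketch (not part of the original source): attach a telephone
// resource to a contact. The contact URI is illustrative, VCARD.TEL is Jena's
// standard vCard property, and extractNumber is the private helper assumed above.
private void addTelephoneExample(Model m) {
  Resource contact = m.createResource("http://example.org/contacts#alice");
  Resource tel = handleTelephone(m, "+48 (12) 345-67-89");
  contact.addProperty(VCARD.TEL, tel);
}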
public Model getModel() {
  Model model = ModelFactory.createDefaultModel();
  Resource itemNode = model.createResource(uri);
  for (Prop prop : values) {
    if (prop.isLiteral()) {
      itemNode.addLiteral(prop.getType(), model.createLiteral(prop.getValue()));
    } else {
      itemNode.addProperty(prop.getType(), model.createResource(prop.getValue()));
    }
  }
  return model;
}
private Model createModel() {
  Model cubeData = ModelFactory.createDefaultModel();
  Resource structure = cubeData.createResource(STRUCTURE, QB.DataStructureDefinition);

  Resource c1 = cubeData.createResource(STRUCTURE + "/c1", QB.ComponentSpecification);
  c1.addProperty(
      RDFS.label, cubeData.createLiteral("Component Specification of Instance", "en"));
  c1.addProperty(QB.dimension, GK.DIM.Class);

  Resource c2 = cubeData.createResource(STRUCTURE + "/c2", QB.ComponentSpecification);
  c2.addProperty(QB.measure, GK.MEASURE.Average);
  c2.addProperty(
      RDFS.label, cubeData.createLiteral("Component Specification of Average Surface", "en"));

  structure.addProperty(QB.component, c1);
  structure.addProperty(QB.component, c2);

  cubeData.add(GK.DIM.ClassStatements);
  cubeData.add(GK.MEASURE.AverageStatements);
  return cubeData;
}
/**
 * Create the triples for a new node being created.
 *
 * @param oMeetingEvent the event whose data to upload.
 * @param model the model to add the data to.
 */
private synchronized void addNode(
    MeetingEvent oMeetingEvent, com.hp.hpl.jena.rdf.model.Model model) {
  NodeSummary oNode = oMeetingEvent.getNode();
  if (oNode == null) {
    return;
  }

  String sNodeID = oNode.getId();
  Resource oResNode =
      model.createResource(oMeetingEvent.getMeetingID() + "-" + sNodeID); //$NON-NLS-1$

  Property type = model.createProperty(RDF_NS, "type"); //$NON-NLS-1$
  oResNode.addProperty(type, model.createResource(MEMETIC_NS + "Compendium-Node")); //$NON-NLS-1$

  int nNodeType = oNode.getType();
  String sTripleStoreString = UINodeTypeManager.getTripleStoreDescription(nNodeType);
  oResNode.addProperty(type, model.createResource(MEMETIC_NS + sTripleStoreString));

  if (nNodeType == ICoreConstants.REFERENCE_SHORTCUT) {
    oResNode.addProperty(
        model.createProperty(MEMETIC_NS, "has-reference"), oNode.getSource()); //$NON-NLS-1$
  }

  // Add the label
  oResNode.addProperty(
      model.createProperty(MEMETIC_NS, "has-label"), oNode.getLabel()); //$NON-NLS-1$

  // Add the triplestore id, if the node has one
  String sOriginalID = oNode.getOriginalID();
  if (sOriginalID.startsWith("TS:") //$NON-NLS-1$
      && !(nNodeType == ICoreConstants.REFERENCE
          || nNodeType == ICoreConstants.REFERENCE_SHORTCUT)) {
    int ind = sOriginalID.indexOf(":"); //$NON-NLS-1$
    sOriginalID = sOriginalID.substring(ind + 1);
    Property has_original_id = model.createProperty(MEMETIC_NS, "has-original-id"); //$NON-NLS-1$
    Resource original_id = model.createResource(sOriginalID);
    oResNode.addProperty(has_original_id, original_id);
  }
}
/**
 * Create the model and the meeting data for the given meeting id.
 *
 * @param oMeeting the object holding the meeting data.
 * @param model the model to add the data to.
 */
public synchronized void addMeetingData(Meeting oMeeting, com.hp.hpl.jena.rdf.model.Model model) {
  Resource meeting = model.createResource(oMeeting.getMeetingID());
  meeting.addProperty(
      model.createProperty(MEETING_NS, "has-transcription"), //$NON-NLS-1$
      model.createResource(
          oMeeting.getMeetingID() + "-" + oMeeting.getMeetingMapID())); //$NON-NLS-1$

  // Define the map owner ('person') Resource and add their type, name, and whether
  // Compendium-created (always 'true').
  UserProfile oUser = oMeeting.getUser();
  if (oUser == null) {
    oUser = ProjectCompendium.APP.getModel().getUserProfile();
  }
  Resource person = model.createResource(MEMETIC_STUB + oUser.getId());
  person.addProperty(
      model.createProperty(RDF_NS, "type"), //$NON-NLS-1$
      model.createResource(PORTAL_NS + "Person")); //$NON-NLS-1$
  person.addProperty(
      model.createProperty(PORTAL_NS, "full-name"), oUser.getUserName()); //$NON-NLS-1$
  person.addProperty(
      model.createProperty(MEMETIC_NS, "is-compendium-created"),
      "true"); //$NON-NLS-1$ //$NON-NLS-2$

  // Upload the data about the meeting map node itself
  MeetingEvent oMeetingEvent =
      new MeetingEvent(
          oMeeting.getMeetingID(),
          false,
          MeetingEvent.NODE_ADDED_EVENT,
          (View) oMeeting.getMapNode(),
          oMeeting.getMapNode());
  addNode(oMeetingEvent, model);
}
/** @param value the value to set */
public void setValue(String value) {
  this.value = value;
  if (this.model != null && this.checksumNode != null) {
    // Delete any previous value
    Property p =
        model.getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE);
    model.removeAll(checksumResource, p, null);
    // Add the new value
    p =
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE);
    checksumResource.addProperty(p, value);
  }
}
/**
 * Creates a resource from this SPDX Checksum.
 *
 * @param model the model in which to create the resource
 * @return the newly created checksum resource
 */
public Resource createResource(Model model) {
  this.model = model;
  Resource type =
      model.createResource(
          SpdxRdfConstants.SPDX_NAMESPACE + SpdxRdfConstants.CLASS_SPDX_CHECKSUM);
  Resource r = model.createResource(type);
  if (algorithm != null) {
    Property algProperty =
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM);
    Resource algResource = model.createResource(ALGORITHM_TO_URI.get(algorithm));
    r.addProperty(algProperty, algResource);
  }
  if (this.value != null) {
    Property valueProperty =
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_VALUE);
    r.addProperty(valueProperty, this.value);
  }
  this.checksumNode = r.asNode();
  this.checksumResource = r;
  return r;
}
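// Hedged usage sketch (not part of the original source): write a checksum into
// a fresh model and print it. Assumes the enclosing class is named SPDXChecksum
// (its constructor is not shown in this source); "TURTLE" is a standard Jena
// serialization name.
public static void checksumExample(SPDXChecksum checksum) {
  Model model = ModelFactory.createDefaultModel();
  checksum.createResource(model);
  model.write(System.out, "TURTLE");
}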
@Override
protected void processInput(String keggGeneId, String keggGeneRecord, Resource output) {
  Map<String, String> recordSections = KeggUtils.getSectionsFromKeggRecord(keggGeneRecord);
  StrTokenizer tokenizer = new StrTokenizer();

  if (recordSections.containsKey(PATHWAY_RECORD_SECTION)) {
    for (String line : recordSections.get(PATHWAY_RECORD_SECTION).split("\\r?\\n")) {
      String keggPathwayId = tokenizer.reset(line).nextToken();
      Resource keggPathwayNode =
          LSRNUtils.createInstance(
              output.getModel(), LSRNUtils.getClass(LSRN.Namespace.KEGG_PATHWAY), keggPathwayId);
      output.addProperty(SIO.is_participant_in, keggPathwayNode);
    }
  }
}
/**
 * @param algorithm the algorithm to set
 * @throws InvalidSPDXAnalysisException if the algorithm string can not be mapped to a resource
 */
public void setAlgorithm(String algorithm) throws InvalidSPDXAnalysisException {
  this.algorithm = algorithm;
  if (this.model != null && this.checksumNode != null) {
    Resource algResource = algorithmStringToResource(algorithm, this.model);
    // Delete any previous algorithm
    Property p =
        model.getProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM);
    model.removeAll(checksumResource, p, null);
    // Add the new algorithm
    p =
        model.createProperty(
            SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_CHECKSUM_ALGORITHM);
    checksumResource.addProperty(p, algResource);
  }
}
/**
 * Create a new RDF section resource.
 *
 * @param model the ontology model in which to create the resource
 * @param title the section title
 * @param creator the resource representing the section creator
 * @param level the nesting level of the section
 * @param number the section number
 * @param paragraphs the text paragraphs contained in the section
 * @return the newly created resource
 */
public static Resource createSection(
    OntModel model,
    String title,
    Resource creator,
    int level,
    String number,
    String[] paragraphs) {
  // Anonymous section resource
  Resource section = model.createResource();
  section.addProperty(RDF.type, QualiPSoDOC.Section);
  section.addProperty(QualiPSoDOC.title, title);
  section.addProperty(QualiPSoDOC.creator, creator);
  section.addProperty(QualiPSoDOC.level, String.valueOf(level));
  section.addProperty(QualiPSoDOC.sectionNumber, number);
  for (String paragraph : paragraphs) {
    // Anonymous text resource for each paragraph
    Resource text = model.createResource();
    text.addProperty(RDF.type, QualiPSoDOC.Text);
    text.addProperty(QualiPSoDOC.creator, creator);
    text.addProperty(QualiPSoDOC.textContents, paragraph);
    section.addProperty(QualiPSoDOC.hasText, text);
  }
  return section;
}
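// Hedged usage sketch (not part of the original source): build a section with
// two paragraphs and attach it to a document. QualiPSoDOC.hasSection is a
// hypothetical property; the vocabulary term linking documents to sections is
// not shown in this source.
public static void createSectionExample(OntModel model, Resource doc, Resource creator) {
  Resource section =
      createSection(
          model,
          "Introduction",
          creator,
          1,
          "1",
          new String[] {"First paragraph.", "Second paragraph."});
  doc.addProperty(QualiPSoDOC.hasSection, section); // hypothetical property
}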
/**
 * Upload the given model to the triplestore.
 *
 * @param oModel the model to upload.
 * @param sMeetingID the id of the meeting whose data is being uploaded.
 * @throws MalformedURLException if the url used to create the HttpRemove or HttpAdd is
 *     malformed.
 */
public void uploadModel(com.hp.hpl.jena.rdf.model.Model oModel, String sMeetingID)
    throws MalformedURLException {
  // Mark the meeting as processed before uploading the data itself
  com.hp.hpl.jena.rdf.model.Model oInnerModel = ModelFactory.createDefaultModel();
  Resource meeting = oInnerModel.createResource(sMeetingID);
  Property comp_is_proc =
      oInnerModel.createProperty(MEMETIC_NS, "compendium-is-processed"); //$NON-NLS-1$
  meeting.addProperty(comp_is_proc, "true"); //$NON-NLS-1$

  HttpRemove removeOp = new HttpRemove(sUrl);
  removeOp.setModel(oInnerModel);
  removeOp.exec();
  oInnerModel.close();

  HttpAdd addOp = new HttpAdd(sUrl);
  addOp.setModel(oModel);
  addOp.exec();
}