/**
 * Adds a GAMSO activity to the Jena model.
 *
 * @param activityNumber An array of integers with the components of the activity code.
 * @param activityLabel The label of the activity as read from the Word document.
 * @param activityDescription The components of the activity description (a <code>List</code> of
 *     strings). NOTE(review): not referenced in this method body — confirm whether it should be.
 */
private void addActivityToModel(
    int[] activityNumber, String activityLabel, List<String> activityDescription) {
  // Build the dotted notation, omitting zero components (e.g. "1", "1.2", "1.2.3").
  StringBuilder codeBuilder = new StringBuilder(String.format("%d", activityNumber[0]));
  if (activityNumber[1] > 0) codeBuilder.append(String.format(".%d", activityNumber[1]));
  if (activityNumber[2] > 0) codeBuilder.append(String.format(".%d", activityNumber[2]));
  String code = codeBuilder.toString();
  String parentCode = getParentCode(code);

  logger.debug("Adding activity " + code + " - " + activityLabel);

  Resource gamsoConcept = gamsoModel.createResource(GAMSO_BASE_URI + code, SKOS.Concept);
  gamsoConcept.addProperty(RDF.type, CSPAOnto.GAMSOActivity);
  gamsoConcept.addProperty(SKOS.notation, code);
  gamsoConcept.addProperty(SKOS.prefLabel, gamsoModel.createLiteral(activityLabel, "en"));
  gamsoConcept.addProperty(SKOS.inScheme, gamsoCS);
  if (parentCode == null) {
    // Top-level activity: link both ways to the concept scheme.
    gamsoCS.addProperty(SKOS.hasTopConcept, gamsoConcept);
    gamsoConcept.addProperty(SKOS.topConceptOf, gamsoCS);
  } else {
    // Child activity: link both ways to its parent concept.
    Resource parentConcept = gamsoModel.createResource(GAMSO_BASE_URI + parentCode);
    parentConcept.addProperty(SKOS.narrower, gamsoConcept);
    gamsoConcept.addProperty(SKOS.broader, parentConcept);
  }
}
public static void main(String[] args) { // create an empty model Model model = ModelFactory.createDefaultModel(); // use the FileManager to find the input file String inputFileName = "src/main/resources/lab-3.ttl"; InputStream in = FileManager.get().open(inputFileName); if (in == null) { throw new IllegalArgumentException("File: " + inputFileName + " not found"); } // read the Turtle file model.read(in, null, "TTL"); // get the inferred mode /* The getDeductionsFunction somehow returns the incorrect answer, so I have to use the deprecated class 'Filter'. */ InfModel infModel = ModelFactory.createRDFSModel(model); ExtendedIterator<Statement> stmts = infModel .listStatements() .filterDrop( new Filter<Statement>() { @Override public boolean accept(Statement o) { return model.contains(o); } }); Model deductionsModel = ModelFactory.createDefaultModel().add(new StmtIteratorImpl(stmts)); // list new RDFS-inferred triples about 'Museion' outputInfo(deductionsModel, "museion"); // list new RDFS-inferred triples about 'Chicken Hut' outputInfo(deductionsModel, "chickenHut"); }
/** Verifies that mapping test5.ttl yields one triples map over the EMP2DEPT table keyed by EMPNO. */
@Test
public void test1() throws Exception {
  // Load the R2RML mapping document from the test resources.
  InputStream mappingStream = getClass().getResourceAsStream("../mappingFiles/test5.ttl");
  JenaR2RMLMappingManager manager = JenaR2RMLMappingManager.getInstance();
  Model mappingModel = ModelFactory.createDefaultModel();
  mappingModel = mappingModel.read(mappingStream, "testMapping", "TURTLE");

  // Exactly one triples map is expected in this mapping.
  Collection<TriplesMap> triplesMaps = manager.importMappings(mappingModel);
  Assert.assertTrue(triplesMaps.size() == 1);
  for (TriplesMap triplesMap : triplesMaps) {
    // The subject template must reference the EMPNO column.
    SubjectMap subjectMap = triplesMap.getSubjectMap();
    Template template = subjectMap.getTemplate();
    Assert.assertTrue(template.getColumnName(0).contains("EMPNO"));
    // The logical table must be the EMP2DEPT base table.
    LogicalTable logicalTable = triplesMap.getLogicalTable();
    SQLBaseTableOrViewImpl baseTable = (SQLBaseTableOrViewImpl) logicalTable;
    Assert.assertTrue(baseTable.getTableName().contains("EMP2DEPT"));
  }
}
/** Verifies that every predicate-object map in test17.ttl carries exactly two predicate maps. */
@Test
public void test() throws Exception {
  // Load the R2RML mapping document from the test resources.
  InputStream mappingStream = getClass().getResourceAsStream("../mappingFiles/test17.ttl");
  JenaR2RMLMappingManager manager = JenaR2RMLMappingManager.getInstance();
  Model mappingModel = ModelFactory.createDefaultModel();
  mappingModel = mappingModel.read(mappingStream, "testMapping", "TURTLE");

  // Exactly one triples map is expected in this mapping.
  Collection<TriplesMap> triplesMaps = manager.importMappings(mappingModel);
  Assert.assertTrue(triplesMaps.size() == 1);
  for (TriplesMap triplesMap : triplesMaps) {
    for (PredicateObjectMap pom : triplesMap.getPredicateObjectMaps()) {
      // Count the predicate maps attached to this predicate-object map.
      int predicateMapCount = 0;
      Iterator<PredicateMap> predicateMaps = pom.getPredicateMaps().iterator();
      while (predicateMaps.hasNext()) {
        predicateMaps.next();
        predicateMapCount++;
      }
      Assert.assertTrue(predicateMapCount == 2);
    }
  }
}
@Test public void testRead() throws Exception { Model shapesModel = RDFReaderFactory.createResourceReader(shapeResource).read(); List<Shape> shapes = shapesModel .listResourcesWithProperty(RDF.type, SHACL.Shape) .toList() .stream() .map(r -> ShapeReader.create().read(r)) .collect(Collectors.toList()); assertThat(shapes).hasSize(1); Shape sh = shapes.get(0); assertThat(sh.getScopes()).hasSize(ShapeScopeType.values().length); List<ShapeScopeType> scopeTypes = sh.getScopes() .stream() .map(ShapeScope::getScopeType) .distinct() .collect(Collectors.toList()); // distinct scopes assertThat(scopeTypes).hasSize(ShapeScopeType.values().length); }
/**
 * Find the set of resources referenced by the path.
 *
 * <p>Starting from {@code first}, each {@link Step} is followed in turn: a forward step maps a
 * node to the objects of the step's property; an inverse step maps a node to the subjects having
 * it as a value of that property.
 *
 * @param model the model to traverse
 * @param first the starting node
 * @param path the path whose steps are followed
 * @return the set of nodes reachable from {@code first} via {@code path}
 */
public static Set<RDFNode> getNodes(Model model, RDFNode first, Path path) {
  List<Step> steps = path.getSteps();
  Set<RDFNode> starts = Collections.singleton(first);
  for (Step step : steps) {
    String propertyName = step.getPropertyName();
    boolean isInverse = step.isInverse();
    Property property = model.createProperty(propertyName);
    Set<RDFNode> nodes = new HashSet<RDFNode>();
    for (RDFNode start : starts) {
      Set<RDFNode> tmp;
      if (!isInverse && start.isResource()) {
        // Forward step: only resources can appear in subject position.
        // (Bug fix: previously a literal here made asResource() throw.)
        tmp = model.listObjectsOfProperty(start.asResource(), property).toSet();
      } else if (isInverse && start.isResource()) {
        // Inverse step: collect subjects that point at this node.
        tmp = new HashSet<RDFNode>(model.listSubjectsWithProperty(property, start).toSet());
      } else {
        // Literal start node: no traversal possible in either direction here.
        tmp = Collections.<RDFNode>emptySet();
      }
      nodes.addAll(tmp);
    }
    starts = nodes;
  }
  return starts;
}
/**
 * Determine whether the given property is recognized and treated specially by this reasoner. This
 * is a convenience packaging of a special case of getCapabilities.
 *
 * @param property the property which we want to ask the reasoner about, given as a Node since
 *     this is part of the SPI rather than API
 * @return true if the given property is handled specially by the reasoner.
 */
@Override
public boolean supportsProperty(Property property) {
  if (factory == null) {
    return false;
  }
  // Look up this reasoner's capability description and test for the supportsP triple.
  Model capabilities = factory.getCapabilities();
  Resource reasonerRoot = capabilities.getResource(factory.getURI());
  return capabilities.contains(reasonerRoot, ReasonerVocabulary.supportsP, property);
}
private Dataset assemble(final Resource example) { Model model = example.getModel(); model.setNsPrefix("ja", JA.getURI()); // System.out.println("-------------"); // RDFDataMgr.write(System.out, model, Lang.TTL) ; final InMemDatasetAssembler testAssembler = new InMemDatasetAssembler(); return testAssembler.open(testAssembler, example, DEFAULT); }
/** The same data parsed from Turtle and from RDF/XML must yield isomorphic models. */
@Test
public void testEquality() throws Exception {
  final Model fromTurtle = ModelFactory.createDefaultModel().read(getTTLInput(), NS, "TTL");
  final Model fromRdfXml =
      ModelFactory.createDefaultModel().read(getRDFInput(), NS, "RDF/XML-ABBREV");
  // Isomorphism is symmetric; assert both directions anyway.
  assertTrue(fromTurtle.isIsomorphicWith(fromRdfXml));
  assertTrue(fromRdfXml.isIsomorphicWith(fromTurtle));
}
/** An assembler description with no data sources must produce an empty dataset. */
@Test
public void emptyDataset() {
  final Model descriptionModel = createDefaultModel();
  final Resource emptyDescription = descriptionModel.createResource("test:empty");
  emptyDescription.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem);
  final Dataset dataset = assemble(emptyDescription);
  assertFalse(dataset.asDatasetGraph().find().hasNext());
}
/**
 * Round-trips quads through a file: writes a quad file, then assembles a dataset that loads it via
 * a ja:data link and checks the quads landed in the expected named graph.
 *
 * @throws IOException if the temporary quad file cannot be written
 */
@Test
public void directDataLinkToQuads() throws IOException {
  // first make a file of quads to load later
  final Model model = createDefaultModel();
  final Path quads = createTempFile("quadExample", ".nq");
  // The graph name used in the quad file is the file's own URI.
  final Resource quadsURI = model.createResource(quads.toFile().toURI().toString());
  final Resource simpleExample = model.createResource("test:simpleExample");
  simpleExample.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem);
  // Point the dataset description at the quad file as its data source.
  simpleExample.addProperty(data, quadsURI);
  final DatasetGraph dsg = createTxnMem().asDatasetGraph();
  // Turn every statement of the description model into a quad in the quadsURI graph.
  model
      .listStatements()
      .mapWith(Statement::asTriple)
      .mapWith(t -> new Quad(quadsURI.asNode(), t))
      .forEachRemaining(dsg::add);
  try (OutputStream out = new FileOutputStream(quads.toFile())) {
    write(out, dsg, NQUADS);
  }
  // Assemble the dataset from the description; it should load the quad file.
  final Dataset dataset = assemble(simpleExample);
  final Model assembledDefaultModel = dataset.getDefaultModel();
  final Model assembledNamedModel = dataset.getNamedModel(quadsURI.getURI());
  // All data went into the named graph; the default graph stays empty.
  assertTrue(assembledDefaultModel.isEmpty());
  assertTrue(
      assembledNamedModel.contains(
          assembledNamedModel.createStatement(simpleExample, data, quadsURI)));
}
/**
 * Run a single test of any sort, return true if the test succeeds.
 *
 * <p>Dispatches on the RDF types of the test resource: entailment tests compare reasoner output
 * against a conclusions document; (in)consistency tests check graph validity. The wall-clock
 * duration of the reasoning step is recorded in {@code lastTestDuration}.
 *
 * @param test the test description resource
 * @return true if the test passed
 * @throws IOException if a test document cannot be read
 */
public boolean doRunTest(Resource test) throws IOException {
  if (test.hasProperty(RDF.type, OWLTest.PositiveEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.OWLforOWLTest)
      || test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest)
      || test.hasProperty(RDF.type, OWLTest.TrueTest)) {
    // Entailment tests
    boolean processImports = test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest);
    Model premises = getDoc(test, RDFTest.premiseDocument, processImports);
    Model conclusions = getDoc(test, RDFTest.conclusionDocument);
    // Add comprehension axioms so restrictions/intersections in the conclusions can be matched.
    comprehensionAxioms(premises, conclusions);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(premises.getGraph());
    if (printProfile) {
      ((FBRuleInfGraph) graph).resetLPProfile(true);
    }
    Model result = ModelFactory.createModelForGraph(graph);
    boolean correct = WGReasonerTester.testConclusions(conclusions.getGraph(), result.getGraph());
    long t2 = System.currentTimeMillis();
    lastTestDuration = t2 - t1;
    if (printProfile) {
      ((FBRuleInfGraph) graph).printLPProfile();
    }
    // A negative entailment test passes when the conclusions are NOT entailed.
    if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)) {
      correct = !correct;
    }
    return correct;
  } else if (test.hasProperty(RDF.type, OWLTest.InconsistencyTest)) {
    // System.out.println("Starting: " + test);
    Model input = getDoc(test, RDFTest.inputDocument);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(input.getGraph());
    // Inconsistency test passes when validation FAILS.
    boolean correct = !graph.validate().isValid();
    long t2 = System.currentTimeMillis();
    lastTestDuration = t2 - t1;
    return correct;
  } else if (test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) {
    // Not used normally because we are not complete enough to prove consistency
    // System.out.println("Starting: " + test);
    Model input = getDoc(test, RDFTest.inputDocument);
    long t1 = System.currentTimeMillis();
    InfGraph graph = reasoner.bind(input.getGraph());
    boolean correct = graph.validate().isValid();
    long t2 = System.currentTimeMillis();
    lastTestDuration = t2 - t1;
    return correct;
  } else {
    // Unrecognised test type: dump the types seen to aid debugging, then fail loudly.
    for (StmtIterator i = test.listProperties(RDF.type); i.hasNext(); ) {
      System.out.println("Test type = " + i.nextStatement().getObject());
    }
    throw new ReasonerException("Unknown test type");
  }
}
/** A memory model assembled from ja:externalContent must be isomorphic with the file's data. */
public void testWithContent() throws IOException {
  // Write a small model to a temporary N3 file that the assembler will load as external content.
  final File contentFile = FileUtils.tempFileName("assembler-acceptance-", ".n3");
  final Model expected = model("a P b; b Q c");
  try (FileOutputStream out = new FileOutputStream(contentFile)) {
    expected.write(out, "N3");
  }
  // Describe a memory model whose content comes from that file.
  final Resource root =
      resourceInModel(
          "x rdf:type ja:MemoryModel; x ja:content y; y ja:externalContent file:"
              + contentFile.getAbsolutePath());
  final Model assembled = Assembler.general.openModel(root);
  assertIsoModels(expected, assembled);
}
/**
 * Parses an RDF/XML stream and extracts, for every resource carrying a title property, its title
 * together with the full names and years found on its property values.
 *
 * <p>Extraction is best-effort: malformed entries are skipped silently rather than aborting the
 * whole run.
 *
 * @param in the RDF/XML input stream; if null, an empty list is returned
 * @return the list of extracted {@code RdfModel} records
 */
public static ArrayList<RdfModel> processRDF(InputStream in) {
  Model model = ModelFactory.createDefaultModel();
  ArrayList<RdfModel> result = new ArrayList<RdfModel>();
  if (in != null) {
    model.read(in, "RDF/XML");
    // Only these properties matter here: has-title, year-of, full-name.
    for (final ResIterator it = model.listSubjectsWithProperty(RdfPropertyList.p_hasTitle);
        it.hasNext();
        ) {
      RdfModel rm = new RdfModel();
      try {
        // node is a resource which has a title property
        final Resource node = it.next().asResource();
        rm.setHasTitle(node.getProperty(RdfPropertyList.p_hasTitle).getString());
        StringBuilder authors = new StringBuilder();
        StringBuilder dates = new StringBuilder();
        // Walk all property values of the titled resource looking for names and years.
        for (final StmtIterator all_props = node.listProperties(); all_props.hasNext(); ) {
          try {
            Resource all_res = all_props.next().getObject().asResource();
            StmtIterator fullnames = all_res.listProperties(RdfPropertyList.p_fullName);
            StmtIterator years = all_res.listProperties(RdfPropertyList.p_year);
            // Multiple authors or dates are collected into one string, separated by commas.
            RdfProcess newprocess = new RdfProcess();
            while (fullnames.hasNext()) {
              String fullname = newprocess.getValue(fullnames.next().getObject());
              if (!fullname.equals("Invalid/Lack of Information")) {
                authors.append(fullname + " , ");
              }
            }
            while (years.hasNext()) {
              String year = newprocess.getValue(years.next().getObject());
              if (!year.equals("Invalid/Lack of Information")) {
                dates.append(year + " , ");
              }
            }
          } catch (Exception e) {
            // Best-effort: property values that are not resources (e.g. literals) are skipped.
          }
        }
        rm.setHasDate(dates.toString());
        rm.setHasAuthor(authors.toString());
      } catch (Exception e) {
        // Best-effort: a malformed entry still contributes an (empty) RdfModel below.
      }
      result.add(rm);
    }
  }
  return result;
}
/**
 * Initialize the result model: creates the "jena2" system resource with a human-readable XHTML
 * description and registers the results namespace prefix.
 */
public void initResults() {
  testResults = ModelFactory.createDefaultModel();
  jena2 = testResults.createResource(BASE_RESULTS_URI + "#jena2");
  // Well-formed XHTML literal describing the system under test.
  // Fixed typos in the published text: "sourceforce" -> "sourceforge",
  // "resasoning" -> "reasoning", and a missing space that produced "Inparticular".
  jena2.addProperty(
      RDFS.comment,
      testResults.createLiteral(
          "<a xmlns=\"http://www.w3.org/1999/xhtml\" href=\"http://jena.sourceforge.net/\">Jena2</a> includes a rule-based inference engine for RDF processing, "
              + "supporting both forward and backward chaining rules. Its OWL rule set is designed to provide sound "
              + "but not complete instance reasoning for that fragment of OWL/Full limited to the OWL/lite vocabulary. In "
              + "particular it does not support unionOf/complementOf.",
          true));
  jena2.addProperty(RDFS.label, "Jena2");
  testResults.setNsPrefix("results", OWLResults.NS);
}
public static void main(String... argv) throws Exception { // //Model model = ModelFactory.createDefaultModel() ; // //String x = "<s> <p> 'verify it works' ." ; // // // //Reader sr = getTTLReader(); // //model.read(sr, "http://example/", "TTL") ; // //model.read(sr, "", "TTL") ; // //model.read( getRDFInput() ); // Model ttl = ModelFactory.createDefaultModel().read( getTTLInput(), // "", "TTL"); // Model rdf = ModelFactory.createDefaultModel().read( getRDFInput(), // "", "RDF/XML-ABBREV"); // // ttl.write(System.out, "RDF/XML-ABBREV") ; // System.out.println("-----") ; // // model.setNsPrefix("ex", "http://example/") ; // rdf.write(System.out, "N-TRIPLES") ; // System.out.println("-----") ; // System.out.println( getTTLName() ); // System.out.println( "ttl iso rdf: "+ttl.isIsomorphicWith(rdf)); // // System.out.println( getRDFName() ); // System.out.println( "rdf iso ttl: "+rdf.isIsomorphicWith(ttl)); String[] lines = { "<rdf:RDF", " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\">", " <rdf:Description rdf:about=\"e\">", " <p5>verify base works</p5>", " </rdf:Description>", "</rdf:RDF>" }; String eol = System.getProperty("line.separator"); StringBuilder sb = new StringBuilder(); for (String l : lines) { sb.append(l).append(eol); } Model model = ModelFactory.createDefaultModel(); StringReader sr = new StringReader(sb.toString()); model.read(sr, "http://example/"); model.write(System.out, "N-TRIPLES"); System.out.println("-----"); model.setNsPrefix("ex", "http://example/"); model.write(System.out, "RDF/XML-ABBREV", "http://another/"); }
/**
 * Populates the RDF resources and descriptive fields for this application entity from the DTO:
 * the entity and parent resources, resource name/id, label string, and ontology reference.
 */
@Override
public void initResource() {
  applicationEntity = model.createResource(baseuri + this.dto.get_uri());
  parentResource = model.createResource(baseuri + "/" + Utils.getParentURI(this.dto.get_uri()));
  resourceName = dto.getRn();
  resourceId = dto.getRi();
  if (dto.getLbl() != null) {
    for (int i = 0; i < dto.getLbl().length; i++) {
      // NOTE(review): every element is prepended with ",", so the result starts with a
      // separator — and with "null" if the 'label' field was not initialized elsewhere.
      // Confirm this concatenation is intended.
      label = label + "," + dto.getLbl()[i];
    }
  } else {
    label = "";
  }
  ontologyReference = this.dto.getOr();
}
/** Replacing a named model must keep the graph name but swap its contents. */
@Test
public void dataset_05() {
  final String graphName = "http://example/";
  final Dataset dataset = createFixed();
  dataset.addNamedModel(graphName, model1);
  dataset.replaceNamedModel(graphName, model2);
  assertTrue(dataset.containsNamedModel(graphName));

  // Exactly one graph name remains after the replacement.
  final List<String> names = Iter.toList(dataset.listNames());
  assertEquals(1, names.size());
  assertEquals(graphName, names.get(0));

  // The stored graph is now model2, not model1.
  assertFalse(model1.isIsomorphicWith(dataset.getNamedModel(graphName)));
  assertTrue(model2.isIsomorphicWith(dataset.getNamedModel(graphName)));
}
/**
 * Copies every statement of the given map into a fresh model, rewriting subjects and objects
 * through {@code checkUri} whenever either end of the statement is a URI resource.
 *
 * @param map the model to copy
 * @return a new model containing the (possibly rewritten) statements
 */
private Model JoinMap(Model map) {
  final Model rewritten = ModelFactory.createDefaultModel();
  final StmtIterator statements = map.listStatements();
  while (statements.hasNext()) {
    Statement statement = statements.next();
    final boolean touchesUri =
        statement.getSubject().isURIResource() || statement.getObject().isURIResource();
    if (touchesUri) {
      // Rebuild the statement with both ends passed through checkUri.
      statement =
          rewritten.createStatement(
              checkUri(statement.getSubject().asNode(), rewritten).asResource(),
              statement.getPredicate(),
              checkUri(statement.getObject().asNode(), rewritten));
    }
    rewritten.add(statement);
  }
  return rewritten;
}
/** Return a list of all tests of the given type, according to the current filters */ public List<Resource> findTestsOfType(Resource testType) { ArrayList<Resource> result = new ArrayList<>(); StmtIterator si = testDefinitions.listStatements(null, RDF.type, testType); while (si.hasNext()) { Resource test = si.nextStatement().getSubject(); boolean accept = true; // Check test status Literal status = (Literal) test.getProperty(RDFTest.status).getObject(); if (approvedOnly) { accept = status.getString().equals(STATUS_FLAGS[0]); } else { accept = false; for (String STATUS_FLAG : STATUS_FLAGS) { if (status.getString().equals(STATUS_FLAG)) { accept = true; break; } } } // Check for blocked tests for (String BLOCKED_TEST : BLOCKED_TESTS) { if (BLOCKED_TEST.equals(test.toString())) { accept = false; } } // End of filter tests if (accept) { result.add(test); } } return result; }
/** Test RDF model of a Context. */
@Test
public final void testRdfModel() {
  final Context context =
      new Context("My favorite actress is: Natalie Portman. She is very " + "stunning.", 0, 62);
  final Sentence sentence = NullSentence.getInstance();
  final Model model = ModelFactory.createDefaultModel();
  context.addSentence(sentence);

  // Namespace setup: NIF core vocabulary plus a local base.
  final String nif = "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#";
  final String base = "http://127.0.0.1/stanfordnlp#";
  final Map<String, String> prefixes = new HashMap<>();
  prefixes.put("nif", nif);
  prefixes.put("local", base);
  prefixes.put("xsd", "http://www.w3.org/2001/XMLSchema#");
  model.setNsPrefixes(prefixes);

  // Every statement is about the same character-range resource; build its URI once.
  final String contextUri = base + "char=0,62";
  model.add(
      ResourceFactory.createResource(contextUri),
      RDF.type,
      ResourceFactory.createResource(nif + "String"));
  model.add(
      ResourceFactory.createResource(contextUri),
      RDF.type,
      ResourceFactory.createResource(nif + "RFC5147String"));
  model.add(
      ResourceFactory.createResource(contextUri),
      RDF.type,
      ResourceFactory.createResource(nif + "Context"));
  model.add(
      ResourceFactory.createResource(contextUri),
      ResourceFactory.createProperty(nif + "beginIndex"),
      ResourceFactory.createTypedLiteral("0", XSDDatatype.XSDnonNegativeInteger));
  model.add(
      ResourceFactory.createResource(contextUri),
      ResourceFactory.createProperty(nif + "endIndex"),
      ResourceFactory.createTypedLiteral("62", XSDDatatype.XSDnonNegativeInteger));
  model.add(
      ResourceFactory.createResource(contextUri),
      ResourceFactory.createProperty(nif + "isString"),
      ResourceFactory.createTypedLiteral(
          "My favorite actress is: Natalie Portman. She is very " + "stunning."));

  // The hand-built model must match what Context.rdfModel produces.
  Assert.assertTrue(
      "Issue to create the model for a Context",
      model.isIsomorphicWith(context.rdfModel("stanfordnlp", NlpProcess.POS)));
}
/** Opening the secEvaluator2 description must yield a model-based security evaluator. */
@Test
public void testSecurityEvaluatorWithModelArgs() throws Exception {
  final Resource description =
      model.createResource("http://apache.org/jena/permissions/test#secEvaluator2");
  final Object assembled = assembler.open(description);
  Assert.assertTrue(assembled instanceof SecurityEvaluator);
  Assert.assertTrue(assembled instanceof ModelBasedSecurityEvaluator);
}
/** Opening the secModel2 description must yield a secured model. */
@Test
public void testCreationWithArgs() throws Exception {
  final Resource description =
      model.createResource("http://apache.org/jena/permissions/test#secModel2");
  final Object assembled = assembler.open(description);
  Assert.assertTrue(assembled instanceof Model);
  Assert.assertTrue(assembled instanceof SecuredModel);
}
/** * Example the conclusions graph for introduction of restrictions which require a comprehension * rewrite and declare new (anon) classes for those restrictions. */ public void comprehensionAxioms(Model premises, Model conclusions) { // Comprehend all restriction declarations and note them in a map Map<Resource, Resource> comprehension = new HashMap<>(); StmtIterator ri = conclusions.listStatements(null, RDF.type, OWL.Restriction); while (ri.hasNext()) { Resource restriction = ri.nextStatement().getSubject(); StmtIterator pi = restriction.listProperties(OWL.onProperty); while (pi.hasNext()) { Resource prop = (Resource) pi.nextStatement().getObject(); StmtIterator vi = restriction.listProperties(); while (vi.hasNext()) { Statement rs = vi.nextStatement(); if (!rs.getPredicate().equals(OWL.onProperty)) { // Have a restriction on(prop) of type rs in the conclusions // So assert a premise that such a restriction could exisit Resource comp = premises .createResource() .addProperty(RDF.type, OWL.Restriction) .addProperty(OWL.onProperty, prop) .addProperty(rs.getPredicate(), rs.getObject()); comprehension.put(restriction, comp); } } } } // Comprehend any intersectionOf lists. Introduce anon class which has the form // of the intersection expression. 
// Rewrite queries of the form (X intersectionOf Y) to the form // (X equivalentClass ?CC) (?CC intersectionOf Y) StmtIterator ii = conclusions.listStatements(null, OWL.intersectionOf, (RDFNode) null); List<Statement> intersections = new ArrayList<>(); while (ii.hasNext()) { intersections.add(ii.nextStatement()); } for (Statement is : intersections) { // Declare in the premises that such an intersection exists Resource comp = premises .createResource() .addProperty(RDF.type, OWL.Class) .addProperty( OWL.intersectionOf, mapList(premises, (Resource) is.getObject(), comprehension)); // Rewrite the conclusions to be a test for equivalence between the class being // queried and the comprehended interesection conclusions.remove(is); conclusions.add(is.getSubject(), OWL.equivalentClass, comp); } // Comprehend any oneOf lists StmtIterator io = conclusions.listStatements(null, OWL.oneOf, (RDFNode) null); while (io.hasNext()) { Statement s = io.nextStatement(); Resource comp = premises.createResource().addProperty(OWL.oneOf, s.getObject()); } }
/** Creates a fresh model and loads the test data that lives next to this class on the classpath. */
@Before
public void setUp() throws Exception {
  model = ModelFactory.createDefaultModel();
  // The Turtle fixture is named after the test class itself.
  final URL dataUrl =
      SecuredAssemblerTest.class
          .getClassLoader()
          .getResource(SecuredAssemblerTest.class.getName().replace(".", "/") + ".ttl");
  model.read(dataUrl.toURI().toString(), "TURTLE");
}
public static void main(String args[]) { OntModel m = ModelFactory.createOntologyModel(); OntDocumentManager dm = m.getDocumentManager(); dm.addAltEntry( "http://www.eswc2006.org/technologies/ontology", "file:" + JENA + "src/examples/resources/eswc-2006-09-21.rdf"); m.read("http://www.eswc2006.org/technologies/ontology"); // create an empty model Model model = ModelFactory.createDefaultModel(); // create the resource Resource johnSmith = model.createResource(personURI); // add the property johnSmith.addProperty(VCARD.FN, fullName); johnSmith.addProperty( VCARD.N, model.createResource().addProperty(VCARD.Given, "jon").addProperty(VCARD.Family, "Smit")); // list the statements in the Model StmtIterator iter = model.listStatements(); // print out the predicate, subject and object of each statement while (iter.hasNext()) { Statement stmt = iter.nextStatement(); // get next statement Resource subject = stmt.getSubject(); // get the subject Property predicate = stmt.getPredicate(); // get the predicate RDFNode object = stmt.getObject(); // get the object System.out.print(subject.toString()); System.out.print(" " + predicate.toString() + " "); if (object instanceof Resource) { System.out.print(object.toString()); } else { // object is a literal System.out.print(" \"" + object.toString() + "\""); } System.out.println(" ."); } }
/** Copying an empty RDFS closure into a plain model must round-trip isomorphically. */
public void testBasic() {
  final Model inferred = ModelFactory.createRDFSModel(ModelFactory.createDefaultModel());
  final Model plain = ModelFactory.createDefaultModel();
  plain.add(inferred);
  // Check isomorphism in both directions.
  assertIsomorphic(plain.getGraph(), inferred.getGraph());
  assertIsomorphic(inferred.getGraph(), plain.getGraph());
}
@Test public void dataset_04() { String graphName = "http://example/"; Dataset ds = createFixed(); ds.addNamedModel(graphName, model1); assertTrue(ds.containsNamedModel(graphName)); List<String> x = Iter.toList(ds.listNames()); assertEquals(1, x.size()); assertEquals(graphName, x.get(0)); assertFalse(model1.isIsomorphicWith(ds.getDefaultModel())); Model m = ds.getNamedModel(graphName); assertNotNull(m); assertTrue(model1.isIsomorphicWith(m)); ds.removeNamedModel(graphName); // Not getNamedModel and test for null as some datasets are "auto graph creating" assertFalse(ds.containsNamedModel(graphName)); }
public static void main(String[] args) { DOMParser domparser = new DOMParser(); LogCtl.setLog4j("jena-log4j.properties"); Model model = ModelFactory.createDefaultModel(); domparser.buildModel(model); String fileName = "10Authors.rdf"; FileWriter out; try { out = new FileWriter(fileName); model.write(out, "RDF/XML-ABBREV"); out.close(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } }
/**
 * Computes the difference between the default models of two datasets and writes the triples
 * present in the first but missing from the second to a Turtle file.
 *
 * @param args two dataset locations: db1 and db2
 * @throws IOException if the output file cannot be written
 */
public static void main(String[] args) throws IOException {
  if (args.length != 2) {
    err.println("usage: difference <db1> <db2>");
    // Bug fix: previously execution fell through and crashed with
    // ArrayIndexOutOfBoundsException on args[0]/args[1].
    return;
  }
  Dataset ds1 = dataset_(args[0]);
  Dataset ds2 = dataset_(args[1]);
  Model m1 = ds1.getDefaultModel();
  Model m2 = ds2.getDefaultModel();
  System.out.println(m1.size());
  System.out.println(m2.size());

  // Triples present in db1 but missing from db2.
  Model m1_minus_m2 = m1.difference(m2);
  // try-with-resources closes the stream (the original never closed it).
  try (FileOutputStream out =
      new FileOutputStream("missing-orthologousMatch-in-biological-concepts-db.ttl")) {
    RDFDataMgr.write(out, m1_minus_m2, Lang.TURTLE);
  }
  ds1.close();
  ds2.close();
}