/** * Adds a GAMSO activity to the Jena model. * * @param activityNumber An array of integers with the components of the activity code. * @param activityLabel The label of the activity as read from the Word document. * @param activityDescription The components of the activity description (a <code>List</code> of * strings). */ private void addActivityToModel( int[] activityNumber, String activityLabel, List<String> activityDescription) { String code = String.format("%d", activityNumber[0]); if (activityNumber[1] > 0) code += String.format(".%d", activityNumber[1]); if (activityNumber[2] > 0) code += String.format(".%d", activityNumber[2]); String parentCode = getParentCode(code); logger.debug("Adding activity " + code + " - " + activityLabel); Resource gamsoConcept = gamsoModel.createResource(GAMSO_BASE_URI + code, SKOS.Concept); gamsoConcept.addProperty(RDF.type, CSPAOnto.GAMSOActivity); gamsoConcept.addProperty(SKOS.notation, code); gamsoConcept.addProperty(SKOS.prefLabel, gamsoModel.createLiteral(activityLabel, "en")); gamsoConcept.addProperty(SKOS.inScheme, gamsoCS); if (parentCode == null) { gamsoCS.addProperty(SKOS.hasTopConcept, gamsoConcept); gamsoConcept.addProperty(SKOS.topConceptOf, gamsoCS); } else { Resource parentConcept = gamsoModel.createResource(GAMSO_BASE_URI + parentCode); parentConcept.addProperty(SKOS.narrower, gamsoConcept); gamsoConcept.addProperty(SKOS.broader, parentConcept); } }
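The helper getParentCode used above is not shown in this excerpt. A minimal sketch, assuming activity codes are dot-separated (so the parent of "2.1.3" is "2.1", and top-level codes such as "2" have no parent):

// Hypothetical sketch of the helper used by addActivityToModel; assumes
// dot-separated codes. Returns null for top-level activities, which the
// caller treats as top concepts of the scheme.
private String getParentCode(String code) {
    int lastDot = code.lastIndexOf('.');
    return (lastDot < 0) ? null : code.substring(0, lastDot);
}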
@Override public void populateEntity( RdfPersistenceContext persistenceContext, Object entity, Node subject, Graph graph, Consumer<Triple> outSink) { Model model = ModelFactory.createModelForGraph(graph); RDFNode root = ModelUtils.convertGraphNodeToRDFNode(subject, model); @SuppressWarnings("unchecked") Map<Object, Object> map = createMapView.apply(entity); for (Statement stmt : root.asResource().listProperties(entry).toList()) { Resource e = stmt.getObject().asResource(); Node kNode = e.getProperty(key).getObject().asNode(); Node vNode = e.getProperty(value).getObject().asNode(); // TODO: We need to dynamically figure out which entity the node could be RdfType rdfType = null; Object k = persistenceContext.entityFor( Object.class, kNode, null); // new TypedNode(rdfType, kNode)); Object v = persistenceContext.entityFor( Object.class, vNode, null); // new TypedNode(rdfType, vNode)); map.put(k, v); } }
/** Return a list of all tests of the given type, according to the current filters */ public List<Resource> findTestsOfType(Resource testType) { ArrayList<Resource> result = new ArrayList<>(); StmtIterator si = testDefinitions.listStatements(null, RDF.type, testType); while (si.hasNext()) { Resource test = si.nextStatement().getSubject(); boolean accept = true; // Check test status Literal status = (Literal) test.getProperty(RDFTest.status).getObject(); if (approvedOnly) { accept = status.getString().equals(STATUS_FLAGS[0]); } else { accept = false; for (String STATUS_FLAG : STATUS_FLAGS) { if (status.getString().equals(STATUS_FLAG)) { accept = true; break; } } } // Check for blocked tests for (String BLOCKED_TEST : BLOCKED_TESTS) { if (BLOCKED_TEST.equals(test.toString())) { accept = false; } } // End of filter tests if (accept) { result.add(test); } } return result; }
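The constants STATUS_FLAGS and BLOCKED_TESTS are referenced above but not declared in this excerpt. Plausible declarations follow; the flag values are assumptions based on the W3C test-manifest vocabulary, not taken from the source:

// Hypothetical declarations for the filter constants used above.
// STATUS_FLAGS[0] must be the "approved" flag for the approvedOnly branch
// to behave as intended; the values here are assumptions.
private static final String[] STATUS_FLAGS = {"APPROVED", "PROPOSED"};
private static final String[] BLOCKED_TESTS = {
    // full URIs of tests to skip, e.g. known problem cases (placeholders only)
};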
@Override public SecuredResource getResource(final int index) throws ReadDeniedException, AuthenticationRequiredException { checkRead(); final Resource retval = holder.getBaseItem().getResource(index); checkRead(new Triple(holder.getBaseItem().asNode(), RDF.li(index).asNode(), retval.asNode())); return SecuredResourceImpl.getInstance(getModel(), retval); }
@Test public void emptyDataset() { final Model model = createDefaultModel(); final Resource empty = model.createResource("test:empty"); empty.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem); Dataset dataset = assemble(empty); assertFalse(dataset.asDatasetGraph().find().hasNext()); }
/** * Update or persist a domain object that is not a String, Date, or one of the usual primitive * types. The write style is set to shallow=true, which stops recursive traversal of the object * graph. * * @param subject the resource being written to * @param property the property to set * @param o the value object to persist */ private void setPropertyValue(Resource subject, Property property, Object o) { Statement s = subject.getProperty(property); Resource existing = null; if (s != null) { existing = s.getResource(); if (existing.isAnon()) existing.removeProperties(); // clean up the old anonymous value } subject.removeAll(property).addProperty(property, _write(o, true)); }
/** * Examine the conclusions graph for introduction of restrictions which require a comprehension * rewrite and declare new (anon) classes for those restrictions. */ public void comprehensionAxioms(Model premises, Model conclusions) { // Comprehend all restriction declarations and note them in a map Map<Resource, Resource> comprehension = new HashMap<>(); StmtIterator ri = conclusions.listStatements(null, RDF.type, OWL.Restriction); while (ri.hasNext()) { Resource restriction = ri.nextStatement().getSubject(); StmtIterator pi = restriction.listProperties(OWL.onProperty); while (pi.hasNext()) { Resource prop = (Resource) pi.nextStatement().getObject(); StmtIterator vi = restriction.listProperties(); while (vi.hasNext()) { Statement rs = vi.nextStatement(); if (!rs.getPredicate().equals(OWL.onProperty)) { // Have a restriction on(prop) of type rs in the conclusions // So assert a premise that such a restriction could exist Resource comp = premises .createResource() .addProperty(RDF.type, OWL.Restriction) .addProperty(OWL.onProperty, prop) .addProperty(rs.getPredicate(), rs.getObject()); comprehension.put(restriction, comp); } } } } // Comprehend any intersectionOf lists. Introduce an anon class which has the form // of the intersection expression. // Rewrite queries of the form (X intersectionOf Y) to the form // (X equivalentClass ?CC) (?CC intersectionOf Y) StmtIterator ii = conclusions.listStatements(null, OWL.intersectionOf, (RDFNode) null); List<Statement> intersections = new ArrayList<>(); while (ii.hasNext()) { intersections.add(ii.nextStatement()); } for (Statement is : intersections) { // Declare in the premises that such an intersection exists Resource comp = premises .createResource() .addProperty(RDF.type, OWL.Class) .addProperty( OWL.intersectionOf, mapList(premises, (Resource) is.getObject(), comprehension)); // Rewrite the conclusions to be a test for equivalence between the class being // queried and the comprehended intersection conclusions.remove(is); conclusions.add(is.getSubject(), OWL.equivalentClass, comp); } // Comprehend any oneOf lists StmtIterator io = conclusions.listStatements(null, OWL.oneOf, (RDFNode) null); while (io.hasNext()) { Statement s = io.nextStatement(); // The resource is created purely for its side effect of asserting the oneOf premise Resource comp = premises.createResource().addProperty(OWL.oneOf, s.getObject()); } }
/** Run a single test of any sort, returning true if the test succeeds. */ public boolean doRunTest(Resource test) throws IOException { if (test.hasProperty(RDF.type, OWLTest.PositiveEntailmentTest) || test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest) || test.hasProperty(RDF.type, OWLTest.OWLforOWLTest) || test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest) || test.hasProperty(RDF.type, OWLTest.TrueTest)) { // Entailment tests boolean processImports = test.hasProperty(RDF.type, OWLTest.ImportEntailmentTest); Model premises = getDoc(test, RDFTest.premiseDocument, processImports); Model conclusions = getDoc(test, RDFTest.conclusionDocument); comprehensionAxioms(premises, conclusions); long t1 = System.currentTimeMillis(); InfGraph graph = reasoner.bind(premises.getGraph()); if (printProfile) { ((FBRuleInfGraph) graph).resetLPProfile(true); } Model result = ModelFactory.createModelForGraph(graph); boolean correct = WGReasonerTester.testConclusions(conclusions.getGraph(), result.getGraph()); long t2 = System.currentTimeMillis(); lastTestDuration = t2 - t1; if (printProfile) { ((FBRuleInfGraph) graph).printLPProfile(); } if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest)) { correct = !correct; } return correct; } else if (test.hasProperty(RDF.type, OWLTest.InconsistencyTest)) { // System.out.println("Starting: " + test); Model input = getDoc(test, RDFTest.inputDocument); long t1 = System.currentTimeMillis(); InfGraph graph = reasoner.bind(input.getGraph()); boolean correct = !graph.validate().isValid(); long t2 = System.currentTimeMillis(); lastTestDuration = t2 - t1; return correct; } else if (test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) { // Not used normally because we are not complete enough to prove consistency // System.out.println("Starting: " + test); Model input = getDoc(test, RDFTest.inputDocument); long t1 = System.currentTimeMillis(); InfGraph graph = reasoner.bind(input.getGraph()); boolean correct = graph.validate().isValid(); long t2 = System.currentTimeMillis(); lastTestDuration = t2 - t1; return correct; } else { for (StmtIterator i = test.listProperties(RDF.type); i.hasNext(); ) { System.out.println("Test type = " + i.nextStatement().getObject()); } throw new ReasonerException("Unknown test type"); } }
private void saveOrUpdate(Resource subject, ValuesContext pc) { Object o = pc.invokeGetter(); Property property = toRdfProperty(pc); if (Saver.supports(pc.type())) Saver.of(pc.type()).save(this, subject, property, o); else if (o == null) subject.removeAll(property); else if (pc.isPrimitive()) subject.removeAll(property).addProperty(property, toLiteral(m, o)); else if (isNormalObject(o)) setPropertyValue(subject, property, o); else logger.log( Level.WARNING, MessageFormat.format( bundle.getString(UNSUPPORTED_TYPE), pc.type(), pc.subject.getClass())); }
private Resource toResource(Object bean) { String uri = instanceURI(bean); Resource type = getRDFSClass(bean); if (jpa.isEmbedded(bean) || uri == null) return m.createResource(type); else { // added by saeed to differentiate between save and update Resource resource = m.createResource(uri); if (resource.getProperty(RDF.type) == null) { resource.addProperty(RDF.type, type); } return resource; } }
/** Initialize the result model. */ public void initResults() { testResults = ModelFactory.createDefaultModel(); jena2 = testResults.createResource(BASE_RESULTS_URI + "#jena2"); jena2.addProperty( RDFS.comment, testResults.createLiteral( "<a xmlns=\"http://www.w3.org/1999/xhtml\" href=\"http://jena.sourceforge.net/\">Jena2</a> includes a rule-based inference engine for RDF processing, " + "supporting both forward and backward chaining rules. Its OWL rule set is designed to provide sound " + "but not complete instance reasoning for that fragment of OWL/Full limited to the OWL/lite vocabulary. In " + "particular it does not support unionOf/complementOf.", true)); jena2.addProperty(RDFS.label, "Jena2"); testResults.setNsPrefix("results", OWLResults.NS); }
@Test public void testSelectToWurcsSparql() throws SparqlException, UnsupportedEncodingException { GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct"); SparqlEntity se = new SparqlEntity(); se.setValue( GlycoSequenceToWurcsSelectSparql.FromSequence, "RES\n1b:a-dgal-HEX-1:5\n2s:n-acetyl\n3b:b-dgal-HEX-1:5\n4b:b-dglc-HEX-1:5\n5s:n-acetyl\n6b:b-dgal-HEX-1:5\n7b:a-lgal-HEX-1:5|6:d\n8b:b-dglc-HEX-1:5\n9s:n-acetyl\n10b:b-dglc-HEX-1:5\n11s:n-acetyl\n12b:b-dgal-HEX-1:5\n13b:a-lgal-HEX-1:5|6:d\nLIN\n1:1d(2+1)2n\n2:1o(3+1)3d\n3:3o(3+1)4d\n4:4d(2+1)5n\n5:4o(4+1)6d\n6:6o(2+1)7d\n7:3o(6+1)8d\n8:8d(2+1)9n\n9:1o(6+1)10d\n10:10d(2+1)11n\n11:10o(4+1)12d\n12:12o(2+1)13d" .replaceAll("\n", "\\\\n")); s.setSparqlEntity(se); logger.debug(s.getSparql()); Query query = QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", "")); // QueryExecution qe = // QueryExecutionFactory.sparqlService("http://localhost:3030/glycobase/query",query); QueryExecution qe = QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query); ResultSet rs = qe.execSelect(); List<SparqlEntity> results = new ArrayList<SparqlEntity>(); while (rs.hasNext()) { QuerySolution row = rs.next(); Iterator<String> columns = row.varNames(); SparqlEntity se2 = new SparqlEntity(); while (columns.hasNext()) { String column = columns.next(); RDFNode cell = row.get(column); if (cell.isResource()) { se2.setValue(column, cell.asResource().toString()); // a Resource is never a literal: store its URI or blank-node id } else if (cell.isLiteral()) { se2.setValue(column, cell.asLiteral().getString()); } else if (cell.isAnon()) { se2.setValue(column, "anon"); } else { se2.setValue(column, cell.toString()); } } results.add(se2); // one fresh entity per result row } for (SparqlEntity entity : results) { System.out.println("results: " + entity.getValue("PrimaryId")); } }
private Dataset assemble(final Resource example) { Model model = example.getModel(); model.setNsPrefix("ja", JA.getURI()); // System.out.println("-------------"); // RDFDataMgr.write(System.out, model, Lang.TTL) ; final InMemDatasetAssembler testAssembler = new InMemDatasetAssembler(); return testAssembler.open(testAssembler, example, DEFAULT); }
/** Load the premises or conclusions for the test. */ public Model getDoc(Resource test, Property docType) throws IOException { Model result = ModelFactory.createDefaultModel(); StmtIterator si = test.listProperties(docType); while (si.hasNext()) { String fname = si.nextStatement().getObject().toString() + ".rdf"; loadFile(fname, result); } return result; }
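loadFile is called above but not shown. A minimal sketch, assuming the test documents live in a local mirror directory (the baseDir value is an assumption, not from the source):

// Hypothetical sketch of the file loader used by getDoc: reads one RDF/XML
// document into the target model. baseDir is a placeholder assumption.
public void loadFile(String fname, Model model) throws IOException {
    final String baseDir = "testing/wg/"; // assumed local mirror of the test suite
    try (InputStream in = new FileInputStream(baseDir + fname)) {
        model.read(in, "", "RDF/XML");
    }
}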
public static void main(String args[]) { OntModel m = ModelFactory.createOntologyModel(); OntDocumentManager dm = m.getDocumentManager(); dm.addAltEntry( "http://www.eswc2006.org/technologies/ontology", "file:" + JENA + "src/examples/resources/eswc-2006-09-21.rdf"); m.read("http://www.eswc2006.org/technologies/ontology"); // create an empty model Model model = ModelFactory.createDefaultModel(); // create the resource Resource johnSmith = model.createResource(personURI); // add the property johnSmith.addProperty(VCARD.FN, fullName); johnSmith.addProperty( VCARD.N, model.createResource().addProperty(VCARD.Given, "John").addProperty(VCARD.Family, "Smith")); // list the statements in the Model StmtIterator iter = model.listStatements(); // print out the predicate, subject and object of each statement while (iter.hasNext()) { Statement stmt = iter.nextStatement(); // get next statement Resource subject = stmt.getSubject(); // get the subject Property predicate = stmt.getPredicate(); // get the predicate RDFNode object = stmt.getObject(); // get the object System.out.print(subject.toString()); System.out.print(" " + predicate.toString() + " "); if (object instanceof Resource) { System.out.print(object.toString()); } else { // object is a literal System.out.print(" \"" + object.toString() + "\""); } System.out.println(" ."); } }
@Test public void directDataLinkToQuads() throws IOException { // first make a file of quads to load later final Model model = createDefaultModel(); final Path quads = createTempFile("quadExample", ".nq"); final Resource quadsURI = model.createResource(quads.toFile().toURI().toString()); final Resource simpleExample = model.createResource("test:simpleExample"); simpleExample.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem); simpleExample.addProperty(data, quadsURI); final DatasetGraph dsg = createTxnMem().asDatasetGraph(); model .listStatements() .mapWith(Statement::asTriple) .mapWith(t -> new Quad(quadsURI.asNode(), t)) .forEachRemaining(dsg::add); try (OutputStream out = new FileOutputStream(quads.toFile())) { write(out, dsg, NQUADS); } final Dataset dataset = assemble(simpleExample); final Model assembledDefaultModel = dataset.getDefaultModel(); final Model assembledNamedModel = dataset.getNamedModel(quadsURI.getURI()); assertTrue(assembledDefaultModel.isEmpty()); assertTrue( assembledNamedModel.contains( assembledNamedModel.createStatement(simpleExample, data, quadsURI))); }
/** Run a single test of any sort, performing any appropriate logging and error reporting. */ public void runTest(Resource test) { System.out.println("Running " + test); boolean success = false; boolean fail = false; try { success = doRunTest(test); } catch (Exception e) { fail = true; System.out.print("\nException: " + e); e.printStackTrace(); } testCount++; if (success) { System.out.print((testCount % 40 == 0) ? ".\n" : "."); System.out.flush(); passCount++; } else { System.out.println("\nFAIL: " + test); } Resource resultType = null; if (fail) { resultType = OWLResults.FailingRun; } else { if (test.hasProperty(RDF.type, OWLTest.NegativeEntailmentTest) || test.hasProperty(RDF.type, OWLTest.ConsistencyTest)) { resultType = success ? OWLResults.PassingRun : OWLResults.FailingRun; } else { resultType = success ? OWLResults.PassingRun : OWLResults.IncompleteRun; } } // log to the rdf result format Resource result = testResults .createResource() .addProperty(RDF.type, OWLResults.TestRun) .addProperty(RDF.type, resultType) .addProperty(OWLResults.test, test) .addProperty(OWLResults.system, jena2); }
/** Load the premises or conclusions for the test, optionally performing import processing. */ public Model getDoc(Resource test, Property docType, boolean processImports) throws IOException { if (processImports) { Model result = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, null); StmtIterator si = test.listProperties(docType); while (si.hasNext()) { String fname = si.nextStatement().getObject().toString() + ".rdf"; loadFile(fname, result); } return result; } else { return getDoc(test, docType); } }
public static ArrayList<RdfModel> processRDF(InputStream in) { Model model = ModelFactory.createDefaultModel(); ArrayList<RdfModel> result = new ArrayList<RdfModel>(); if (in != null) { model.read(in, "RDF/XML"); // We only care about three properties here: has-title, year-of and full-name. All three must exist. for (final ResIterator it = model.listSubjectsWithProperty(RdfPropertyList.p_hasTitle); it.hasNext(); ) { RdfModel rm = new RdfModel(); try { final Resource node = it.next().asResource(); // node is a resource which has a title property rm.setHasTitle(node.getProperty(RdfPropertyList.p_hasTitle).getString()); StringBuilder authors = new StringBuilder(); StringBuilder dates = new StringBuilder(); for (final StmtIterator all_props = node.listProperties(); all_props.hasNext(); ) { try { Resource all_res = all_props.next().getObject().asResource(); StmtIterator fullnames = all_res.listProperties(RdfPropertyList.p_fullName); StmtIterator years = all_res.listProperties(RdfPropertyList.p_year); // For now, multiple authors or dates may end up in one String, separated by commas RdfProcess newprocess = new RdfProcess(); while (fullnames.hasNext()) { String fullname = newprocess.getValue(fullnames.next().getObject()); if (!fullname.equals("Invalid/Lack of Information")) { authors.append(fullname + " , "); } } while (years.hasNext()) { String year = newprocess.getValue(years.next().getObject()); if (!year.equals("Invalid/Lack of Information")) { dates.append(year + " , "); } } } catch (Exception e) { // the object was not a resource: skip it } } rm.setHasDate(dates.toString()); rm.setHasAuthor(authors.toString()); } catch (Exception e) { // skip subjects missing the expected structure } result.add(rm); } } return result; }
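The RdfPropertyList constants referenced above (p_hasTitle, p_fullName, p_year) are not defined in this excerpt. A sketch matching the property names mentioned in the comment, with the namespace URI as a placeholder assumption:

// Hypothetical sketch of the vocabulary class used above. The namespace is an
// assumption; the local names come from the comment in processRDF.
public final class RdfPropertyList {
    private static final String NS = "http://example.org/biblio#"; // assumed
    private static final Model m = ModelFactory.createDefaultModel();
    public static final Property p_hasTitle = m.createProperty(NS, "has-title");
    public static final Property p_fullName = m.createProperty(NS, "full-name");
    public static final Property p_year = m.createProperty(NS, "year-of");
}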
/** * Helper. Adds to the target model a translation of the given RDF list with each element replaced * according to the map. */ private Resource mapList(Model target, Resource list, Map<Resource, Resource> map) { if (list.equals(RDF.nil)) { return RDF.nil; } else { Resource head = (Resource) list.getRequiredProperty(RDF.first).getObject(); Resource rest = (Resource) list.getRequiredProperty(RDF.rest).getObject(); Resource mapElt = target.createResource(); Resource mapHead = map.get(head); if (mapHead == null) mapHead = head; mapElt.addProperty(RDF.first, mapHead); mapElt.addProperty(RDF.rest, mapList(target, rest, map)); return mapElt; } }
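To make the recursion concrete, a small usage sketch (not from the source): copy the RDF list (ex:a ex:b) into a target model while substituting ex:b according to the map.

// Illustrative use of mapList. Builds the list ( a b ) in one model, then
// copies it into `target` with b replaced by bPrime; a is kept unchanged
// because it has no entry in the map.
Model src = ModelFactory.createDefaultModel();
Resource a = src.createResource("http://example.org/a");
Resource b = src.createResource("http://example.org/b");
Resource list = src.createList(new RDFNode[] {a, b});
Model target = ModelFactory.createDefaultModel();
Map<Resource, Resource> map = new HashMap<>();
map.put(b, target.createResource("http://example.org/bPrime"));
Resource copy = mapList(target, list, map); // yields ( ex:a ex:bPrime ) in target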
@Test public void testKBtoWurcsSparqlTranslation() throws SparqlException { List<Translation> translations = Ebean.find(Translation.class).findList(); HashSet<String> resultList = new HashSet<>(); String ct = ""; for (Translation translation : translations) { System.out.println("id check " + translation.id + " ct " + translation.ct); if (translation.ct == null) continue; if (translation.structure.id > 0) { ct = translation.ct; GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct"); SparqlEntity se = new SparqlEntity(); ct = StringUtils.chomp(ct); System.out.println("ct on top: " + ct); if (ct != null) { se.setValue( GlycoSequenceToWurcsSelectSparql.FromSequence, ct.replaceAll("\n", "\\\\n") .replaceAll("x\\(", "u\\(") .replaceAll("\\)x", "\\)u") .trim()); s.setSparqlEntity(se); logger.debug(s.getSparql()); Query query = QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", "")); System.out.println( "Id " + translation.structure.id + " Query: " + s.getSparql().replaceAll("null", "").replace("?Sequence", "")); QueryExecution qe = QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query); ResultSet rs = qe.execSelect(); List<SparqlEntity> results = new ArrayList<>(); while (rs.hasNext()) { QuerySolution row = rs.next(); Iterator<String> columns = row.varNames(); SparqlEntity se2 = new SparqlEntity(); while (columns.hasNext()) { String column = columns.next(); RDFNode cell = row.get(column); if (cell.isResource()) { se2.setValue(column, cell.asResource().toString()); // a Resource is never a literal: store its URI or blank-node id } else if (cell.isLiteral()) { se2.setValue(column, cell.asLiteral().getString()); } else if (cell.isAnon()) { se2.setValue(column, "anon"); } else { se2.setValue(column, cell.toString()); } } results.add(se2); // one fresh entity per result row } for (SparqlEntity entity : results) { // System.out.println("results: " + entity.getValue("PrimaryId")); resultList.add( translation.structure.id + "\t" + entity.getValue("PrimaryId").toString()); } } } } for (String c : resultList) { System.out.println(c); } }
@Test public void directDataLinkForDefaultAndNamedGraphs() throws IOException { // first make a file of triples to load later final Model model = createDefaultModel(); final Path triples = createTempFile("simpleExample", ".nt"); final Resource triplesURI = model.createResource(triples.toFile().toURI().toString()); final Resource simpleExample = model.createResource("test:simpleExample"); simpleExample.addProperty(type, DatasetAssemblerVocab.tDatasetTxnMem); // add a default graph simpleExample.addProperty(data, triplesURI); // add a named graph final Resource namedGraphDef = model.createResource("test:namedGraphDef"); simpleExample.addProperty(pNamedGraph, namedGraphDef); final Resource namedGraphName = model.createResource("test:namedGraphExample"); namedGraphDef.addProperty(type, MemoryModel); namedGraphDef.addProperty(pGraphName, namedGraphName); namedGraphDef.addProperty(data, triplesURI); try (OutputStream out = new FileOutputStream(triples.toFile())) { write(out, model, NTRIPLES); } final Dataset dataset = assemble(simpleExample); final Model assembledDefaultModel = dataset.getDefaultModel(); final Model assembledNamedModel = dataset.getNamedModel(namedGraphName.getURI()); // we put the same triples in each model, so we check for the same triples in each model for (final Model m : new Model[] {assembledDefaultModel, assembledNamedModel}) { assertTrue(m.contains(simpleExample, pNamedGraph, namedGraphDef)); assertTrue(m.contains(namedGraphDef, pGraphName, namedGraphName)); assertTrue(m.contains(simpleExample, data, triplesURI)); } final Iterator<Node> graphNodes = dataset.asDatasetGraph().listGraphNodes(); assertTrue(graphNodes.hasNext()); assertEquals(namedGraphName.asNode(), graphNodes.next()); assertFalse(graphNodes.hasNext()); }
public Optional<Resource> getServiceFromCandidate(Resource candidate) { return Optional.ofNullable(candidate.getPropertyResourceValue(DarkData.candidateService)); }
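A typical call site for this Optional-returning accessor (a sketch; defaultService is hypothetical):

// Illustrative call site: fall back to a default when the candidate carries
// no DarkData.candidateService property.
Resource service = getServiceFromCandidate(candidate).orElse(defaultService);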
public void buildModel(Model model) { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = null; try { builder = factory.newDocumentBuilder(); } catch (Exception e) { e.printStackTrace(); } Document document = null; try { document = builder.parse("dblp.xml"); } catch (SAXException | IOException e) { e.printStackTrace(); } document.getDocumentElement().normalize(); System.out.println("Root Element : " + document.getDocumentElement().getNodeName()); NodeList nodeList = document.getElementsByTagName("article"); System.out.println(nodeList.getLength()); for (int temp = 0; temp < nodeList.getLength(); temp++) { Node node = nodeList.item(temp); // Identify the child tags of each article element if (node.getNodeType() == Node.ELEMENT_NODE) { Publication pub = new Publication(); pub.idkey = node.getAttributes().getNamedItem("key").getNodeValue(); pub.mdate = node.getAttributes().getNamedItem("mdate").getNodeValue(); NodeList childNodes = node.getChildNodes(); for (int j = 0; j < childNodes.getLength(); j++) { Node cNode = childNodes.item(j); if (cNode.getNodeType() == Node.ELEMENT_NODE) { String content = cNode.getLastChild().getTextContent().trim(); // escape single quotes by doubling them if (content.indexOf('\'') != -1) { content = content.replace("'", "''"); } switch (cNode.getNodeName()) { case "author": pub.author.add(content); break; case "title": pub.title = content; break; case "pages": pub.pages = content; break; case "year": pub.year = content; break; case "volume": pub.volume = content; break; case "journal": pub.journal = content; break; case "number": pub.numbers = content; break; case "url": pub.url = content; break; case "ee": pub.ee = content; break; } } } // end of the loop: build a complete publication Resource resource = model.createResource(NS + pub.title); Property key = model.createProperty(NS + "key"); Property mdate = model.createProperty(NS + "mdate"); Property author = model.createProperty(NS + "author"); Property pages = model.createProperty(NS + "pages"); Property year = model.createProperty(NS + "year"); Property volume = model.createProperty(NS + "volume"); Property journal = model.createProperty(NS + "journal"); Property number = model.createProperty(NS + "number"); Property url = model.createProperty(NS + "url"); Property ee = model.createProperty(NS + "ee"); resource .addProperty(key, pub.idkey, XSDDatatype.XSDstring) .addProperty(mdate, pub.mdate, XSDDatatype.XSDstring) .addProperty(author, pub.authorToString(), XSDDatatype.XSDstring) .addProperty(pages, pub.pages, XSDDatatype.XSDstring) .addProperty(year, pub.year, XSDDatatype.XSDstring) .addProperty(volume, pub.volume, XSDDatatype.XSDstring) .addProperty(journal, pub.journal, XSDDatatype.XSDstring) .addProperty(number, pub.numbers, XSDDatatype.XSDstring) .addProperty(url, pub.url, XSDDatatype.XSDstring) .addProperty(ee, pub.ee, XSDDatatype.XSDstring); } } }
@Test public void testKBtoWurcsSparql() throws SparqlException { List<Structure> structures = Ebean.find(Structure.class).findList(); HashSet<String> resultList = new HashSet<>(); String ct = ""; for (Structure structure : structures) { if (structure.id >= 7400) { if (structure.glycanst.startsWith("v--")) { structure.glycanst = structure.glycanst.replace("v--", "FreeEnd--"); } if (structure.glycanst.startsWith("FreenEnd")) { structure.glycanst = structure.glycanst.replace("FreenEnd", "FreeEnd"); } if (structure.glycanst.startsWith("FreeEnd?")) { structure.glycanst = structure.glycanst.replace("FreeEnd?", "FreeEnd--?"); } if (structure.glycanst.startsWith("<Gly") || structure.glycanst.contains("0.0000u")) { continue; } System.out.println(structure.getGlycanst()); BuilderWorkspace workspace = new BuilderWorkspace(new GlycanRendererAWT()); workspace.setNotation("cfg"); // cfgbw | uoxf | uoxfcol | text GlycanRenderer renderer = workspace.getGlycanRenderer(); org.eurocarbdb.application.glycanbuilder.Glycan glycan = org.eurocarbdb.application.glycanbuilder.Glycan.fromString(structure.glycanst.trim()); if (glycan != null) { ct = glycan.toGlycoCTCondensed(); System.out.println("this was the ct: " + ct); GlycoSequenceToWurcsSelectSparql s = new GlycoSequenceToWurcsSelectSparql("glycoct"); SparqlEntity se = new SparqlEntity(); ct = StringUtils.chomp(ct); se.setValue( GlycoSequenceToWurcsSelectSparql.FromSequence, ct.replaceAll("\n", "\\\\n") .replaceAll("x\\(", "u\\(") .replaceAll("\\)x", "\\)u") .trim()); s.setSparqlEntity(se); logger.debug(s.getSparql()); Query query = QueryFactory.create(s.getSparql().replaceAll("null", "").replace("?Sequence", "")); System.out.println( "Id " + structure.id + " Query: " + s.getSparql().replaceAll("null", "").replace("?Sequence", "")); QueryExecution qe = QueryExecutionFactory.sparqlService("http://test.ts.glytoucan.org/sparql", query); ResultSet rs = qe.execSelect(); List<SparqlEntity> results = new ArrayList<>(); while (rs.hasNext()) { QuerySolution row = rs.next(); Iterator<String> columns = row.varNames(); SparqlEntity se2 = new SparqlEntity(); while (columns.hasNext()) { String column = columns.next(); RDFNode cell = row.get(column); if (cell.isResource()) { se2.setValue(column, cell.asResource().toString()); // a Resource is never a literal: store its URI or blank-node id } else if (cell.isLiteral()) { se2.setValue(column, cell.asLiteral().getString()); } else if (cell.isAnon()) { se2.setValue(column, "anon"); } else { se2.setValue(column, cell.toString()); } } results.add(se2); // one fresh entity per result row } for (SparqlEntity entity : results) { // System.out.println("results: " + entity.getValue("PrimaryId")); resultList.add(structure.id + "\t" + entity.getValue("PrimaryId").toString()); } } } } PrintWriter writer = null; try { writer = new PrintWriter( new OutputStreamWriter(new FileOutputStream("/tmp/HashSet.txt"), "UTF-8")); } catch (UnsupportedEncodingException e) { e.printStackTrace(); } catch (FileNotFoundException e) { e.printStackTrace(); } for (String c : resultList) { System.out.println(c); writer.println(c); } writer.close(); // flush buffered output so the file is complete }
public void writeVideosToRDF(String keyword) { String api_key = "AIzaSyCZO2nHBNMSGgRg4VHMZ9P8dWT0H23J-Fc"; String yt_url = "https://www.googleapis.com/youtube/v3/search?part=snippet&q=" + keyword + "&type=video&videoCaption=closedCaption&key=" + api_key + "&format=5&maxResults=10&v=2"; String line = "", stringArray; StringBuilder stringArrayBuilder = new StringBuilder(); String titleOfVideo; String description; String thumbnailURL; String videoId; Model model = ModelFactory.createDefaultModel(); try { URL url = new URL(yt_url); BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream())); while ((line = br.readLine()) != null) { stringArrayBuilder = stringArrayBuilder.append(line); } stringArray = stringArrayBuilder.toString(); JSONObject nodeRoot = new JSONObject(stringArray); JSONArray jsonArray = (JSONArray) nodeRoot.get("items"); for (int i = 0; i < jsonArray.length(); i++) { JSONObject obj = jsonArray.getJSONObject(i); JSONObject snippet = (JSONObject) obj.get("snippet"); description = (String) snippet.get("description"); titleOfVideo = (String) snippet.get("title"); JSONObject thumbnails = (JSONObject) snippet.get("thumbnails"); JSONObject thumbnail = (JSONObject) thumbnails.get("high"); thumbnailURL = (String) thumbnail.get("url"); JSONObject id = (JSONObject) obj.get("id"); videoId = (String) id.get("videoId"); Resource video = model.createResource("video" + i); Property p1 = model.createProperty("title"); video.addProperty(p1, titleOfVideo); Property p2 = model.createProperty("description"); video.addProperty(p2, description); Property p3 = model.createProperty("thumbnail"); video.addProperty(p3, thumbnailURL); Property p4 = model.createProperty("id"); video.addProperty(p4, videoId); } FileOutputStream fos = new FileOutputStream(keyword + ".nt"); RDFDataMgr.write(fos, model, Lang.NTRIPLES); } catch (Exception ex) { ex.printStackTrace(); } }
/** * Main method: reads the Word document and extracts the information about entities. * * @throws IOException In case of error while reading the document. */ public void readGAMSODocument() throws IOException { // Read the document with POI and get the list of paragraphs XWPFDocument document = new XWPFDocument(new FileInputStream(GAMSO_DOCX)); List<XWPFParagraph> paragraphs = document.getParagraphs(); int paragraphNumber = 0; int paragraphStylingNumber = 0; int[] currentNumber = {0, 0, 0}; List<String> currentDescription = null; String currentLabel = null; // Creation of the concept scheme resource. gamsoCS = gamsoModel.createResource(GAMSO_BASE_URI + "gamso", SKOS.ConceptScheme); // Iteration through the document paragraphs logger.debug( "Document read from " + GAMSO_DOCX + ", starting to iterate through the paragraphs."); for (XWPFParagraph paragraph : paragraphs) { if (paragraph.getParagraphText() == null) continue; // skipping empty paragraphs paragraphNumber++; // Styling number will be strictly positive for headings and list elements (e.g. bullet points) paragraphStylingNumber = (paragraph.getNumID() == null) ? 0 : paragraph.getNumID().intValue(); // Add the paragraph text to the CS description if its number corresponds if (descriptionIndexes.contains(paragraphNumber)) { // TODO normalize white spaces if (gamsoDescription == null) gamsoDescription = paragraph.getParagraphText(); else gamsoDescription += " " + paragraph.getParagraphText(); } if (LEVEL1_STYLING.equals(paragraph.getStyle())) { // The first headings are in the introduction: we skip those if (paragraphStylingNumber == 0) continue; // If the paragraph has a number styling, we have a new level 1 activity currentNumber[2] = 0; // Because the third number may have been modified by level 3 operations if (currentDescription != null) { // Previous description is complete: record it in the model this.addActivityToModel(currentNumber, currentLabel, currentDescription); } currentNumber[0]++; currentNumber[1] = 0; currentDescription = new ArrayList<String>(); currentLabel = normalizeActivityName(paragraph); } else if (LEVEL2_STYLING.equals(paragraph.getStyle())) { // Start of a new level 2 activity currentNumber[2] = 0; // Record the previous description (which exists since we are at level 2) in the model this.addActivityToModel(currentNumber, currentLabel, currentDescription); currentNumber[1]++; currentDescription = new ArrayList<String>(); currentLabel = normalizeActivityName(paragraph); // Strip code for 3.x activities } else { if (currentNumber[0] == 0) continue; // Skip paragraphs that are before the first activity // Not a heading, so part of a description String descriptionPart = normalizeDescriptionItem(paragraph, paragraphStylingNumber); if (descriptionPart.length() > 0) currentDescription.add(descriptionPart); // Transform bullet points of level 2 activities into level 3 activities if ((paragraphStylingNumber > 0) && (currentNumber[1] > 0)) { currentNumber[2]++; this.addActivityToModel(currentNumber, paragraph.getParagraphText().trim(), null); } } } // The last activity read has not been added to the model yet: we do it here this.addActivityToModel(currentNumber, currentLabel, currentDescription); document.close(); logger.debug("Iteration through the paragraphs finished, completing the Jena model."); // Add the properties of the concept scheme (the description is now complete) gamsoCS.addProperty(SKOS.notation, gamsoModel.createLiteral("GAMSO v1.0")); gamsoCS.addProperty( SKOS.prefLabel, gamsoModel.createLiteral( "Generic Activity Model for Statistical Organisations v 1.0", "en")); gamsoCS.addProperty(SKOS.scopeNote, gamsoModel.createLiteral(gamsoDescription, "en")); }
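normalizeActivityName and normalizeDescriptionItem are used above but not shown. Minimal sketches of plausible behavior; the exact rules (code stripping, whitespace collapsing, bullet handling) are assumptions, not taken from the source:

// Hypothetical sketch: strip a leading activity code such as "3.1 " and
// collapse runs of whitespace (assumed behavior).
private String normalizeActivityName(XWPFParagraph paragraph) {
    return paragraph.getParagraphText().replaceFirst("^[0-9.]+\\s*", "").replaceAll("\\s+", " ").trim();
}

// Hypothetical sketch: clean up one description paragraph, marking bullet
// items (positive styling number) with a leading dash (assumed behavior).
private String normalizeDescriptionItem(XWPFParagraph paragraph, int stylingNumber) {
    String text = paragraph.getParagraphText().replaceAll("\\s+", " ").trim();
    return (stylingNumber > 0 && !text.isEmpty()) ? "- " + text : text;
}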
@Override public EntityDefinition open(Assembler a, Resource root, Mode mode) { String prologue = "PREFIX : <" + NS + "> PREFIX list: <http://jena.apache.org/ARQ/list#> "; Model model = root.getModel(); String qs1 = StrUtils.strjoinNL( prologue, "SELECT * {", " ?eMap :entityField ?entityField ;", " :map ?map ;", " :defaultField ?dftField .", " OPTIONAL {", " ?eMap :graphField ?graphField", " }", " OPTIONAL {", " ?eMap :langField ?langField", " }", " OPTIONAL {", " ?eMap :uidField ?uidField", " }", "}"); ParameterizedSparqlString pss = new ParameterizedSparqlString(qs1); pss.setIri("eMap", root.getURI()); Query query1 = QueryFactory.create(pss.toString()); QueryExecution qexec1 = QueryExecutionFactory.create(query1, model); ResultSet rs1 = qexec1.execSelect(); List<QuerySolution> results = ResultSetFormatter.toList(rs1); if (results.size() == 0) { Log.warn(this, "Failed to find a valid EntityMap for : " + root); throw new TextIndexException("Failed to find a valid EntityMap for : " + root); } if (results.size() != 1) { Log.warn(this, "Multiple matches for EntityMap for : " + root); throw new TextIndexException("Multiple matches for EntityMap for : " + root); } QuerySolution qsol1 = results.get(0); String entityField = qsol1.getLiteral("entityField").getLexicalForm(); String graphField = qsol1.contains("graphField") ? qsol1.getLiteral("graphField").getLexicalForm() : null; String langField = qsol1.contains("langField") ? qsol1.getLiteral("langField").getLexicalForm() : null; String defaultField = qsol1.contains("dftField") ? qsol1.getLiteral("dftField").getLexicalForm() : null; String uniqueIdField = qsol1.contains("uidField") ? qsol1.getLiteral("uidField").getLexicalForm() : null; Multimap<String, Node> mapDefs = HashMultimap.create(); Map<String, Analyzer> analyzerDefs = new HashMap<>(); Statement listStmt = root.getProperty(TextVocab.pMap); while (listStmt != null) { RDFNode n = listStmt.getObject(); if (!n.isResource()) { throw new TextIndexException("Text list node is not a resource : " + n); } Resource listResource = n.asResource(); if (listResource.equals(RDF.nil)) { break; // end of the list } Statement listEntryStmt = listResource.getProperty(RDF.first); if (listEntryStmt == null) { throw new TextIndexException("Text map list is not well formed. No rdf:first property"); } n = listEntryStmt.getObject(); if (!n.isResource()) { throw new TextIndexException("Text map list entry is not a resource : " + n); } Resource listEntry = n.asResource(); Statement fieldStatement = listEntry.getProperty(TextVocab.pField); if (fieldStatement == null) { throw new TextIndexException("Text map entry has no field property"); } n = fieldStatement.getObject(); if (!n.isLiteral()) { throw new TextIndexException("Text map entry field property has no literal value : " + n); } String field = n.asLiteral().getLexicalForm(); Statement predicateStatement = listEntry.getProperty(TextVocab.pPredicate); if (predicateStatement == null) { throw new TextIndexException("Text map entry has no predicate property"); } n = predicateStatement.getObject(); if (!n.isURIResource()) { throw new TextIndexException( "Text map entry predicate property has non resource value : " + n); } mapDefs.put(field, n.asNode()); Statement analyzerStatement = listEntry.getProperty(TextVocab.pAnalyzer); if (analyzerStatement != null) { n = analyzerStatement.getObject(); if (!n.isResource()) { throw new TextIndexException("Text map entry analyzer property is not a resource : " + n); } Resource analyzerResource = n.asResource(); Analyzer analyzer = (Analyzer) a.open(analyzerResource); analyzerDefs.put(field, analyzer); } // move on to the next element in the list listStmt = listResource.getProperty(RDF.rest); } // Primary field/predicate if (defaultField != null) { Collection<Node> c = mapDefs.get(defaultField); if (c.isEmpty()) throw new TextIndexException("No definition of primary field '" + defaultField + "'"); } EntityDefinition docDef = new EntityDefinition(entityField, defaultField); docDef.setGraphField(graphField); docDef.setLangField(langField); docDef.setUidField(uniqueIdField); for (String f : mapDefs.keys()) { for (Node p : mapDefs.get(f)) docDef.set(f, p); } for (String f : analyzerDefs.keySet()) { docDef.setAnalyzer(f, analyzerDefs.get(f)); } return docDef; }
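For orientation, a minimal entity-map configuration that this open() method would parse might look like the following Turtle. This is a sketch reconstructed from the SPARQL query and list-walking code above, with the prefix URI standing in for the NS constant:

// Hypothetical configuration (Turtle), shown as a comment for reference:
//
//   @prefix :     <...NS...> .   # must expand to the NS constant used in the prologue
//   @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
//
//   <#entMap>  :entityField  "uri" ;
//              :defaultField "text" ;
//              :map ( [ :field "text" ; :predicate rdfs:label ] ) .
//
// Optional :graphField, :langField and :uidField triples are picked up by the
// OPTIONAL clauses in the query; each :map list entry may also carry an
// analyzer resource, read via TextVocab.pAnalyzer.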