@Test public void testPresentTotalCount() throws URISyntaxException {
    Integer totalResults = new Integer(17);
    Resource thisMetaPage = createMetadata(true, totalResults);
    Literal tr = thisMetaPage.getModel().createTypedLiteral(totalResults);
    assertTrue(thisMetaPage.hasProperty(OpenSearch.totalResults, tr));
}
private static List<Resource> equivalentTypes(Resource type) {
    List<Resource> types = new ArrayList<Resource>();
    types.add(type);
    for (StmtIterator it = type.getModel().listStatements(ANY, OWL.equivalentClass, type); it.hasNext();)
        types.add(it.nextStatement().getSubject());
    return types;
}
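// Illustrative usage of equivalentTypes (a hypothetical sketch, not part of the
// original test class; the URIs http://example/A and http://example/B are made up):
// given a model asserting ex:A owl:equivalentClass ex:B, the helper returns ex:B
// itself plus every subject declared equivalent to it.
@Test public void testEquivalentTypesSketch() {
    Model m = ModelFactory.createDefaultModel();
    Resource a = m.createResource("http://example/A");
    Resource b = m.createResource("http://example/B");
    m.add(a, OWL.equivalentClass, b);
    List<Resource> types = equivalentTypes(b);
    assertTrue(types.contains(b));
    assertTrue(types.contains(a));
}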
@Test public void testTermbindsIncludesMetaproperties() throws URISyntaxException {
    Integer totalResults = null;
    Resource thisMetaPage = createMetadata(false, totalResults);
    Model model = thisMetaPage.getModel();
    for (Property p : expectedTermboundProperties) {
        if (!model.contains(null, API.property, p)) {
            fail("term bindings should include " + model.shortForm(p.getURI()));
        }
    }
}
@Test public void testInfoStamp() {
    String versionString = "E3.14159", commentString = "gloopSmurfale";
    String resourceString = "_x eye:assumed 'ABC'; _x eye:checked 'DEF'; _x eye:version '%v'; _x eye:comment '%c'"
        .replaceAll("%v", versionString)
        .replaceAll("%c", commentString);
    InfoStamp i = new InfoStamp(resourceInModel(resourceString));
    Calendar now = Calendar.getInstance();
    Resource root = i.stamp(now);
    Model stamp = root.getModel();
    Literal dateLiteral = ModelFactory.createDefaultModel().createTypedLiteral(now);
    String dateString = "'" + dateLiteral.getLexicalForm() + "'" + dateLiteral.getDatatypeURI();
    String expectedFormat =
        "[eye:assumed 'ABC' & eye:checked 'DEF' & eye:dated <date>"
        + " & eye:comment '<comment>' & eye:version '<version>']";
    String expectedString = expectedFormat
        .replaceAll("<date>", dateString)
        .replaceAll("<version>", versionString)
        .replaceAll("<comment>", commentString);
    Model expected = model(expectedString);
    assertIsoModels(expected, stamp);
}
@Override
public EntityDefinition open(Assembler a, Resource root, Mode mode) {
    String prologue = "PREFIX : <" + NS + "> PREFIX list: <http://jena.apache.org/ARQ/list#> ";
    Model model = root.getModel();

    // Pull the required entity/map/default fields and the optional graph, lang
    // and uid fields off the entity map resource with a single SELECT.
    String qs1 = StrUtils.strjoinNL(prologue,
        "SELECT * {",
        "  ?eMap  :entityField  ?entityField ;",
        "         :map          ?map ;",
        "         :defaultField ?dftField .",
        "  OPTIONAL {",
        "    ?eMap :graphField ?graphField",
        "  }",
        "  OPTIONAL {",
        "    ?eMap :langField ?langField",
        "  }",
        "  OPTIONAL {",
        "    ?eMap :uidField ?uidField",
        "  }",
        "}");
    ParameterizedSparqlString pss = new ParameterizedSparqlString(qs1);
    pss.setIri("eMap", root.getURI());

    Query query1 = QueryFactory.create(pss.toString());
    QueryExecution qexec1 = QueryExecutionFactory.create(query1, model);
    ResultSet rs1 = qexec1.execSelect();
    List<QuerySolution> results = ResultSetFormatter.toList(rs1);
    if (results.size() == 0) {
        Log.warn(this, "Failed to find a valid EntityMap for : " + root);
        throw new TextIndexException("Failed to find a valid EntityMap for : " + root);
    }
    if (results.size() != 1) {
        Log.warn(this, "Multiple matches for EntityMap for : " + root);
        throw new TextIndexException("Multiple matches for EntityMap for : " + root);
    }

    QuerySolution qsol1 = results.get(0);
    String entityField = qsol1.getLiteral("entityField").getLexicalForm();
    String graphField = qsol1.contains("graphField") ? qsol1.getLiteral("graphField").getLexicalForm() : null;
    String langField = qsol1.contains("langField") ? qsol1.getLiteral("langField").getLexicalForm() : null;
    String defaultField = qsol1.contains("dftField") ? qsol1.getLiteral("dftField").getLexicalForm() : null;
    String uniqueIdField = qsol1.contains("uidField") ? qsol1.getLiteral("uidField").getLexicalForm() : null;

    Multimap<String, Node> mapDefs = HashMultimap.create();
    Map<String, Analyzer> analyzerDefs = new HashMap<>();

    // Walk the RDF list of map entries; each entry names a field, a predicate
    // and, optionally, an analyzer resource.
    Statement listStmt = root.getProperty(TextVocab.pMap);
    while (listStmt != null) {
        RDFNode n = listStmt.getObject();
        if (!n.isResource()) {
            throw new TextIndexException("Text list node is not a resource : " + n);
        }
        Resource listResource = n.asResource();
        if (listResource.equals(RDF.nil)) {
            break; // end of the list
        }

        Statement listEntryStmt = listResource.getProperty(RDF.first);
        if (listEntryStmt == null) {
            throw new TextIndexException("Text map list is not well formed.  No rdf:first property");
        }
        n = listEntryStmt.getObject();
        if (!n.isResource()) {
            throw new TextIndexException("Text map list entry is not a resource : " + n);
        }
        Resource listEntry = n.asResource();

        Statement fieldStatement = listEntry.getProperty(TextVocab.pField);
        if (fieldStatement == null) {
            throw new TextIndexException("Text map entry has no field property");
        }
        n = fieldStatement.getObject();
        if (!n.isLiteral()) {
            throw new TextIndexException("Text map entry field property has no literal value : " + n);
        }
        String field = n.asLiteral().getLexicalForm();

        Statement predicateStatement = listEntry.getProperty(TextVocab.pPredicate);
        if (predicateStatement == null) {
            throw new TextIndexException("Text map entry has no predicate property");
        }
        n = predicateStatement.getObject();
        if (!n.isURIResource()) {
            throw new TextIndexException("Text map entry predicate property has non resource value : " + n);
        }
        mapDefs.put(field, n.asNode());

        Statement analyzerStatement = listEntry.getProperty(TextVocab.pAnalyzer);
        if (analyzerStatement != null) {
            n = analyzerStatement.getObject();
            if (!n.isResource()) {
                throw new TextIndexException("Text map entry analyzer property is not a resource : " + n);
            }
            Resource analyzerResource = n.asResource();
            Analyzer analyzer = (Analyzer) a.open(analyzerResource);
            analyzerDefs.put(field, analyzer);
        }

        // move on to the next element in the list
        listStmt = listResource.getProperty(RDF.rest);
    }

    // Primary field/predicate
    if (defaultField != null) {
        Collection<Node> c = mapDefs.get(defaultField);
        if (c.isEmpty())
            throw new TextIndexException("No definition of primary field '" + defaultField + "'");
    }

    // Assemble the EntityDefinition from the collected fields, predicates and analyzers.
    EntityDefinition docDef = new EntityDefinition(entityField, defaultField);
    docDef.setGraphField(graphField);
    docDef.setLangField(langField);
    docDef.setUidField(uniqueIdField);
    for (String f : mapDefs.keys()) {
        for (Node p : mapDefs.get(f))
            docDef.set(f, p);
    }
    for (String f : analyzerDefs.keySet()) {
        docDef.setAnalyzer(f, analyzerDefs.get(f));
    }
    return docDef;
}