/** Returns the language tag declared for {@code attribute}, or null when none is present. */
public String getLanguage(Resource attribute) {
    if (!_model.contains(attribute, Vertere.language)) {
        return null;
    }
    return _model.getProperty(attribute, Vertere.language).getString();
}
/** Returns the glue string used when joining source columns, or "" when unset. */
public String getGlue(Resource resource) {
    if (!_model.contains(resource, Vertere.source_column_glue)) {
        return "";
    }
    return _model.getProperty(resource, Vertere.source_column_glue).getString();
}
/** Returns the container value declared on {@code identity}, or "" when unset. */
public String getContainer(Resource identity) {
    if (!_model.contains(identity, Vertere.container)) {
        return "";
    }
    return _model.getProperty(identity, Vertere.container).getString();
}
/**
 * Returns the alternative-identity resource declared on {@code resource}, or null when none.
 *
 * <p>Bug fix: the original checked for {@code Vertere.alternative_identity} but then read
 * {@code Vertere.identity}, so it returned the primary identity instead of the alternative —
 * and threw a NullPointerException when the resource had an alternative identity but no
 * primary one (getProperty returns null for an absent statement).
 */
public Resource getAlternativeIdentity(Resource resource) {
    if (!_model.contains(resource, Vertere.alternative_identity)) {
        return null;
    }
    return _model.getProperty(resource, Vertere.alternative_identity).getResource();
}
/** Returns the resource this identity nests under, or null when no nesting is configured. */
public Resource getNestedUnder(Resource identity) {
    if (!_model.contains(identity, Vertere.nest_under)) {
        return null;
    }
    return _model.getProperty(identity, Vertere.nest_under).getResource();
}
/** Returns the declared substring start index, or -1 when no start is configured. */
int getSubstrStart(Resource resource) {
    if (!_model.contains(resource, Vertere.substring_start)) {
        return -1;
    }
    return _model.getProperty(resource, Vertere.substring_start).getInt();
}
/** Returns the expected CSV header declared on the spec resource, or null when none. */
public String getExpectedHeader() {
    if (!_model.contains(_specResource, Vertere.expected_header)) {
        return null;
    }
    return _model.getProperty(_specResource, Vertere.expected_header).getString();
}
/** Returns the base URI declared on {@code resource}, or null when none is present. */
public String getBaseUri(Resource resource) {
    if (!_model.contains(resource, Vertere.base_uri)) {
        return null;
    }
    return _model.getProperty(resource, Vertere.base_uri).getString();
}
/** Returns the salt string declared on {@code resource}, or "" when unset. */
public String getSalt(Resource resource) {
    if (!_model.contains(resource, Vertere.salt)) {
        return "";
    }
    return _model.getProperty(resource, Vertere.salt).getString();
}
/** Returns the declared substring length, or -1 when no length is configured. */
int getSubstrLength(Resource resource) {
    if (!_model.contains(resource, Vertere.substring_length)) {
        return -1;
    }
    return _model.getProperty(resource, Vertere.substring_length).getInt();
}
/** Returns the source resource linked from {@code identity}, or null when none is declared. */
public Resource getSourceResource(Resource identity) {
    if (!_model.contains(identity, Vertere.source_resource)) {
        return null;
    }
    return _model.getProperty(identity, Vertere.source_resource).getResource();
}
/** Returns the object-from resource of {@code relationship}, or null when none is declared. */
public Resource getRelationshipObjectFrom(Resource relationship) {
    if (!_model.contains(relationship, Vertere.object_from)) {
        return null;
    }
    return _model.getProperty(relationship, Vertere.object_from).getResource();
}
/**
 * Returns the identity resource declared on {@code resource}.
 *
 * @throws RuntimeException when the resource has no identity statement
 */
public Resource getIdentity(Resource resource) {
    if (_model.contains(resource, Vertere.identity)) {
        return _model.getProperty(resource, Vertere.identity).getResource();
    }
    throw new RuntimeException(
        "Resource " + resource.getURI() + " does not contain an identity so cannot be created.");
}
/** Returns the regex output template declared on {@code resource}, or null when none. */
public String getRegexOutput(Resource resource) {
    if (!_model.contains(resource, Vertere.regex_output)) {
        return null;
    }
    return _model.getProperty(resource, Vertere.regex_output).getString();
}
/** Returns the regex match pattern declared on {@code resource}, or null when none. */
public String getRegexMatch(Resource resource) {
    if (!_model.contains(resource, Vertere.regex_match)) {
        return null;
    }
    return _model.getProperty(resource, Vertere.regex_match).getString();
}
/** Answer the subset of <code>classes</code> which have no superclass in <code>m</code>. */
private static Set<Resource> selectRootClasses(Model m, Set<RDFNode> classes) {
    Set<Resource> roots = new HashSet<Resource>();
    for (RDFNode node : classes) {
        Resource candidate = (Resource) node;
        // a class with no rdfs:subClassOf statement in m is a root
        if (!m.contains(candidate, RDFS.subClassOf, (RDFNode) null)) {
            roots.add(candidate);
        }
    }
    return roots;
}
/** Returns the relationship's predicate as a Property, or null when none is declared. */
public Property getRelationshipProperty(Resource relationship) {
    if (!_model.contains(relationship, Vertere.property)) {
        return null;
    }
    String propertyUri = _model.getProperty(relationship, Vertere.property).getResource().getURI();
    return ResourceFactory.createProperty(propertyUri);
}
/** Returns an RDFDatatype built from the attribute's declared datatype URI, or null when none. */
public RDFDatatype getDatatype(Resource attribute) {
    if (!_model.contains(attribute, Vertere.datatype)) {
        return null;
    }
    Resource typeResource = _model.getProperty(attribute, Vertere.datatype).getResource();
    return new BaseDatatype(typeResource.getURI());
}
/** Returns the ordered RDF list of processing steps for {@code identity}, or null when none. */
public RDFList getProcessingSteps(Resource identity) {
    if (!_model.contains(identity, Vertere.process)) {
        return null;
    }
    Resource head = _model.getProperty(identity, Vertere.process).getResource();
    return head.as(RDFList.class);
}
/**
 * Check that a predicate for which no shortnames are defined in name map still gets a term
 * binding in the metadata.
 */
@Test
public void testTermBindingsCoverAllPredicates() throws URISyntaxException {
    Resource thisPage = ResourceFactory.createResource("elda:thisPage");
    String pageNumber = "1";
    Bindings cc = new Bindings();
    URI reqURI = new URI("");
    //
    // Minimal endpoint stub: a list endpoint without parameter-based content negotiation.
    EndpointDetails spec =
        new EndpointDetails() {
            @Override
            public boolean isListEndpoint() {
                return true;
            }

            @Override
            public boolean hasParameterBasedContentNegotiation() {
                return false;
            }
        };
    EndpointMetadata em = new EndpointMetadata(spec, thisPage, pageNumber, cc, reqURI);
    //
    // Model under scan holds one statement whose predicate has no configured shortname;
    // the "this" prefix lets the context derive the shortname "this_predicate".
    PrefixMapping pm =
        PrefixMapping.Factory.create().setNsPrefix("this", "http://example.com/root#");
    Model toScan = ModelIOUtils.modelFromTurtle(":a <http://example.com/root#predicate> :b.");
    toScan.setNsPrefixes(pm);
    Resource predicate = toScan.createProperty("http://example.com/root#predicate");
    Model meta = ModelFactory.createDefaultModel();
    Resource exec = meta.createResource("fake:exec");
    ShortnameService sns = new StandardShortnameService();
    //
    APIEndpoint.Request r = new APIEndpoint.Request(new Controls(), reqURI, cc);
    CompleteContext c =
        new CompleteContext(CompleteContext.Mode.PreferPrefixes, sns.asContext(), pm)
            .include(toScan);
    em.addTermBindings(toScan, meta, exec, c);
    // c.Do() finalizes the shortname map; result itself is not needed here.
    @SuppressWarnings("unused")
    Map<String, String> termBindings = c.Do();
    // Expect exactly one term binding, labelled with the prefix-derived shortname
    // and pointing at the scanned predicate.
    Resource tb = meta.listStatements(null, API.termBinding, Any).nextStatement().getResource();
    assertTrue(meta.contains(tb, API.label, "this_predicate"));
    assertTrue(meta.contains(tb, API.property, predicate));
}
/**
 * Returns the source column number(s) configured on {@code resource}.
 *
 * <p>A single source_column yields a one-element array; a source_columns RDF list yields
 * one entry per list member; neither yields an empty array.
 */
public int[] getSourceColumns(Resource resource) {
    if (_model.contains(resource, Vertere.source_column)) {
        int single = _model.getProperty(resource, Vertere.source_column).getInt();
        return new int[] {single};
    }
    if (!_model.contains(resource, Vertere.source_columns)) {
        return new int[0];
    }
    Resource listHead = _model.getProperty(resource, Vertere.source_columns).getResource();
    List<RDFNode> members = listHead.as(RDFList.class).asJavaList();
    int[] columns = new int[members.size()];
    for (int index = 0; index < columns.length; index++) {
        columns[index] = members.get(index).asLiteral().getInt();
    }
    return columns;
}
/** Every expected term-bound property must appear as an API.property in the page metadata. */
@Test
public void testTermbindsIncludesMetaproperties() throws URISyntaxException {
    Integer totalResults = null;
    Resource thisMetaPage = createMetadata(false, totalResults);
    Model model = thisMetaPage.getModel();
    for (Property expected : expectedTermboundProperties) {
        if (!model.contains(null, API.property, expected)) {
            fail("term bindings should include " + model.shortForm(expected.getURI()));
        }
    }
}
/** Returns every column number attached to {@code test}, or an empty array when none. */
int[] getColumns(Resource test) {
    if (!_model.contains(test, Vertere.column)) {
        return new int[0];
    }
    List<RDFNode> values = _model.listObjectsOfProperty(test, Vertere.column).toList();
    int[] columns = new int[values.size()];
    for (int index = 0; index < columns.length; index++) {
        columns[index] = values.get(index).asLiteral().getInt();
    }
    return columns;
}
/**
 * Attempts to find the most plausible RDF type for a given property.
 *
 * <p>If any declared rdfs:range of the property is itself typed owl:Class in the model,
 * the property is treated as an object property; otherwise it defaults to a datatype
 * property.
 *
 * <p>Fixes: the redundant outer {@code if (it.hasNext())} around the {@code while} loop is
 * removed, the iterator is now closed on every path (the original leaked it when returning
 * early with owl:ObjectProperty), and the redundant {@code (Resource)} cast on
 * {@code n.as(Resource.class)} is dropped.
 *
 * @param property the property to get the type of
 * @return either owl:DatatypeProperty or owl:ObjectProperty
 */
private Resource getPropertyType(Resource property) {
    StmtIterator it = model.listStatements(property, RDFS.range, (RDFNode) null);
    try {
        while (it.hasNext()) {
            RDFNode range = it.nextStatement().getObject();
            if (range.canAs(Resource.class)
                && model.contains(range.as(Resource.class), RDF.type, OWL.Class)) {
                return OWL.ObjectProperty;
            }
        }
    } finally {
        // listStatements iterators should be closed even on early return
        it.close();
    }
    return OWL.DatatypeProperty;
}
/**
 * Resolves {@code sourceValue} through the lookup table attached to {@code identity}.
 *
 * @return the mapped value node for the first entry whose key equals the source value,
 *     or null when the identity has no lookup table or no entry matches
 */
public RDFNode lookup(Resource identity, String sourceValue) {
    if (!_model.contains(identity, Vertere.lookup)) {
        return null;
    }
    Resource table = _model.getProperty(identity, Vertere.lookup).getResource();
    NodeIterator entries = _model.listObjectsOfProperty(table, Vertere.lookup_entry);
    while (entries.hasNext()) {
        Resource entry = entries.next().asResource();
        String key = _model.getProperty(entry, Vertere.lookup_key).getString();
        if (sourceValue.equals(key)) {
            return _model.getProperty(entry, Vertere.lookup_value).getObject();
        }
    }
    return null;
}
/**
 * Round-trip check: serialize a single triple through the provider as RDF/XML, then parse
 * the bytes back into a model and verify the triple survived.
 */
@Test
public void testWriteTo() throws WebApplicationException, IllegalArgumentException, IOException {
    final Triple t =
        create(createURI("info:test"), createURI("property:test"), createURI("info:test"));
    final RdfStream rdfStream = new RdfStream(t).session(mockSession);
    byte[] result;
    // capture the provider's serialized output in memory
    try (ByteArrayOutputStream entityStream = new ByteArrayOutputStream(); ) {
        testProvider.writeTo(
            rdfStream,
            RdfStream.class,
            null,
            null,
            MediaType.valueOf("application/rdf+xml"),
            null,
            entityStream);
        result = entityStream.toByteArray();
    }
    // re-parse the bytes; null base URI since the triples use absolute URIs
    final Model postSerialization =
        createDefaultModel().read(new ByteArrayInputStream(result), null);
    assertTrue(
        "Didn't find our triple!", postSerialization.contains(postSerialization.asStatement(t)));
}
/**
 * Tests whether the backing graph contains a statement matching the given one.
 *
 * <p>Opens a graph connection, takes the graph's READ critical section, and matches via a
 * selector built from the statement. The finally block releases the lock and closes the
 * connection in that order, guarded by null checks so a failure in openGraph()/getGraph()
 * does not trigger a release of something never acquired.
 *
 * @param statement the statement to look for; null yields false
 * @return true when a matching statement exists
 */
@Override
public boolean hasStatement(Statement statement) {
    if (statement == null) {
        return false;
    }
    Model graph = null;
    GraphConnection graphConnection = null;
    try {
        graphConnection = openGraph();
        graph = graphConnection.getGraph();
        graph.enterCriticalSection(Lock.READ);
        SimpleSelector selector = getJenaSelector(graph, statement);
        // NOTE(review): selector components may be wildcards depending on getJenaSelector —
        // confirm against that helper's contract
        return graph.contains(selector.getSubject(), selector.getPredicate(), selector.getObject());
    } finally {
        if (graph != null) {
            graph.leaveCriticalSection();
        }
        if (graphConnection != null) {
            graphConnection.close();
        }
    }
}
/**
 * Inference maintenance loop: while the listener reports the model dirty, query the Pellet
 * model for entailed statements matching each allowed pattern, add newly entailed statements
 * to the inference model, and retract statements no longer entailed.
 *
 * <p>Locking protocol visible in this method: pelletModel is read/write-locked around each
 * query/rebind, fullModel is write-locked (with closePipe()/openPipe() bracketing) around
 * every mutation of inferenceModel. Inconsistent-ontology and generic failures are caught
 * per round; endReasoning() always runs.
 */
public void run() {
    while (pelletListener.isDirty()) {
        // pipeOpen = false;
        try {
            pelletListener.setDirty(false);
            inferenceRounds++;
            log.info("Getting new inferences");
            long startTime = System.currentTimeMillis();
            // irpl: the patterns whose entailments we will (re)query this round
            LinkedList<ObjectPropertyStatementPattern> irpl =
                new LinkedList<ObjectPropertyStatementPattern>();
            if (inferenceReceivingPatternAllowSet != null) {
                irpl.addAll(inferenceReceivingPatternAllowSet);
            } else {
                // no allow-set: query everything
                irpl.add(ObjectPropertyStatementPatternFactory.getPattern(null, null, null));
            }
            if (reasonerConfiguration.getQueryForAllObjectProperties()) {
                pelletModel.enterCriticalSection(Lock.READ);
                try {
                    ClosableIterator closeIt = pelletModel.listObjectProperties();
                    try {
                        for (Iterator objPropIt = closeIt; objPropIt.hasNext(); ) {
                            ObjectProperty objProp = (ObjectProperty) objPropIt.next();
                            // skip properties from the OWL namespace itself
                            if (!("http://www.w3.org/2002/07/owl#".equals(objProp.getNameSpace()))) {
                                irpl.add(
                                    ObjectPropertyStatementPatternFactory.getPattern(
                                        null, objProp, null));
                            }
                        }
                    } finally {
                        closeIt.close();
                    }
                } finally {
                    pelletModel.leaveCriticalSection();
                }
                // also re-query patterns for properties that were deleted, so their
                // stale inferences get retracted below; then clear the record
                deletedObjectProperties.enterCriticalSection(Lock.WRITE);
                try {
                    ClosableIterator sit = deletedObjectProperties.listSubjects();
                    try {
                        while (sit.hasNext()) {
                            Resource subj = (Resource) sit.next();
                            irpl.add(
                                ObjectPropertyStatementPatternFactory.getPattern(
                                    null, ResourceFactory.createProperty(subj.getURI()), null));
                        }
                    } finally {
                        sit.close();
                    }
                    deletedObjectProperties.removeAll();
                } finally {
                    deletedObjectProperties.leaveCriticalSection();
                }
            }
            if (reasonerConfiguration.getQueryForAllDatatypeProperties()) {
                pelletModel.enterCriticalSection(Lock.READ);
                try {
                    ClosableIterator closeIt = pelletModel.listDatatypeProperties();
                    try {
                        for (Iterator dataPropIt = closeIt; dataPropIt.hasNext(); ) {
                            DatatypeProperty dataProp = (DatatypeProperty) dataPropIt.next();
                            if (!("http://www.w3.org/2002/07/owl#".equals(dataProp.getNameSpace()))) {
                                // TODO: THIS WILL WORK, BUT NEED TO GENERALIZE THE PATTERN CLASSES
                                irpl.add(
                                    ObjectPropertyStatementPatternFactory.getPattern(
                                        null, dataProp, null));
                            }
                        }
                    } finally {
                        closeIt.close();
                    }
                } finally {
                    pelletModel.leaveCriticalSection();
                }
                deletedDataProperties.enterCriticalSection(Lock.WRITE);
                try {
                    ClosableIterator sit = deletedDataProperties.listSubjects();
                    try {
                        while (sit.hasNext()) {
                            Resource subj = (Resource) sit.next();
                            irpl.add(
                                ObjectPropertyStatementPatternFactory.getPattern(
                                    null, ResourceFactory.createProperty(subj.getURI()), null));
                        }
                    } finally {
                        sit.close();
                    }
                    deletedDataProperties.removeAll();
                } finally {
                    deletedDataProperties.leaveCriticalSection();
                }
            }
            int addCount = 0;
            int retractCount = 0;
            // force new reasoner (disabled)
            if (false && !reasonerConfiguration.isIncrementalReasoningEnabled()) {
                Model baseModel = pelletModel.getBaseModel();
                pelletModel = ModelFactory.createOntologyModel(PelletReasonerFactory.THE_SPEC);
                pelletModel.getDocumentManager().setProcessImports(false);
                pelletModel.add(baseModel);
            }
            // refresh the reasoner's view of the base assertions before querying
            pelletModel.enterCriticalSection(Lock.WRITE);
            try {
                pelletModel.rebind();
                pelletModel.prepare();
            } finally {
                pelletModel.leaveCriticalSection();
            }
            for (Iterator<ObjectPropertyStatementPattern> patIt = irpl.iterator();
                patIt.hasNext(); ) {
                ObjectPropertyStatementPattern pat = patIt.next();
                if (log.isDebugEnabled()) {
                    String subjStr = (pat.getSubject() != null) ? pat.getSubject().getURI() : "*";
                    String predStr =
                        (pat.getPredicate() != null) ? pat.getPredicate().getURI() : "*";
                    String objStr = (pat.getObject() != null) ? pat.getObject().getURI() : "*";
                    log.debug("Querying for " + subjStr + " : " + predStr + " : " + objStr);
                }
                // tempModel collects what is currently entailed for this pattern
                Model tempModel = ModelFactory.createDefaultModel();
                pelletModel.enterCriticalSection(Lock.READ);
                try {
                    ClosableIterator ci =
                        pelletModel.listStatements(
                            pat.getSubject(), pat.getPredicate(), pat.getObject());
                    try {
                        for (ClosableIterator i = ci; i.hasNext(); ) {
                            Statement stmt = (Statement) i.next();
                            boolean reject = false;
                            // this next part is only needed if we're using Jena's OWL reasoner instead of
                            // actually using Pellet
                            try {
                                if ((((Resource) stmt.getObject()).equals(RDFS.Resource))) {
                                    reject = true;
                                } else if ((((Resource) stmt.getSubject()).equals(OWL.Nothing))) {
                                    reject = true;
                                } else if ((((Resource) stmt.getObject()).equals(OWL.Nothing))) {
                                    reject = true;
                                }
                            } catch (Exception e) {
                                // ignored: literal objects fail the Resource cast; such
                                // statements are simply not rejected
                            }
                            if (!reject) {
                                tempModel.add(stmt);
                                boolean fullModelContainsStatement = false;
                                fullModel.enterCriticalSection(Lock.READ);
                                try {
                                    fullModelContainsStatement = fullModel.contains(stmt);
                                } finally {
                                    fullModel.leaveCriticalSection();
                                }
                                if (!fullModelContainsStatement) {
                                    // in theory we should be able to lock only the inference model, but I'm not
                                    // sure yet if Jena propagates the locking upward
                                    fullModel.enterCriticalSection(Lock.WRITE);
                                    closePipe();
                                    try {
                                        inferenceModel.add(stmt);
                                        addCount++;
                                    } finally {
                                        openPipe();
                                        fullModel.leaveCriticalSection();
                                    }
                                }
                            }
                        }
                    } finally {
                        ci.close();
                    }
                } finally {
                    pelletModel.leaveCriticalSection();
                }
                // now we see what's in the inference model that isn't in the temp model and remove it
                try {
                    Queue<Statement> localRemovalQueue = new LinkedList<Statement>();
                    inferenceModel.enterCriticalSection(Lock.READ);
                    try {
                        ClosableIterator ci =
                            inferenceModel.listStatements(
                                pat.getSubject(), pat.getPredicate(), pat.getObject());
                        try {
                            for (ClosableIterator i = ci; i.hasNext(); ) {
                                Statement stmt = (Statement) i.next();
                                if (!tempModel.contains(stmt)) {
                                    localRemovalQueue.add(stmt);
                                }
                            }
                        } finally {
                            ci.close();
                        }
                    } finally {
                        inferenceModel.leaveCriticalSection();
                    }
                    // removals are queued first so we never mutate inferenceModel while
                    // iterating over it
                    for (Iterator<Statement> i = localRemovalQueue.iterator(); i.hasNext(); ) {
                        fullModel.enterCriticalSection(Lock.WRITE);
                        closePipe();
                        try {
                            retractCount++;
                            inferenceModel.remove(i.next());
                        } finally {
                            openPipe();
                            fullModel.leaveCriticalSection();
                        }
                    }
                    localRemovalQueue.clear();
                } catch (Exception e) {
                    log.error("Error getting inferences", e);
                }
                tempModel = null;
            }
            this.pelletListener.isConsistent = true;
            this.pelletListener.inErrorState = false;
            this.pelletListener.explanation = "";
            if (log.isDebugEnabled()) {
                log.info("Added " + addCount + " statements entailed by assertions");
                log.info("Retracted " + retractCount + " statements no longer entailed by assertions");
                log.info(
                    "Done getting new inferences: "
                        + (System.currentTimeMillis() - startTime) / 1000
                        + " seconds");
            }
        } catch (InconsistentOntologyException ioe) {
            // record the inconsistency and Pellet's explanation for callers to inspect
            this.pelletListener.isConsistent = false;
            String explanation = ((PelletInfGraph) pelletModel.getGraph()).getKB().getExplanation();
            this.pelletListener.explanation = explanation;
            log.error(ioe);
            log.error(explanation);
        } catch (Exception e) {
            this.pelletListener.inErrorState = true;
            log.error("Exception during inference", e);
        } finally {
            pelletListener.endReasoning();
        }
    }
}
/** Wraps {@code resource} as a CuisineCodeImpl, or returns null when it lacks the CuisineCode rdf:type. */
static CuisineCodeImpl getCuisineCode(Resource resource, Model model) throws JastorException {
    return model.contains(resource, RDF.type, CuisineCode.TYPE)
        ? new CuisineCodeImpl(resource, model)
        : null;
}
/** Wraps {@code resource} as an ExtendedUserBehaviourImpl, or returns null when it lacks the ExtendedUserBehaviour rdf:type. */
static ExtendedUserBehaviourImpl getExtendedUserBehaviour(Resource resource, Model model)
    throws JastorException {
    return model.contains(resource, RDF.type, ExtendedUserBehaviour.TYPE)
        ? new ExtendedUserBehaviourImpl(resource, model)
        : null;
}