/** Recursively outputs the contents of the given node. */
private static void dump(Node node) throws RepositoryException {
  // First output the node path
  System.out.println(node.getPath());
  // Skip the virtual (and large!) jcr:system subtree
  if (node.getName().equals("jcr:system")) {
    return;
  }

  // Then output the properties
  PropertyIterator properties = node.getProperties();
  while (properties.hasNext()) {
    Property property = properties.nextProperty();
    if (property.getDefinition().isMultiple()) {
      // A multi-valued property, print all values
      Value[] values = property.getValues();
      for (int i = 0; i < values.length; i++) {
        System.out.println(property.getPath() + " = " + values[i].getString());
      }
    } else {
      // A single-valued property
      System.out.println(property.getPath() + " = " + property.getString());
    }
  }

  // Finally output all the child nodes recursively
  NodeIterator nodes = node.getNodes();
  while (nodes.hasNext()) {
    dump(nodes.nextNode());
  }
}
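// Usage sketch for dump() above: a minimal, hypothetical driver. It assumes
// Jackrabbit's TransientRepository, default "admin"/"admin" credentials, and that
// dump() is accessible (e.g. declared in this class); none of this is from the
// original code, so adjust for your repository setup.
import javax.jcr.Repository;
import javax.jcr.Session;
import javax.jcr.SimpleCredentials;
import org.apache.jackrabbit.core.TransientRepository;

public class DumpExample {
  public static void main(String[] args) throws Exception {
    Repository repository = new TransientRepository();
    Session session =
        repository.login(new SimpleCredentials("admin", "admin".toCharArray()));
    try {
      // Walks the whole workspace, skipping jcr:system as implemented above.
      dump(session.getRootNode());
    } finally {
      session.logout();
    }
  }
}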
/**
 * A real node-function (using the node argument). Returns the next newer node of the same type.
 * Also a nice example of the difference between core and bridge.
 */
public Object successor() {
  if (node == null) {
    throw new IllegalArgumentException("successor is a node-function");
  }
  if (cloud != null) {
    log.debug("Using bridge (security restrictions will be honoured)");
    NodeManager nm = node.getNodeManager();
    NodeQuery q = nm.createQuery();
    StepField field = q.getStepField(nm.getField("number"));
    q.setConstraint(
        q.createConstraint(
            field, FieldCompareConstraint.GREATER, Integer.valueOf(node.getNumber())));
    q.addSortOrder(field, SortOrder.ORDER_ASCENDING);
    q.setMaxNumber(1);
    NodeIterator i = nm.getList(q).nodeIterator();
    return i.hasNext() ? i.nextNode() : null;
  } else {
    log.debug("Using core.");
    throw new UnsupportedOperationException("Core implementation was dropped. See source code.");
    /* This is how it would go with core objects:
    MMObjectBuilder builder = MMBase.getMMBase().getBuilder(node.getNodeManager().getName());
    NodeSearchQuery query = new NodeSearchQuery(builder);
    StepField field = query.getField(builder.getField("number"));
    BasicFieldValueConstraint cons = new BasicFieldValueConstraint(field, node.getNumber());
    cons.setOperator(FieldCompareConstraint.GREATER);
    query.setConstraint(cons);
    query.addSortOrder(field);
    query.setMaxNumber(1);
    try {
      java.util.Iterator<MMObjectNode> i = builder.getNodes(query).iterator();
      return i.hasNext() ? i.next() : null;
    } catch (Exception e) {
      return null;
    }
    */
  }
}
/**
 * Evaluates a node-set expression, returning the matching nodes as a list.
 *
 * @param node the current node
 * @param env the variable environment
 * @return a list of the selected nodes
 */
public Object evalObject(Node node, ExprEnvironment env) throws XPathException {
  NodeListImpl list = new NodeListImpl();
  NodeIterator iter = _pattern.select(node, env);
  Node value;
  while ((value = iter.nextNode()) != null) {
    list.add(value);
  }
  return list;
}
int[] getColumns(Resource test) {
  if (_model.contains(test, Vertere.column)) {
    NodeIterator listObjectsOfProperty = _model.listObjectsOfProperty(test, Vertere.column);
    List<RDFNode> toList = listObjectsOfProperty.toList();
    int[] columns = new int[toList.size()];
    for (int i = 0; i < toList.size(); i++) {
      columns[i] = toList.get(i).asLiteral().getInt();
    }
    return columns;
  } else {
    return new int[0];
  }
}
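// A standalone sketch of the same Jena pattern used by getColumns() above. The
// "http://example.org/ns#column" property and the model contents are hypothetical,
// and this uses current Jena's org.apache.jena.rdf.model packages (older code may
// use com.hp.hpl.jena instead).
import java.util.Arrays;
import java.util.List;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.NodeIterator;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;

public class ColumnsExample {
  public static void main(String[] args) {
    Model model = ModelFactory.createDefaultModel();
    Property column = model.createProperty("http://example.org/ns#column");
    Resource row = model.createResource("http://example.org/row");
    row.addProperty(column, model.createTypedLiteral(1));
    row.addProperty(column, model.createTypedLiteral(3));

    // Collect all integer objects of the property, as getColumns() does.
    NodeIterator objects = model.listObjectsOfProperty(row, column);
    List<RDFNode> values = objects.toList();
    int[] columns = new int[values.size()];
    for (int i = 0; i < values.size(); i++) {
      columns[i] = values.get(i).asLiteral().getInt();
    }
    // Prints both column indexes; statement order is not guaranteed by the model.
    System.out.println(Arrays.toString(columns));
  }
}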
public VNodeDefinition(Node node) throws RepositoryException {
  name = node.getProperty(JCR_NODETYPENAME).getString();

  // do properties
  properties = new HashMap<String, VPropertyDefinitionI>();
  childSuggestions = new HashMap<String, String>();
  NodeIterator nodeIterator = node.getNodes();
  while (nodeIterator.hasNext()) {
    Node definitionNode = nodeIterator.nextNode();
    String nodeType = definitionNode.getProperty(AbstractProperty.JCR_PRIMARYTYPE).getString();

    // do a property
    if (NT_PROPERTYDEFINITION.equals(nodeType)) {
      String propertyName = "*"; // default to wildcard name
      if (definitionNode.hasProperty(JCR_NAME)) {
        // only add non-autogenerated properties
        if (!definitionNode.getProperty(JCR_AUTOCREATED).getBoolean()) {
          propertyName = definitionNode.getProperty(JCR_NAME).getString();
          properties.put(propertyName, new VPropertyDefinition(definitionNode));
        }
      } else {
        // a property with no name means this node can accept custom properties
        canAddProperties = true;
      }
    }

    // do a child suggestion
    if (NT_CHILDNODEDEFINITION.equals(nodeType)) {
      // only handle well-defined child node definitions carrying both of these JCR properties
      if (definitionNode.hasProperty(JCR_NAME)
          && definitionNode.hasProperty(JCR_DEFAULTPRIMARYTYPE)) {
        childSuggestions.put(
            definitionNode.getProperty(JCR_NAME).getString(),
            definitionNode.getProperty(JCR_DEFAULTPRIMARYTYPE).getString());
      }
    }
  }

  // do supertypes
  supertypes = new HashSet<String>();
  if (node.hasProperty(JCR_SUPERTYPES)) {
    for (Value value : node.getProperty(JCR_SUPERTYPES).getValues()) {
      supertypes.add(value.getString());
    }
  }

  // set mixin status
  isMixin = node.hasProperty(JCR_ISMIXIN) && node.getProperty(JCR_ISMIXIN).getBoolean();
}
void unregisterNewNode(JCRNodeWrapper node) {
  if (!newNodes.isEmpty()) {
    newNodes.remove(node.getPath());
    try {
      if (node.hasNodes()) {
        NodeIterator it = node.getNodes();
        while (it.hasNext()) {
          unregisterNewNode((JCRNodeWrapper) it.next());
        }
      }
    } catch (RepositoryException e) {
      logger.warn("Error unregistering new nodes", e);
    }
  }
}
public RDFNode lookup(Resource identity, String sourceValue) {
  if (!_model.contains(identity, Vertere.lookup)) {
    return null;
  }
  Resource lookupResource = _model.getProperty(identity, Vertere.lookup).getResource();
  NodeIterator listObjectsOfProperty =
      _model.listObjectsOfProperty(lookupResource, Vertere.lookup_entry);
  while (listObjectsOfProperty.hasNext()) {
    RDFNode entry = listObjectsOfProperty.next();
    Resource asResource = entry.asResource();
    String key = _model.getProperty(asResource, Vertere.lookup_key).getString();
    if (sourceValue.equals(key)) {
      return _model.getProperty(asResource, Vertere.lookup_value).getObject();
    }
  }
  return null;
}
@Override
public void parse(final Builder builder) throws IOException {
  builder.startDoc(token(filename));
  final Stack<NodeIterator> stack = new Stack<NodeIterator>();
  stack.push(new NodeIterator(root));

  while (!stack.empty()) {
    final NodeIterator ni = stack.peek();
    if (ni.more()) {
      final Node n = ni.curr();
      if (n instanceof Element) {
        stack.push(new NodeIterator(n));

        atts.reset();
        final NamedNodeMap at = n.getAttributes();
        for (int a = 0, as = at.getLength(); a < as; ++a) {
          final Attr att = (Attr) at.item(a);
          final byte[] k = token(att.getName()), v = token(att.getValue());
          if (eq(k, XMLNS)) {
            builder.startNS(EMPTY, v);
          } else if (startsWith(k, XMLNSC)) {
            builder.startNS(ln(k), v);
          } else {
            atts.add(k, v);
          }
        }
        builder.startElem(token(n.getNodeName()), atts);
      } else if (n instanceof Text) {
        builder.text(new TokenBuilder(n.getNodeValue()));
      } else if (n instanceof Comment) {
        builder.comment(new TokenBuilder(n.getNodeValue()));
      } else if (n instanceof ProcessingInstruction) {
        builder.pi(new TokenBuilder(n.getNodeName() + ' ' + n.getNodeValue()));
      }
      ++nodes;
    } else {
      stack.pop();
      if (stack.empty()) break;
      builder.endElem(token(stack.peek().curr().getNodeName()));
    }
  }
  builder.endDoc();
}
public static void buildDefinitions(Session session) {
  log.info("started building node definitions");
  String nodeName = "";
  try {
    allNodes = new HashMap<String, VNodeDefinition>();
    Node rootNode = session.getNode("/jcr:system/jcr:nodeTypes");
    NodeIterator nodeIterator = rootNode.getNodes();
    while (nodeIterator.hasNext()) {
      Node node = nodeIterator.nextNode();
      nodeName = node.getName();
      VNodeDefinition vNodeDefinition = new VNodeDefinition(node);
      customizeDefinition(vNodeDefinition); // possibly customize this definition
      allNodes.put(nodeName, vNodeDefinition);
    }
    log.info("finished building nodes");
  } catch (RepositoryException re) {
    log.warn("Could not build node definitions, died at " + nodeName, re);
  } finally {
    if (session != null) {
      session.logout();
    }
  }
}
public Graph neighbourhoodGraph(int nnodes[], int hops) {
  PrimaryHashMap<Integer, String> nodes;
  PrimaryHashMap<String, Integer> nodesReverse;
  try {
    // Back the new node maps with a temporary JDBM record manager.
    File auxFile = File.createTempFile("graph-maps-" + System.currentTimeMillis(), "aux");
    auxFile.deleteOnExit();
    RecordManager recMan = RecordManagerFactory.createRecordManager(auxFile.getAbsolutePath());
    nodes = recMan.hashMap("nodes");
    nodesReverse = recMan.hashMap("nodesReverse");
  } catch (IOException ex) {
    throw new Error(ex);
  }
  nodes.clear();
  nodesReverse.clear();

  WeightedArcSet list1 = new WeightedArcSet();
  // Frontier: maps a node to its distance (in hops) from the seed nodes.
  Int2IntAVLTreeMap map = new Int2IntAVLTreeMap();
  // Nodes whose neighbourhood has already been expanded.
  IntSet set = new IntLinkedOpenHashSet();
  int numIterators = 100;

  Constructor[] cons = WeightedArc.class.getDeclaredConstructors();
  for (int i = 0; i < cons.length; i++) cons[i].setAccessible(true);

  for (int n : nnodes) map.put(n, 0);

  // Pool of sequential node iterators, so a scan can resume near the target node
  // instead of restarting from node 0 every time.
  NodeIterator its[] = new NodeIterator[numIterators];
  int itNum[] = new int[numIterators];
  for (int n = 0; n < its.length; n++) {
    its[n] = nodeIterator();
    itNum[n] = 0;
  }

  while (map.size() != 0) {
    // Pick the next frontier node at or after the least advanced iterator position.
    Integer node = 0;
    for (int n = 0; n < its.length; n++) if (itNum[n] <= node) node = itNum[n];
    node = map.tailMap(node).firstKey();
    if (node == null) node = map.firstKey();

    // Choose the pooled iterator that can reach 'node' with the least skipping.
    NodeIterator it = null;
    Integer aux1 = 0;
    int iit = 0;
    for (int n = 0; n < its.length; n++) {
      if (!its[n].hasNext()) {
        its[n] = nodeIterator();
        itNum[n] = 0;
      }
      if (itNum[n] == node) {
        it = its[n];
        aux1 = itNum[n];
        iit = n;
        break;
      }
      if (itNum[n] < node && itNum[n] >= aux1) {
        it = its[n];
        aux1 = itNum[n];
        iit = n;
      }
    }
    if (it == null) {
      its[0] = nodeIterator();
      itNum[0] = 0;
      it = its[0];
    }

    // Advance the chosen iterator until it reaches 'node'.
    while (it != null && (aux1 = it.nextInt()) != null && aux1 >= 0 && aux1 < node) {}
    itNum[iit] = aux1 + 1;

    Integer aux2 = null;

    // Record all outgoing arcs, renumbering both endpoints into the new graph.
    ArcLabelledNodeIterator.LabelledArcIterator suc = it.successors();
    while ((aux2 = suc.nextInt()) != null && aux2 >= 0 && (aux2 < graph.numNodes()))
      try {
        if (commit++ % COMMIT_SIZE == 0) {
          try {
            nodes.getRecordManager().commit();
          } catch (IOException e) {
            throw new Error(e);
          }
          try {
            nodesReverse.getRecordManager().commit();
          } catch (IOException e) {
            throw new Error(e);
          }
        }
        if (!nodesReverse.containsKey(this.nodes.get(aux1))) {
          nodes.put(nodes.size(), this.nodes.get(aux1));
          nodesReverse.put(this.nodes.get(aux1), nodesReverse.size());
        }
        if (!nodesReverse.containsKey(this.nodes.get(aux2))) {
          nodes.put(nodes.size(), this.nodes.get(aux2));
          nodesReverse.put(this.nodes.get(aux2), nodesReverse.size());
        }
        int aaux1 = nodesReverse.get(this.nodes.get(aux1));
        int aaux2 = nodesReverse.get(this.nodes.get(aux2));
        WeightedArc arc1 = (WeightedArc) cons[0].newInstance(aaux1, aaux2, suc.label().getFloat());
        list1.add(arc1);
        // Grow the frontier while still within the hop limit.
        if (map.get(node) < hops) {
          if (!set.contains(aux1) && (map.get(aux1) == null || map.get(aux1) > map.get(node) + 1))
            map.put(aux1.intValue(), map.get(node) + 1);
          if (!set.contains(aux2) && (map.get(aux2) == null || map.get(aux2) > map.get(node) + 1))
            map.put(aux2.intValue(), map.get(node) + 1);
        }
      } catch (Exception ex) {
        ex.printStackTrace();
        throw new Error(ex);
      }

    // Record all incoming arcs the same way, with the endpoints swapped.
    ArcLabelledNodeIterator.LabelledArcIterator anc = it.ancestors();
    while ((aux2 = anc.nextInt()) != null && aux2 >= 0 && (aux2 < graph.numNodes()))
      try {
        if (commit++ % COMMIT_SIZE == 0) {
          try {
            nodes.getRecordManager().commit();
          } catch (IOException e) {
            throw new Error(e);
          }
          try {
            nodesReverse.getRecordManager().commit();
          } catch (IOException e) {
            throw new Error(e);
          }
        }
        if (!nodesReverse.containsKey(this.nodes.get(aux1))) {
          nodes.put(nodes.size(), this.nodes.get(aux1));
          nodesReverse.put(this.nodes.get(aux1), nodesReverse.size());
        }
        if (!nodesReverse.containsKey(this.nodes.get(aux2))) {
          nodes.put(nodes.size(), this.nodes.get(aux2));
          nodesReverse.put(this.nodes.get(aux2), nodesReverse.size());
        }
        int aaux1 = nodesReverse.get(this.nodes.get(aux1));
        int aaux2 = nodesReverse.get(this.nodes.get(aux2));
        WeightedArc arc1 = (WeightedArc) cons[0].newInstance(aaux2, aaux1, anc.label().getFloat());
        list1.add(arc1);
        if (map.get(node) < hops) {
          if (!set.contains(aux1) && (map.get(aux1) == null || map.get(aux1) > map.get(node) + 1))
            map.put(aux1.intValue(), map.get(node) + 1);
          if (!set.contains(aux2) && (map.get(aux2) == null || map.get(aux2) > map.get(node) + 1))
            map.put(aux2.intValue(), map.get(node) + 1);
        }
      } catch (Exception ex) {
        ex.printStackTrace();
        throw new Error(ex);
      }

    map.remove(node);
    set.add(node);
  }

  Graph newGraph = new Graph(list1.toArray(new WeightedArc[0]));
  newGraph.nodes.clear();
  newGraph.nodesReverse.clear();
  newGraph.nodes = nodes;
  newGraph.nodesReverse = nodesReverse;
  return newGraph;
}
public ArcLabelledNodeIterator.LabelledArcIterator ancestors(int x) {
  NodeIterator iterator = advanceIterator(x);
  return iterator.ancestors();
}

public Label[] ancestorLabelArray(int x) {
  NodeIterator iterator = advanceIterator(x);
  return iterator.labelArray();
}

public int[] ancestorArray(int x) {
  NodeIterator iterator = advanceIterator(x);
  return iterator.successorArray();
}

public double instrength(int x) {
  NodeIterator iterator = advanceIterator(x);
  return iterator.instrength();
}

public int indegree(int x) {
  NodeIterator iterator = advanceIterator(x);
  return iterator.indegree();
}
/**
 * Gets the JSON map for a given resource by applying the river settings.
 *
 * @param rs resource being processed
 * @param properties properties to be indexed
 * @param model model returned by the indexing query
 * @param getPropLabel if set to true, all URI property values will be indexed as their label. The
 *     label is taken as the value of one of the properties set in {@link #uriDescriptionList}.
 * @return map of properties to be indexed for rs
 */
private Map<String, ArrayList<String>> getJsonMap(
    Resource rs, Set<Property> properties, Model model, boolean getPropLabel) {
  Map<String, ArrayList<String>> jsonMap = new HashMap<String, ArrayList<String>>();
  ArrayList<String> results = new ArrayList<String>();

  if (addUriForResource) {
    results.add("\"" + rs.toString() + "\"");
    jsonMap.put("http://www.w3.org/1999/02/22-rdf-syntax-ns#about", results);
  }

  Set<String> rdfLanguages = new HashSet<String>();

  for (Property prop : properties) {
    NodeIterator niter = model.listObjectsOfProperty(rs, prop);
    String property = prop.toString();
    results = new ArrayList<String>();

    String lang;
    String currValue;

    while (niter.hasNext()) {
      RDFNode node = niter.next();
      currValue = getStringForResult(node, getPropLabel);
      if (addLanguage) {
        if (node.isLiteral()) {
          lang = node.asLiteral().getLanguage();
          if (!lang.isEmpty()) {
            rdfLanguages.add("\"" + lang + "\"");
          }
        }
      }

      String shortValue = currValue;
      int currLen = currValue.length();
      // Unquote the string
      if (currLen > 1) shortValue = currValue.substring(1, currLen - 1);

      // Skip the value if the property has a whitelist that does not contain it,
      // or a blacklist that does contain it.
      boolean whiteMapCond =
          whiteMap.containsKey(property) && !whiteMap.get(property).contains(shortValue);
      boolean blackMapCond =
          blackMap.containsKey(property) && blackMap.get(property).contains(shortValue);
      if (whiteMapCond || blackMapCond) {
        continue;
      }

      if (normalizeObj.containsKey(shortValue)) {
        results.add("\"" + normalizeObj.get(shortValue) + "\"");
      } else {
        results.add(currValue);
      }
    }

    // Do not index empty properties
    if (results.isEmpty()) continue;

    if (normalizeProp.containsKey(property)) {
      property = normalizeProp.get(property);
      if (jsonMap.containsKey(property)) {
        jsonMap.get(property).addAll(results);
      } else {
        jsonMap.put(property, results);
      }
    } else {
      jsonMap.put(property, results);
    }
  }

  if (addLanguage) {
    if (rdfLanguages.isEmpty() && !language.isEmpty()) rdfLanguages.add(language);
    if (!rdfLanguages.isEmpty()) jsonMap.put("language", new ArrayList<String>(rdfLanguages));
  }

  for (Map.Entry<String, String> it : normalizeMissing.entrySet()) {
    if (!jsonMap.containsKey(it.getKey())) {
      ArrayList<String> res = new ArrayList<String>();
      res.add("\"" + it.getValue() + "\"");
      jsonMap.put(it.getKey(), res);
    }
  }

  return jsonMap;
}
public static void validate(@Nullable Node original, Node n) {
  // Checks the standard red-black invariants: the Null sentinel is black and
  // self-contained, the root is black, no red node has a red child, and every
  // root-to-leaf path contains the same number of black nodes.
  if (original != null) {
    validate(null, original);
  }
  if (Null.red) {
    trouble(original, n, new RuntimeException("red null!"));
  }
  if (Null.left != Null || Null.right != Null || Null.value != Null || Null.key != null) {
    trouble(original, n, new RuntimeException("corrupted null!"));
  }
  if (n.red) {
    trouble(original, n, new RuntimeException("red root!"));
  }

  NodeStack s = new NodeStack();
  int blackCount = -1;
  for (NodeIterator it = new NodeIterator(s, n); it.hasNext(); ) {
    int index = it.stack.index;
    Node x = it.next();
    if (x.key instanceof Node) {
      trouble(original, n, new RuntimeException("node key!"));
    }
    if (x.key == null) {
      trouble(original, n, new RuntimeException("null key!"));
    }
    if (x.value instanceof Node) {
      validate(original, (Node) x.value);
    }
    if (x.red && (x.left.red || x.right.red)) {
      trouble(original, n, new RuntimeException("red node has red child(ren)!"));
    }
    if (x.left == Node.Null && x.right == Node.Null) {
      // Leaf: count the black nodes on the path from the root and compare with
      // the count seen on the first leaf path.
      int count = 0;
      for (int i = 0; i < index; ++i) {
        if (!s.array[i].red) {
          ++count;
        }
      }
      if (!x.red) {
        ++count;
      }
      if (blackCount == -1) {
        blackCount = count;
      } else if (count != blackCount) {
        trouble(
            original,
            n,
            new RuntimeException("inconsistent number of black nodes per paths to leaves!"));
      }
    }
  }
}
@GET
@Path("/{type}")
@Produces({Utils.MEDIA_TYPE_APPLICATION_HAL_PLUS_JSON, MediaType.APPLICATION_JSON})
public Object getByType(
    @PathParam("workspace") String workspace,
    @PathParam("language") String language,
    @PathParam("type") String type,
    @QueryParam("nameContains") List<String> nameConstraints,
    @QueryParam("orderBy") String orderBy,
    @QueryParam("limit") int limit,
    @QueryParam("offset") int offset,
    @QueryParam("depth") int depth,
    @Context UriInfo context) {
  if (API.isQueryDisabled()) {
    APIExceptionMapper.LOGGER.debug("Types endpoint is disabled. Attempted query on " + type);
    return Response.status(Response.Status.NOT_FOUND).build();
  }

  final String unescapedNodetype = Names.unescape(type);
  if (API.excludedNodeTypes.contains(unescapedNodetype)) {
    return Response.status(Response.Status.FORBIDDEN)
        .entity("'" + unescapedNodetype + "' is not available for querying.")
        .build();
  }

  Session session = null;
  try {
    session = getSession(workspace, language);

    final QueryObjectModelFactory qomFactory =
        session.getWorkspace().getQueryManager().getQOMFactory();
    final ValueFactory valueFactory = session.getValueFactory();
    final Selector selector = qomFactory.selector(unescapedNodetype, SELECTOR_NAME);

    // language constraint: either jcr:language doesn't exist or jcr:language is the current
    // language
    Constraint constraint =
        qomFactory.or(
            qomFactory.not(qomFactory.propertyExistence(SELECTOR_NAME, Constants.JCR_LANGUAGE)),
            stringComparisonConstraint(
                qomFactory.propertyValue(SELECTOR_NAME, Constants.JCR_LANGUAGE),
                language,
                qomFactory,
                valueFactory));

    // if "nameContains" query parameters were passed, only return nodes whose name contains
    // the specified terms
    if (nameConstraints != null && !nameConstraints.isEmpty()) {
      for (String name : nameConstraints) {
        final Comparison likeConstraint =
            qomFactory.comparison(
                qomFactory.nodeLocalName(SELECTOR_NAME),
                QueryObjectModelFactory.JCR_OPERATOR_LIKE,
                qomFactory.literal(
                    valueFactory.createValue("%" + name + "%", PropertyType.STRING)));
        constraint = qomFactory.and(constraint, likeConstraint);
      }
    }

    Ordering[] orderings = null;
    // ordering is deactivated because it currently doesn't work, probably due to a bug in
    // QueryServiceImpl
    if (Utils.exists(orderBy)) {
      if ("desc".equalsIgnoreCase(orderBy)) {
        orderings = new Ordering[] {qomFactory.descending(qomFactory.nodeLocalName(SELECTOR_NAME))};
      } else {
        orderings = new Ordering[] {qomFactory.ascending(qomFactory.nodeLocalName(SELECTOR_NAME))};
      }
    }

    final QueryObjectModel query =
        qomFactory.createQuery(
            selector,
            constraint,
            orderings,
            new Column[] {qomFactory.column(SELECTOR_NAME, null, null)});
    if (limit > 0) {
      query.setLimit(limit);
    }
    query.setOffset(offset);

    final QueryResult queryResult = query.execute();
    final NodeIterator nodes = queryResult.getNodes();
    final List<JSONNode> result = new LinkedList<JSONNode>();
    final Filter filter = Utils.getFilter(context);
    while (nodes.hasNext()) {
      final Node resultNode = nodes.nextNode();
      if (filter.acceptChild(resultNode)) {
        JSONNode node = getFactory().createNode(resultNode, filter, depth);
        result.add(node);
      }
    }
    return Response.ok(result).build();
  } catch (Exception e) {
    throw new APIException(e);
  } finally {
    closeSession(session);
  }
}
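// Example request shape for the endpoint above. The URL prefix and the node-type
// escaping scheme are deployment-specific, so this is a hypothetical illustration,
// not taken from the original code:
//
//   GET <api-base>/{workspace}/{language}/types/{type}?nameContains=news&limit=10&offset=0&depth=1
//
// The response is a JSON array of the matching nodes, filtered by the jcr:language
// constraint and the "nameContains" LIKE constraints built in the method; querying
// an excluded node type returns 403, and a disabled query endpoint returns 404.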