/**
 * Finds the direct child with the given name, or returns null if there is no such child.
 */
public DataNode get(String name) {
    for (DataNode i : getChildren()) {
        if (i.getName().equals(name)) {
            return i;
        }
    }
    return null;
}
/**
 * Called after modifying the members list. Checks that names within the list are unique
 * and that every item has this node as its parent.
 *
 * @param newItem the item that was just added, or null; used only to build a more
 *                descriptive error message
 */
private void checkConsistency(DataNode newItem) {
    if (members == null) {
        return; // nothing changed
    }
    Set<String> names = new HashSet<>();
    for (DataNode item : members) {
        if (names.contains(item.getName())) {
            if (newItem != null) {
                throw new RuntimeException("Found duplicate name: " + item.getName()
                        + " when adding item: " + newItem.getName()
                        + " to directory: " + getName());
            } else {
                throw new RuntimeException("Found duplicate name: " + item.getName()
                        + " in parent: " + getName());
            }
        }
        if (item.getParent() != this) {
            throw new RuntimeException("Found item in this set which does not have this item as its parent: "
                    + item.getName() + ". Its parent is: " + item.getParent().getName()
                    + " and my name is: " + this.getName());
        }
        names.add(item.getName());
    }
}
/**
 * Recalculates the hash of the given item if it is dirty. Directory hashes are derived
 * from their children, so children are recalculated first, then the directory's own hash
 * is computed and the bytes produced by the hash calculation are stored in the blob store
 * under the new hash.
 */
private void recalcHashes(DataNode item) throws IOException {
    if (item.dirty == null) {
        return; // not dirty, which means no children are dirty
    }
    // only directories have derived hashes
    if (item instanceof DirectoryNode) {
        DirectoryNode dirNode = (DirectoryNode) item;
        // recurse into children first so their hashes are current
        for (DataNode child : dirNode) {
            recalcHashes(child);
        }
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        hashCalc.sort(dirNode.getChildren());
        String newHash = hashCalc.calcHash(dirNode, bout);
        item.setHash(newHash);
        byte[] arrTriplets = bout.toByteArray();
        blobStore.setBlob(newHash, arrTriplets);
        log.info("recalcHashes: " + item.name + " children:" + dirNode.members.size() + " hash=" + newHash);
    }
}
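// Illustrative usage sketch only (called from within this class). The fields used above
// (members, hashCalc, blobStore) come from this file; "root" and "docs" below are
// hypothetical names introduced for the example.
//
//   DirectoryNode root = ...;            // a directory whose members were just changed
//   root.checkConsistency(null);         // verify unique names and correct parent links
//   DataNode child = root.get("docs");   // linear lookup by name; null if absent
//   recalcHashes(root);                  // recompute dirty hashes bottom-up and persist blobs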