public static Collection<RepositoryElement> getChildren(
    final RepositoryDirectory parent, final Collection<?> items) {
  List<RepositoryElement> elements =
      Lists.newArrayList(
          Iterators.transform(
              items.iterator(),
              new Function<Object, RepositoryElement>() {
                public RepositoryElement apply(Object from) {
                  if (from instanceof BuildableItemWithBuildWrappers) {
                    return new ProjectElement(
                        parent, ((BuildableItemWithBuildWrappers) from).asProject());
                  }
                  if (from instanceof MultiBranchProject) {
                    return new MultiBranchProjectElement(parent, (MultiBranchProject) from);
                  }
                  if (from instanceof Job) {
                    return new ProjectElement(parent, (Job) from);
                  }
                  return null;
                }
              }));

  // Squash ones we couldn't sensibly find an element for.
  return Collections2.filter(
      elements,
      new Predicate<RepositoryElement>() {
        @Override
        public boolean apply(RepositoryElement input) {
          return input != null;
        }
      });
}
@Test
public void testReadMultiPage() throws IOException {
  onTableGet(basicTableSchema());

  TableDataList page1 = rawDataList(rawRow("Row1", 1)).setPageToken("page2");
  TableDataList page2 = rawDataList(rawRow("Row2", 2)).setTotalRows(2L);

  when(mockClient.tabledata()).thenReturn(mockTabledata);
  when(mockTabledata.list(anyString(), anyString(), anyString())).thenReturn(mockTabledataList);
  when(mockTabledataList.execute()).thenReturn(page1).thenReturn(page2);

  try (BigQueryTableRowIterator iterator =
      BigQueryTableRowIterator.of(
          mockClient, BigQueryIO.parseTableSpec("project:dataset.table"))) {
    List<String> names = new LinkedList<>();
    Iterators.addAll(
        names,
        Iterators.transform(
            iterator,
            new Function<TableRow, String>() {
              @Override
              public String apply(TableRow input) {
                return (String) input.get("name");
              }
            }));

    Assert.assertThat(names, Matchers.hasItems("Row1", "Row2"));

    verifyTableGet();
    verifyTabledataList();
    // The second call should have used a page token.
    verify(mockTabledataList).setPageToken("page2");
  }
}
@Override
public void putAll(Iterator<? extends RevObject> objects, BulkOpListener listener) {
  // final List<RevCommit> addedCommits = Lists.newLinkedList();
  final Iterator<? extends RevObject> collectingIterator =
      Iterators.transform(
          objects,
          (obj) -> {
            if (obj instanceof RevCommit) {
              final GraphDatabase graphDatabase = graphDb.get();
              RevCommit commit = (RevCommit) obj;
              ObjectId commitId = commit.getId();
              ImmutableList<ObjectId> parentIds = commit.getParentIds();
              graphDatabase.put(commitId, parentIds);
              // addedCommits.add((RevCommit) input);
            }
            return obj;
          });

  super.putAll(collectingIterator, listener);

  // if (!addedCommits.isEmpty()) {
  //   GraphDatabase graphDatabase = graphDb.get();
  //   for (RevCommit commit : addedCommits) {
  //     ObjectId commitId = commit.getId();
  //     ImmutableList<ObjectId> parentIds = commit.getParentIds();
  //     graphDatabase.put(commitId, parentIds);
  //   }
  // }
}
@Override
public Iterator<byte[]> iterator() {
  if (isEmpty()) {
    return Iterators.emptyIterator();
  }
  return Iterators.transform(entries.iterator(), ENTRY_TO_BYTE_ARRAY);
}
/**
 * Merges the given iterators into a single iterator by repeatedly emitting the smallest element
 * (according to {@code comparator}) among the current heads of the inputs. The result is globally
 * sorted when each input iterator is itself sorted by the same comparator.
 */
public static <T> Iterator<T> mergeIterators(
    Iterator<? extends Iterator<? extends T>> iterators, final Comparator<? super T> comparator) {
  final List<PeekingIterator<? extends T>> peekingIterators =
      Lists.newArrayList(
          Iterators.transform(
              iterators,
              new Function<Iterator<? extends T>, PeekingIterator<? extends T>>() {
                public PeekingIterator<? extends T> apply(Iterator<? extends T> from) {
                  return new PeekingIterator<T>(from);
                }
              }));

  return new AbstractIterator<T>() {
    protected T computeNext() {
      int bestIndex = -1;
      T bestElement = null;
      for (int i = 0; i < peekingIterators.size(); i++) {
        PeekingIterator<? extends T> it = peekingIterators.get(i);
        if (it.hasNext()) {
          T element = it.peek();
          if (bestElement == null || comparator.compare(element, bestElement) < 0) {
            bestElement = element;
            bestIndex = i;
          }
        }
      }
      if (bestIndex == -1) {
        return endOfData();
      } else {
        return peekingIterators.get(bestIndex).next();
      }
    }
  };
}
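A minimal usage sketch for mergeIterators above. It assumes the PeekingIterator used in the snippet is a local class with an Iterator-accepting constructor (Guava's own PeekingIterator is an interface obtained via Iterators.peekingIterator); the lists and values here are illustrative only.

// Hypothetical inputs: each iterator is already sorted by the same comparator.
List<Integer> evens = Arrays.asList(2, 4, 6);
List<Integer> odds = Arrays.asList(1, 3, 5);
Iterator<Iterator<? extends Integer>> inputs =
    Arrays.<Iterator<? extends Integer>>asList(evens.iterator(), odds.iterator()).iterator();

// Lazily yields 1, 2, 3, 4, 5, 6 as it is consumed.
Iterator<Integer> merged = mergeIterators(inputs, Ordering.<Integer>natural());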
@Test
public void testReOrderingOfChildNodesWithRemovedChild() throws Exception {
  // GIVEN
  ((MockNode) baseNode).setPrimaryNodeType(new MockNodeType(NodeTypes.Content.NAME, null, true));
  baseNode.addNode("child-a");
  baseNode.addNode("child-b");
  baseNode.addNode("child-c");

  JcrNodeAdapter item = new JcrNodeAdapter(baseNode);
  item.removeChild(new JcrNodeAdapter(baseNode.getNode("child-c")));
  item.addChild(new JcrNodeAdapter(baseNode.getNode("child-b")));
  item.addChild(new JcrNodeAdapter(baseNode.getNode("child-a")));

  // WHEN
  item.applyChanges();

  // THEN
  List<String> nodes =
      Lists.newArrayList(
          Iterators.transform(
              item.getJcrItem().getNodes(),
              new Function<Node, String>() {
                @Nullable
                @Override
                public String apply(@Nullable Node node) {
                  return NodeUtil.getName(node);
                }
              }));
  assertThat(nodes, contains("child-b", "child-a"));
}
private Iterator<MessageForRender> getMessagesForId(FacesContext context, String clientId) {
  Iterator<MessageForRender> msgIter;
  msgIter =
      Iterators.transform(
          context.getMessages(clientId),
          new MessageTransformer(null == clientId ? "" : clientId));
  return msgIter;
}
@Override
public Iterator<Blob> iterator() {
  return Iterators.transform(
      infoList.iterator(),
      new Function<BlobInfo, Blob>() {
        @Override
        public Blob apply(BlobInfo info) {
          return new Blob(storage, info);
        }
      });
}
// region Object support

@Override
public String toString() {
  Function<VectorEntry, String> label =
      new Function<VectorEntry, String>() {
        @Override
        public String apply(VectorEntry e) {
          return String.format("%d: %.3f", e.getKey(), e.getValue());
        }
      };
  return "{" + StringUtils.join(Iterators.transform(fastIterator(), label), ", ") + "}";
}
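As a worked example of the formatting above: in a locale that uses '.' as the decimal separator, a vector holding the entries (1, 2.5) and (3, 0.125) renders as {1: 2.500, 3: 0.125}, since each entry passes through String.format("%d: %.3f", key, value) before being joined with ", " and wrapped in braces.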
/** Returns an iterator of frequencyDist names. */
private Iterator<String> freqNames(org.apache.commons.math.stat.Frequency freq) {
  return Iterators.transform(
      freq.valuesIterator(),
      new Function<Comparable<?>, String>() {
        @Override
        public String apply(Comparable<?> input) {
          return input.toString();
        }
      });
}
@Override
public Iterator<Integer> iterator() {
  return Iterators.transform(
      baseIndex.iterator(),
      new Function<Integer, Integer>() {
        @Override
        public Integer apply(@Nullable Integer input) {
          return conversionBuffer.get(input);
        }
      });
}
<T extends FramedElement> Iterator<T> frame(
    Iterator<? extends Element> pipeline, final Class<T> kind) {
  return Iterators.transform(
      pipeline,
      new Function<Element, T>() {
        @Override
        public T apply(Element element) {
          return frameElement(element, kind);
        }
      });
}
@Nullable
public Set<String> routingValues() {
  if (clusteredBy.isPresent()) {
    HashSet<String> result = new HashSet<>(clusteredBy.get().size());
    Iterators.addAll(
        result,
        Iterators.transform(clusteredBy.get().iterator(), ValueSymbolVisitor.STRING.function));
    return result;
  } else {
    return null;
  }
}
@Override
public <T> Iterator<T> getServiceIterator(
    Iterable<ServiceReference> references, Class<T> serviceClass) {
  return Iterators.transform(
      references.iterator(),
      new Function<ServiceReference, T>() {
        @SuppressWarnings("unchecked")
        @Override
        public T apply(ServiceReference input) {
          return (T) bundleContext.getService(input);
        }
      });
}
@Override
public Iterator<Tree<T>> treeIterator() {
  Iterator<Iterator<Tree<T>>> treeIterators =
      Iterators.transform(
          chld.iterator(),
          new Function<Tree<T>, Iterator<Tree<T>>>() {
            @Override
            public Iterator<Tree<T>> apply(Tree<T> input) {
              return input.treeIterator();
            }
          });
  return Iterators.concat(treeIterators);
}
/**
 * Query over a list of edges in the graph.
 *
 * @param ids The ids of the edges.
 * @return The query.
 */
public EdgeTraversal<?, ?, ?> e(final Collection<?> ids) {
  return new TraversalImpl(
          this,
          Iterators.transform(
              ids.iterator(),
              new Function<Object, Edge>() {
                @Override
                public Edge apply(Object id) {
                  return delegate.getEdge(id);
                }
              }))
      .castToEdges();
}
/**
 * Query over a list of vertices in the graph.
 *
 * @param ids The ids of the vertices.
 * @return The query.
 */
public VertexTraversal<?, ?, ?> v(final Object... ids) {
  return new TraversalImpl(
          this,
          Iterators.transform(
              Iterators.forArray(ids),
              new Function<Object, Vertex>() {
                @Override
                public Vertex apply(Object id) {
                  return delegate.getVertex(id);
                }
              }))
      .castToVertices();
}
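A hedged usage sketch of the two traversal entry points above (e over an id collection, v over an id array). Here graph stands for an instance of the class that declares these methods, and the ids are purely illustrative; delegate.getVertex/getEdge resolve them against the underlying Blueprints graph.

VertexTraversal<?, ?, ?> vertices = graph.v(1L, 2L, 3L);
EdgeTraversal<?, ?, ?> edges = graph.e(Arrays.asList("10", "11"));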
private Iterator<Feature> transformIterator(
    Iterator<NodeRef> nodeIterator, final RevFeatureType newFeatureType) {
  Iterator<Feature> iterator =
      Iterators.transform(
          nodeIterator,
          new Function<NodeRef, Feature>() {
            @Override
            public Feature apply(NodeRef node) {
              return alter(node, newFeatureType);
            }
          });
  return iterator;
}
@Override
public Iterator<Item> iterator() {
  try {
    return Iterators.transform(
        this.data.iterate(this.tbl, this.conds).iterator(),
        new Function<Attributes, Item>() {
          @Override
          public Item apply(final Attributes input) {
            return new MkItem(MkFrame.this.data, MkFrame.this.tbl, input);
          }
        });
  } catch (final IOException ex) {
    throw new IllegalStateException(ex);
  }
}
@Override
public Iterator<MatrixSlice> iterateAll() {
  try {
    return Iterators.transform(
        new SequenceFileDirIterator<IntWritable, VectorWritable>(
            new Path(rowPath, "*"), PathType.GLOB, PathFilters.logsCRCFilter(), null, true, conf),
        new Function<Pair<IntWritable, VectorWritable>, MatrixSlice>() {
          @Override
          public MatrixSlice apply(Pair<IntWritable, VectorWritable> from) {
            return new MatrixSlice(from.getSecond().get(), from.getFirst().get());
          }
        });
  } catch (IOException ioe) {
    throw new IllegalStateException(ioe);
  }
}
@Override
protected Iterator<DataElement> getInputSet() throws ResourceInitializationException {
  String filename = System.getProperty(DIR_PROPERTY);
  try {
    if (filename == null) {
      Iterator<Resource> files = getFilesFromParams();
      return iterator(files);
    } else {
      File file = new File(filename);
      String[] files = file.list();
      return iterator(
          Iterators.transform(Iterators.forArray(files), new StringToResourceFunction("")));
    }
  } catch (IOException e) {
    throw new ResourceInitializationException(e);
  }
}
@Test
public void testBindingVars() throws Exception {
  final Binding binding = testObj.nextBinding();
  final List<String> vars =
      ImmutableList.copyOf(
          Iterators.transform(
              binding.vars(),
              new Function<Var, String>() {
                @Override
                public String apply(final Var var) {
                  return var.getVarName();
                }
              }));
  assertArrayEquals(columnNames, vars.toArray());
  assertFalse(binding.isEmpty());
}
@Override
public void runInternal(GeogitCLI cli) throws Exception {
  checkState(cli.getGeogit() != null, "Not a geogit repository: " + cli.getPlatform().pwd());

  String ref;
  if (refList.isEmpty()) {
    ref = null;
  } else {
    ref = refList.get(0);
  }

  Iterator<RevObject> iter = cli.getGeogit() //
      .command(WalkGraphOp.class)
      .setReference(ref) //
      // .setStrategy(lsStrategy) //
      .call();

  final ConsoleReader console = cli.getConsole();
  if (!iter.hasNext()) {
    if (ref == null) {
      console.println("The working tree is empty");
    } else {
      console.println("The specified path is empty");
    }
    return;
  }

  Function<RevObject, CharSequence> printFunctor =
      new Function<RevObject, CharSequence>() {
        @Override
        public CharSequence apply(RevObject input) {
          if (verbose) {
            return String.format("%s: %s %s", input.getId(), input.getType(), input);
          } else {
            return String.format("%s: %s", input.getId(), input.getType());
          }
        }
      };

  Iterator<CharSequence> lines = Iterators.transform(iter, printFunctor);
  while (lines.hasNext()) {
    console.println(lines.next());
  }
  console.flush();
}
/**
 * This method returns an HTTP response with content body appropriate to the following arguments.
 *
 * @param rangeValue starting and ending byte offsets, see {@link Range}
 * @param limit is the number of child resources returned in the response, -1 for all
 * @param rdfStream to which response RDF will be concatenated
 * @return HTTP response
 * @throws IOException
 */
protected Response getContent(final String rangeValue, final int limit, final RdfStream rdfStream)
    throws IOException {
  if (resource() instanceof FedoraBinary) {
    final String contentTypeString = ((FedoraBinary) resource()).getMimeType();
    final Lang lang = contentTypeToLang(contentTypeString);
    if (!contentTypeString.equals("text/plain") && lang != null) {
      final String format = lang.getName().toUpperCase();
      final InputStream content = ((FedoraBinary) resource()).getContent();
      final Model inputModel = createDefaultModel().read(content, (resource()).toString(), format);
      rdfStream.concat(Iterators.transform(inputModel.listStatements(), Statement::asTriple));
    } else {
      final MediaType mediaType = MediaType.valueOf(contentTypeString);
      if (MESSAGE_EXTERNAL_BODY.isCompatible(mediaType)
          && mediaType.getParameters().containsKey("access-type")
          && mediaType.getParameters().get("access-type").equals("URL")
          && mediaType.getParameters().containsKey("URL")) {
        try {
          return temporaryRedirect(new URI(mediaType.getParameters().get("URL"))).build();
        } catch (final URISyntaxException e) {
          throw new RepositoryRuntimeException(e);
        }
      }
      return getBinaryContent(rangeValue);
    }
  } else {
    rdfStream.concat(getResourceTriples(limit));
    if (prefer != null) {
      prefer.getReturn().addResponseHeaders(servletResponse);
    }
  }
  servletResponse.addHeader("Vary", "Accept, Range, Accept-Encoding, Accept-Language");
  return ok(rdfStream).build();
}
public void rebindPartialActive(
    CompoundTransformer transformer, Iterator<BrooklynObject> objectsToRebind) {
  final ClassLoader classLoader = managementContext.getCatalogClassLoader();
  // TODO we might want different exception handling for partials;
  // failure at various points should leave proxies in a sensible state,
  // either pointing at old or at new, though this is relatively untested,
  // and some things e.g. policies might not be properly started
  final RebindExceptionHandler exceptionHandler =
      RebindExceptionHandlerImpl.builder()
          .danglingRefFailureMode(danglingRefFailureMode)
          .danglingRefQuorumRequiredHealthy(danglingRefsQuorumRequiredHealthy)
          .rebindFailureMode(rebindFailureMode)
          .addConfigFailureMode(addConfigFailureMode)
          .addPolicyFailureMode(addPolicyFailureMode)
          .loadPolicyFailureMode(loadPolicyFailureMode)
          .build();
  final ManagementNodeState mode = getRebindMode();

  ActivePartialRebindIteration iteration =
      new ActivePartialRebindIteration(
          this,
          mode,
          classLoader,
          exceptionHandler,
          rebindActive,
          readOnlyRebindCount,
          rebindMetrics,
          persistenceStoreAccess);

  iteration.setObjectIterator(
      Iterators.transform(
          objectsToRebind,
          new Function<BrooklynObject, BrooklynObject>() {
            @Override
            public BrooklynObject apply(BrooklynObject obj) {
              // entities must be deproxied
              if (obj instanceof Entity) obj = Entities.deproxy((Entity) obj);
              return obj;
            }
          }));

  if (transformer != null) iteration.applyTransformer(transformer);
  iteration.run();
}
private Iterator<SimpleFeature> alter(
    Iterator<SimpleFeature> plainFeatures, final ObjectId targetFeatureTypeId) {
  final RevFeatureType targetType = objectDatabase().getFeatureType(targetFeatureTypeId);

  Function<SimpleFeature, SimpleFeature> alterFunction =
      new Function<SimpleFeature, SimpleFeature>() {
        @Override
        public SimpleFeature apply(SimpleFeature input) {
          final RevFeatureType oldFeatureType;
          oldFeatureType = (RevFeatureType) input.getUserData().get(RevFeatureType.class);

          final ObjectId metadataId = oldFeatureType.getId();
          if (targetType.getId().equals(metadataId)) {
            return input;
          }

          final RevFeature oldFeature;
          oldFeature = (RevFeature) input.getUserData().get(RevFeature.class);

          ImmutableList<PropertyDescriptor> oldAttributes = oldFeatureType.sortedDescriptors();
          ImmutableList<PropertyDescriptor> newAttributes = targetType.sortedDescriptors();

          ImmutableList<Optional<Object>> oldValues = oldFeature.getValues();
          List<Optional<Object>> newValues = Lists.newArrayList();
          for (int i = 0; i < newAttributes.size(); i++) {
            int idx = oldAttributes.indexOf(newAttributes.get(i));
            if (idx != -1) {
              Optional<Object> oldValue = oldValues.get(idx);
              newValues.add(oldValue);
            } else {
              newValues.add(Optional.absent());
            }
          }

          RevFeature newFeature = RevFeatureImpl.build(ImmutableList.copyOf(newValues));
          FeatureBuilder featureBuilder = new FeatureBuilder(targetType);
          SimpleFeature feature = (SimpleFeature) featureBuilder.build(input.getID(), newFeature);
          return feature;
        }
      };

  return Iterators.transform(plainFeatures, alterFunction);
}
@Override
protected Iterable<Text> getLines(int page) throws CommandException {
  if (!this.countIterator.hasNext()) {
    throw new CommandException(t("Already at end of iterator"));
  }

  if (page <= this.lastPage) {
    throw new CommandException(t("Cannot go backward in an IterablePagination"));
  } else if (page > this.lastPage + 1) {
    getLines(page - 1);
  }
  this.lastPage = page;

  if (getMaxContentLinesPerPage() <= 0) {
    return Lists.newArrayList(
        Iterators.transform(
            this.countIterator,
            new Function<Map.Entry<Text, Integer>, Text>() {
              @Nullable
              @Override
              public Text apply(Map.Entry<Text, Integer> input) {
                return input.getKey();
              }
            }));
  }

  List<Text> ret = new ArrayList<>(getMaxContentLinesPerPage());
  int addedLines = 0;
  while (addedLines <= getMaxContentLinesPerPage()) {
    if (!this.countIterator.hasNext()) {
      break;
    }
    if (addedLines + this.countIterator.peek().getValue() > getMaxContentLinesPerPage()) {
      break;
    }
    Map.Entry<Text, Integer> ent = this.countIterator.next();
    ret.add(ent.getKey());
    addedLines += ent.getValue();
  }
  return ret;
}
@SuppressWarnings("unchecked") private Iterator<Node> getBlankNodesIterator() throws RepositoryException { final Iterator<Property> properties = resource().getNode().getProperties(); final Iterator<Property> references = Iterators.filter( properties, uncheck((final Property p) -> REFERENCE_TYPES.contains(p.getType()))::test); final Iterator<Node> nodes = Iterators.transform( new PropertyValueIterator(references), UncheckedFunction.uncheck( (final Value v) -> v.getType() == PATH ? session().getNode(v.getString()) : session().getNodeByIdentifier(v.getString())) ::apply); return Iterators.filter(nodes, isSkolemNode::test); }
@Override
public Iterator<Rowboat> iterator() {
  return Iterators.transform(
      index.iterator(),
      new Function<Rowboat, Rowboat>() {
        int rowCount = 0;

        @Override
        public Rowboat apply(@Nullable Rowboat input) {
          int[][] dims = input.getDims();
          int[][] newDims = new int[convertedDims.size()][];
          for (int i = 0; i < convertedDims.size(); ++i) {
            IntBuffer converter = converters.get(convertedDims.get(i));
            if (converter == null) {
              continue;
            }
            if (i >= dims.length || dims[i] == null) {
              continue;
            }
            newDims[i] = new int[dims[i].length];
            for (int j = 0; j < dims[i].length; ++j) {
              if (!converter.hasRemaining()) {
                log.error("Converter mismatch! wtfbbq!");
              }
              newDims[i][j] = converter.get(dims[i][j]);
            }
          }

          final Rowboat retVal =
              new Rowboat(input.getTimestamp(), newDims, input.getMetrics(), input.getRowNum());
          retVal.addRow(indexNumber, input.getRowNum());
          return retVal;
        }
      });
}
@Override
protected RevCommit read(ObjectId id, BufferedReader reader, TYPE type) throws IOException {
  Preconditions.checkArgument(TYPE.COMMIT.equals(type), "Wrong type: %s", type.name());
  String tree = parseLine(requireLine(reader), "tree");
  List<String> parents =
      Lists.newArrayList(
          Splitter.on(' ').omitEmptyStrings().split(parseLine(requireLine(reader), "parents")));
  RevPerson author = parsePerson(requireLine(reader), "author");
  RevPerson committer = parsePerson(requireLine(reader), "committer");
  String message = parseMessage(reader);

  CommitBuilder builder = new CommitBuilder();
  builder.setAuthor(author.getName().orNull());
  builder.setAuthorEmail(author.getEmail().orNull());
  builder.setAuthorTimestamp(author.getTimestamp());
  builder.setAuthorTimeZoneOffset(author.getTimeZoneOffset());
  builder.setCommitter(committer.getName().orNull());
  builder.setCommitterEmail(committer.getEmail().orNull());
  builder.setCommitterTimestamp(committer.getTimestamp());
  builder.setCommitterTimeZoneOffset(committer.getTimeZoneOffset());
  builder.setMessage(message);

  List<ObjectId> parentIds =
      Lists.newArrayList(
          Iterators.transform(
              parents.iterator(),
              new Function<String, ObjectId>() {
                @Override
                public ObjectId apply(String input) {
                  ObjectId objectId = ObjectId.valueOf(input);
                  return objectId;
                }
              }));
  builder.setParentIds(parentIds);
  builder.setTreeId(ObjectId.valueOf(tree));
  RevCommit commit = builder.build();
  return commit;
}