public static <A, B> Stream<Pair<A, B>> zip(Stream<A> s1, Stream<B> s2, int size) {
  PairIterator<A, B, Pair<A, B>> itr =
      new PairIterator<>(s1.iterator(), s2.iterator(), Pair<A, B>::new);
  int characteristics = Spliterator.IMMUTABLE | Spliterator.NONNULL;
  if (size < 0) {
    return StreamSupport.stream(
        Spliterators.spliteratorUnknownSize(itr, characteristics), false);
  }
  return StreamSupport.stream(Spliterators.spliterator(itr, size, characteristics), false);
}
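// A minimal usage sketch, not from the original source: it assumes the Pair and
// PairIterator types referenced by zip(...) above. The size hint is 3 here because both
// inputs have a known length; a negative size falls back to an unknown-size spliterator.
static void demoZip() {
  Stream<String> names = Stream.of("ann", "bob", "cid");
  Stream<Integer> ids = Stream.of(1, 2, 3);
  zip(names, ids, 3).forEach(pair -> System.out.println(pair));
}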
public Stream<T> parallelStream() {
  BlockingIterable.SubscriberIterator<T> it = createIterator();
  source.subscribe(it);
  Spliterator<T> sp = Spliterators.spliteratorUnknownSize(it, 0);
  // The subscriber iterator doubles as the onClose handler (it implements Runnable),
  // so closing the parallel stream tears down the subscription.
  return StreamSupport.stream(sp, true).onClose(it);
}
public Spliterator<V> spliterator() {
  return Spliterators.spliterator(
      this,
      Spliterator.ORDERED
          | Spliterator.DISTINCT
          | Spliterator.IMMUTABLE
          | Spliterator.NONNULL
          | Spliterator.SIZED);
}
@Override
public Iterator<Bindings> iterator() {
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(
              iterator(skip != null ? skip : 0, limit != null ? limit : 0), 0),
          false)
      .map(v -> (Bindings) new SimpleBindings(v))
      .iterator();
}
public Stream<Node> nodeStream(boolean includeInlinedNodes) {
  Iterator<Node> iterator;
  TruffleInlining inliner = getInlining();
  if (includeInlinedNodes && inliner != null) {
    iterator = inliner.makeNodeIterator(this);
  } else {
    iterator = NodeUtil.makeRecursiveIterator(this.getRootNode());
  }
  return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, 0), false);
}
private Stream<String> parseToLine(InputStream is) {
  CsvScanner scanner =
      new CsvScanner(
          is, formatter.getCharset(), formatter.getQuoteChar(), formatter.getEscapeChar());
  int characteristic = Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE;
  Spliterator<String> spliterator =
      Spliterators.spliteratorUnknownSize(new CsvLineIterator(scanner), characteristic);
  RunnableUtil.ThrowingRunnable<IOException> close = scanner::close;
  return StreamSupport.stream(spliterator, false)
      .onClose(close.ignoreThrown(UncheckedIOException::new));
}
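// A JDK-only sketch of the same onClose pattern (it does not use the CsvScanner or
// RunnableUtil types above): closing the Stream via try-with-resources is what triggers the
// close handler, so the underlying reader is released even if the pipeline fails midway.
static void printLines(java.io.Reader source) {
  java.io.BufferedReader reader = new java.io.BufferedReader(source);
  try (Stream<String> lines =
      StreamSupport.stream(
              Spliterators.spliteratorUnknownSize(
                  reader.lines().iterator(),
                  Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE),
              false)
          .onClose(
              () -> {
                try {
                  reader.close();
                } catch (java.io.IOException e) {
                  throw new java.io.UncheckedIOException(e);
                }
              })) {
    lines.forEach(System.out::println);
  }
}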
private <T> List<T> getAll(Class<T> clazz, String elasticType) {
  String query =
      "{\n"
          + " \"query\": {\n"
          + " \"match_all\": {}\n"
          + " },\n"
          + " \"size\": " + Integer.MAX_VALUE + "\n"
          + "}";
  SearchResult searchResult = performSearchOnType(query, elasticType);
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(
              searchResult.getHits(clazz).iterator(), Spliterator.ORDERED),
          false)
      .map(hitResult -> hitResult.source)
      .collect(Collectors.toList());
}
public Spliterator<E> trySplit() {
  Node<E> p;
  final ConcurrentLinkedQueue<E> q = this.queue;
  int b = batch;
  // Grow the batch size on each split, capped at MAX_BATCH.
  int n = (b <= 0) ? 1 : (b >= MAX_BATCH) ? MAX_BATCH : b + 1;
  if (!exhausted
      && ((p = current) != null || (p = q.first()) != null)
      && p.next != null) {
    Object[] a = new Object[n];
    int i = 0;
    // Copy up to n non-null items into the array; the split-off spliterator covers this prefix.
    do {
      if ((a[i] = p.item) != null) ++i;
      if (p == (p = p.next)) p = q.first();
    } while (p != null && i < n);
    if ((current = p) == null) exhausted = true;
    if (i > 0) {
      batch = i;
      return Spliterators.spliterator(
          a, 0, i, Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.CONCURRENT);
    }
  }
  return null;
}
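// A small sketch, using only JDK types, of how this batching trySplit behaves: each call
// peels a prefix off into an array-backed spliterator and leaves the remainder behind
// (this mirrors the JDK's own ConcurrentLinkedQueue spliterator shown above).
static void splitDemo() {
  java.util.concurrent.ConcurrentLinkedQueue<Integer> queue =
      new java.util.concurrent.ConcurrentLinkedQueue<>(java.util.List.of(1, 2, 3, 4, 5));
  Spliterator<Integer> rest = queue.spliterator();
  Spliterator<Integer> prefix = rest.trySplit(); // null when there is too little left to split
  if (prefix != null) {
    prefix.forEachRemaining(i -> System.out.println("prefix: " + i));
  }
  rest.forEachRemaining(i -> System.out.println("rest: " + i));
}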
private static <T> Stream<T> iterToStream(Iterator<T> iter) {
  Spliterator<T> splititer = Spliterators.spliteratorUnknownSize(iter, Spliterator.IMMUTABLE);
  return StreamSupport.stream(splititer, false);
}
@Override
default Spliterator<T> spliterator() {
  // the focus of the Stream API is on random-access collections of *known size*
  return Spliterators.spliterator(
      iterator(), length(), Spliterator.ORDERED | Spliterator.IMMUTABLE);
}
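// A JDK-only sketch (not part of the interface above) contrasting the two factory methods:
// a sized spliterator reports its exact length, while an unknown-size one reports
// Long.MAX_VALUE; an exact size can let the Stream machinery pre-allocate results,
// for example in toArray().
static void sizeDemo() {
  java.util.List<String> data = java.util.List.of("a", "b", "c");
  Spliterator<String> sized =
      Spliterators.spliterator(data.iterator(), data.size(), Spliterator.ORDERED);
  Spliterator<String> unsized =
      Spliterators.spliteratorUnknownSize(data.iterator(), Spliterator.ORDERED);
  System.out.println(sized.estimateSize());   // 3
  System.out.println(unsized.estimateSize()); // Long.MAX_VALUE
}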
@Override
public Spliterator.OfLong spliterator() {
  return Spliterators.spliteratorUnknownSize(iterator(), 0);
}
/**
 * Returns the encoded output stream of the underlying {@link Stream}'s encoder.
 *
 * @return the encoded output stream.
 */
public Stream<int[]> getOutputStream() {
  if (isTerminal()) {
    throw new IllegalStateException("Stream is already \"terminal\" (operated upon or empty)");
  }
  final MultiEncoder encoder = (MultiEncoder) getEncoder();
  if (encoder == null) {
    throw new IllegalStateException(
        "setLocalParameters(Parameters) must be called before calling this method.");
  }
  // Protect outputStream formation and creation of the "fan out"; also make sure
  // that no other thread is trying to update the fan-out lists.
  Stream<int[]> retVal = null;
  try {
    criticalAccessLock.lock();
    final String[] fieldNames = getFieldNames();
    final FieldMetaType[] fieldTypes = getFieldTypes();
    if (outputStream == null) {
      if (indexFieldMap.isEmpty()) {
        for (int i = 0; i < fieldNames.length; i++) {
          indexFieldMap.put(fieldNames[i], i);
        }
      }
      // NOTE: The "inputMap" here is a special local implementation
      // of the "Map" interface, overridden so that we can access
      // the keys directly (without hashing). This map is only used
      // for this use case, so it is ok to use this optimization as
      // a convenience.
      if (inputMap == null) {
        inputMap = new InputMap();
        inputMap.fTypes = fieldTypes;
      }
      final boolean isParallel = delegate.getInputStream().isParallel();
      output = new ArrayList<>();
      outputStream =
          delegate
              .getInputStream()
              .map(
                  l -> {
                    String[] arr = (String[]) l;
                    inputMap.arr = arr;
                    return input(arr, fieldNames, fieldTypes, output, isParallel);
                  });
      mainIterator = outputStream.iterator();
    }
    LinkedList<int[]> l = new LinkedList<int[]>();
    fanOuts.add(l);
    Copy copy = new Copy(l);
    retVal =
        StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(
                copy, Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE),
            false);
  } catch (Exception e) {
    e.printStackTrace();
  } finally {
    criticalAccessLock.unlock();
  }
  return retVal;
}
private static Stream<String[]> fileToStream(String filePath) throws FileNotFoundException {
  com.opencsv.CSVReader reader = new com.opencsv.CSVReader(new FileReader(filePath));
  Iterator<String[]> iterator = reader.iterator();
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false);
}
@Override
public final PrimitiveIterator.OfLong iterator() {
  return Spliterators.iterator(spliterator());
}
@Override
public Spliterator<T> spliterator() {
  return Spliterators.spliterator(
      iterator(), length(), Spliterator.ORDERED | Spliterator.IMMUTABLE);
}
@Override
public Spliterator<E> spliterator() {
  return Spliterators.emptySpliterator();
}
public Stream<U> stream() {
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(reversedIterator(), Spliterator.ORDERED), false);
}
@Override
public Stream<List<T>> stream() {
  return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator(), 0), false);
}
public Stream<Node> stream() {
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED), false);
}
/** Stream items with a blocking iterator. */
public Stream<Result> stream() {
  // SIZED is meaningless for a spliterator of unknown size (spliteratorUnknownSize ignores
  // it), so only IMMUTABLE is reported here.
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(iterator(), Spliterator.IMMUTABLE), false);
}
@Override
public Spliterator.OfInt spliterator() {
  return Spliterators.spliteratorUnknownSize(iterator(), Spliterator.CONCURRENT);
}
private static <T> Stream<T> toStream(Iterable<T> iterable) {
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(iterable.iterator(), Spliterator.ORDERED), false);
}
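// A hypothetical usage sketch of the helper above: any Iterable can be adapted lazily,
// and the resulting sequential Stream reports only ORDERED.
static void demoToStream() {
  Iterable<String> words =
      new java.util.LinkedHashSet<>(java.util.List.of("one", "two", "three"));
  long longWords = toStream(words).filter(w -> w.length() > 3).count();
  System.out.println(longWords); // 1 ("three")
}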