/**
 * Returns a parallel {@link Stream} fed by a blocking subscriber attached to {@code source}.
 *
 * <p>Closing the returned stream runs the iterator's close action, which cancels the
 * subscription.
 *
 * @return a parallel stream over the published elements
 */
public Stream<T> parallelStream() {
  // Subscribe a blocking iterator to the source, then expose it as a parallel stream.
  BlockingIterable.SubscriberIterator<T> subscriberIterator = createIterator();
  source.subscribe(subscriberIterator);
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(subscriberIterator, 0), true)
      .onClose(subscriberIterator);
}
/**
 * Iterates the result rows as script {@link Bindings}.
 *
 * <p>Delegates to the paging overload of {@code iterator}, where {@code 0} stands in for an
 * unset {@code skip} or {@code limit}, and wraps every row in a {@link SimpleBindings}.
 */
@Override
public Iterator<Bindings> iterator() {
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(
              iterator(skip == null ? 0 : skip, limit == null ? 0 : limit), 0),
          false)
      .map(row -> (Bindings) new SimpleBindings(row))
      .iterator();
}
/**
 * Streams the nodes reachable from this object's root node.
 *
 * @param includeInlinedNodes when {@code true} and inlining data is available, nodes from
 *     inlined call sites are included as well
 * @return a sequential stream of nodes
 */
public Stream<Node> nodeStream(boolean includeInlinedNodes) {
  TruffleInlining inliner = getInlining();
  // Use the inlining-aware iterator only when requested AND inlining data actually exists;
  // otherwise fall back to plain recursive traversal of the root node.
  Iterator<Node> nodes =
      (includeInlinedNodes && inliner != null)
          ? inliner.makeNodeIterator(this)
          : NodeUtil.makeRecursiveIterator(this.getRootNode());
  return StreamSupport.stream(Spliterators.spliteratorUnknownSize(nodes, 0), false);
}
/**
 * Lazily splits the given input into CSV lines using the formatter's charset, quote and escape
 * characters.
 *
 * <p>The returned stream owns the scanner: closing the stream closes it, with any thrown
 * {@code IOException} wrapped as {@link UncheckedIOException}.
 *
 * @param is the raw CSV input
 * @return a sequential, ordered stream of lines; the caller must close it
 */
private Stream<String> parseToLine(InputStream is) {
  CsvScanner scanner =
      new CsvScanner(
          is, formatter.getCharset(), formatter.getQuoteChar(), formatter.getEscapeChar());
  Spliterator<String> lines =
      Spliterators.spliteratorUnknownSize(
          new CsvLineIterator(scanner),
          Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE);
  RunnableUtil.ThrowingRunnable<IOException> closeScanner = scanner::close;
  return StreamSupport.stream(lines, false)
      .onClose(closeScanner.ignoreThrown(UncheckedIOException::new));
}
/**
 * Zips two streams element-wise into a stream of {@code Pair}s.
 *
 * @param s1 left-hand elements (consumed)
 * @param s2 right-hand elements (consumed)
 * @param size the exact number of pairs when known; pass a negative value when unknown
 * @return a sequential stream of pairs, ending when either input is exhausted
 */
public static <A, B> Stream<Pair<A, B>> zip(Stream<A> s1, Stream<B> s2, int size) {
  PairIterator<A, B, Pair<A, B>> pairs =
      new PairIterator<>(s1.iterator(), s2.iterator(), Pair<A, B>::new);
  int characteristics = Spliterator.IMMUTABLE | Spliterator.NONNULL;
  // A known size lets the stream report SIZED; otherwise fall back to an unsized spliterator.
  Spliterator<Pair<A, B>> spliterator =
      size < 0
          ? Spliterators.spliteratorUnknownSize(pairs, characteristics)
          : Spliterators.spliterator(pairs, size, characteristics);
  return StreamSupport.stream(spliterator, false);
}
/**
 * Fetches every document of the given Elasticsearch type and maps each hit's source to
 * {@code clazz}.
 *
 * <p>NOTE(review): requesting {@code "size": Integer.MAX_VALUE} assumes the server accepts an
 * arbitrarily large result window — Elasticsearch normally caps this via
 * {@code index.max_result_window}; confirm against the target cluster.
 *
 * @param clazz the type each hit's {@code source} is deserialized to
 * @param elasticType the index type to search
 * @return all matching sources, in hit order
 */
private <T> List<T> getAll(Class<T> clazz, String elasticType) {
  String query =
      "{\n"
          + " \"query\": {\n"
          + " \"match_all\": {}\n"
          + " },\n"
          + " \"size\": "
          + Integer.MAX_VALUE
          + "\n"
          + "}";
  SearchResult result = performSearchOnType(query, elasticType);
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(
              result.getHits(clazz).iterator(), Spliterator.ORDERED),
          false)
      .map(hit -> hit.source)
      .collect(Collectors.toList());
}
/**
 * Adapts an {@link Iterator} to a sequential {@link Stream}.
 *
 * <p>Fix: the previous version reported {@link Spliterator#IMMUTABLE} for every caller-supplied
 * iterator — an unverifiable claim for an arbitrary source that could mislead downstream
 * optimizations — so no characteristics are reported now.
 *
 * @param iter the iterator to wrap; it is consumed as the returned stream is traversed
 * @return a sequential stream over the iterator's remaining elements
 */
private static <T> Stream<T> iterToStream(Iterator<T> iter) {
  // 0 characteristics: nothing is known about an arbitrary caller-supplied source.
  Spliterator<T> splititer = Spliterators.spliteratorUnknownSize(iter, 0);
  return StreamSupport.stream(splititer, false);
}
/** Returns an unsized spliterator over this sequence's primitive {@code long} elements. */
@Override
public Spliterator.OfLong spliterator() {
  // Wrap the primitive iterator; no characteristics are known up front.
  java.util.PrimitiveIterator.OfLong elements = iterator();
  return Spliterators.spliteratorUnknownSize(elements, 0);
}
/**
 * Streams the rows of a CSV file.
 *
 * <p>Fix: the underlying {@code CSVReader} (and its {@code FileReader}) was never closed — a
 * resource leak. The returned stream now closes it via {@code onClose}, so callers should open
 * the stream in try-with-resources.
 *
 * <p>NOTE(review): {@code FileReader} uses the platform default charset here — confirm the CSV
 * files are always encoded that way, or switch to an explicit charset.
 *
 * @param filePath path of the CSV file to read
 * @return a sequential, ordered stream of parsed rows; the caller must close it
 * @throws FileNotFoundException if the file does not exist or cannot be opened
 */
private static Stream<String[]> fileToStream(String filePath) throws FileNotFoundException {
  com.opencsv.CSVReader reader = new com.opencsv.CSVReader(new FileReader(filePath));
  Iterator<String[]> iterator = reader.iterator();
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
      .onClose(
          () -> {
            try {
              reader.close();
            } catch (java.io.IOException e) {
              // onClose takes a Runnable, so surface close failures unchecked.
              throw new java.io.UncheckedIOException(e);
            }
          });
}
/**
 * Returns the encoded output stream of the underlying {@link Stream}'s encoder.
 *
 * @return the encoded output stream.
 */
public Stream<int[]> getOutputStream() {
  // Guard: the stream pipeline must not have been consumed already.
  if (isTerminal()) {
    throw new IllegalStateException("Stream is already \"terminal\" (operated upon or empty)");
  }
  // Guard: encoding is impossible until an encoder has been configured.
  final MultiEncoder encoder = (MultiEncoder) getEncoder();
  if (encoder == null) {
    throw new IllegalStateException(
        "setLocalParameters(Parameters) must be called before calling this method.");
  }
  // Protect outputStream formation and creation of "fan out" also make sure
  // that no other thread is trying to update the fan out lists
  Stream<int[]> retVal = null;
  try {
    criticalAccessLock.lock();
    final String[] fieldNames = getFieldNames();
    final FieldMetaType[] fieldTypes = getFieldTypes();
    // Lazily build the shared encoding pipeline exactly once; subsequent callers reuse it
    // through the fan-out mechanism below.
    if (outputStream == null) {
      // Populate the field-name -> index lookup on first use.
      if (indexFieldMap.isEmpty()) {
        for (int i = 0; i < fieldNames.length; i++) {
          indexFieldMap.put(fieldNames[i], i);
        }
      }
      // NOTE: The "inputMap" here is a special local implementation
      // of the "Map" interface, overridden so that we can access
      // the keys directly (without hashing). This map is only used
      // for this use case so it is ok to use this optimization as
      // a convenience.
      if (inputMap == null) {
        inputMap = new InputMap();
        inputMap.fTypes = fieldTypes;
      }
      final boolean isParallel = delegate.getInputStream().isParallel();
      output = new ArrayList<>();
      // Map each raw input row through the encoder; "input(...)" produces the encoded int[].
      outputStream = delegate
          .getInputStream()
          .map(
              l -> {
                String[] arr = (String[]) l;
                inputMap.arr = arr;
                return input(arr, fieldNames, fieldTypes, output, isParallel);
              });
      mainIterator = outputStream.iterator();
    }
    // Each caller gets its own buffer ("fan out") fed from the shared mainIterator via Copy,
    // so multiple consumers can read the same encoded sequence independently.
    LinkedList<int[]> l = new LinkedList<int[]>();
    fanOuts.add(l);
    Copy copy = new Copy(l);
    retVal = StreamSupport.stream(
        Spliterators.spliteratorUnknownSize(
            copy, Spliterator.ORDERED | Spliterator.NONNULL | Spliterator.IMMUTABLE),
        false);
  } catch (Exception e) {
    // NOTE(review): failures here are only printed and the method then returns null —
    // callers must tolerate a null stream; consider rethrowing instead of swallowing.
    e.printStackTrace();
  } finally {
    criticalAccessLock.unlock();
  }
  return retVal;
}
/** Returns a sequential, ordered stream over the elements in reverse order. */
public Stream<U> stream() {
  Spliterator<U> reversed =
      Spliterators.spliteratorUnknownSize(reversedIterator(), Spliterator.ORDERED);
  return StreamSupport.stream(reversed, false);
}
/**
 * Stream items with a blocking iterator.
 *
 * <p>Fix: the previous version also passed {@link Spliterator#SIZED}, but
 * {@link Spliterators#spliteratorUnknownSize} documents that SIZED and SUBSIZED are ignored and
 * not reported — the size is, by definition, unknown — so the contradictory flag is removed.
 *
 * @return a sequential stream whose traversal may block on the underlying iterator
 */
public Stream<Result> stream() {
  return StreamSupport.stream(
      Spliterators.spliteratorUnknownSize(iterator(), Spliterator.IMMUTABLE), false);
}
/**
 * Adapts any {@link Iterable} to a sequential, ordered {@link Stream}.
 *
 * @param iterable the element source; its iterator is consumed lazily
 * @return a sequential stream in the iterable's encounter order
 */
private static <T> Stream<T> toStream(Iterable<T> iterable) {
  java.util.Iterator<T> elements = iterable.iterator();
  Spliterator<T> spliterator =
      Spliterators.spliteratorUnknownSize(elements, Spliterator.ORDERED);
  return StreamSupport.stream(spliterator, false);
}
/**
 * Returns an unsized spliterator over this sequence's primitive {@code int} elements.
 *
 * <p>NOTE(review): {@link Spliterator#CONCURRENT} asserts the backing source may be safely
 * modified concurrently without external synchronization — confirm the underlying collection
 * actually guarantees this; for a fail-fast iterator the flag would be wrong.
 */
@Override
public Spliterator.OfInt spliterator() {
  java.util.PrimitiveIterator.OfInt elements = iterator();
  return Spliterators.spliteratorUnknownSize(elements, Spliterator.CONCURRENT);
}
/** Returns a sequential stream of the element lists produced by {@code iterator()}. */
@Override
public Stream<List<T>> stream() {
  Spliterator<List<T>> spliterator = Spliterators.spliteratorUnknownSize(iterator(), 0);
  return StreamSupport.stream(spliterator, false);
}
/** Returns a sequential, ordered stream over this container's nodes. */
public Stream<Node> stream() {
  Spliterator<Node> nodes =
      Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED);
  return StreamSupport.stream(nodes, false);
}