/**
 * A helper function for dumping the accuracy of the trained classifier.
 *
 * <p>Logs the dataset size, the number of examples per clause label, and the
 * precision / recall / F1 of the classifier for the SPLIT and INTERM labels.
 *
 * @param classifier The classifier to evaluate.
 * @param dataset The dataset to evaluate the classifier on.
 */
public static void dumpAccuracy(
    Classifier<ClauseSplitter.ClauseClassifierLabel, String> classifier,
    GeneralDataset<ClauseSplitter.ClauseClassifierLabel, String> dataset) {
  DecimalFormat df = new DecimalFormat("0.00%");
  log("size: " + dataset.size());
  // count() instead of collect(toList()).size(): no intermediate list is materialized
  log(
      "split count: "
          + StreamSupport.stream(dataset.spliterator(), false)
              .filter(x -> x.label() == ClauseSplitter.ClauseClassifierLabel.CLAUSE_SPLIT)
              .count());
  log(
      "interm count: "
          + StreamSupport.stream(dataset.spliterator(), false)
              .filter(x -> x.label() == ClauseSplitter.ClauseClassifierLabel.CLAUSE_INTERM)
              .count());
  Pair<Double, Double> pr =
      classifier.evaluatePrecisionAndRecall(
          dataset, ClauseSplitter.ClauseClassifierLabel.CLAUSE_SPLIT);
  log("p (split): " + df.format(pr.first));
  log("r (split): " + df.format(pr.second));
  log("f1 (split): " + formatF1(df, pr));
  pr =
      classifier.evaluatePrecisionAndRecall(
          dataset, ClauseSplitter.ClauseClassifierLabel.CLAUSE_INTERM);
  log("p (interm): " + df.format(pr.first));
  log("r (interm): " + df.format(pr.second));
  log("f1 (interm): " + formatF1(df, pr));
}

/**
 * Formats the F1 (harmonic mean) of a (precision, recall) pair.
 * Returns 0 when precision + recall is 0, avoiding a NaN in the log output.
 */
private static String formatF1(DecimalFormat df, Pair<Double, Double> pr) {
  double denom = pr.first + pr.second;
  return df.format(denom == 0.0 ? 0.0 : 2 * pr.first * pr.second / denom);
}
/**
 * Returns a stream over this object's streamable contents in reverse order.
 *
 * <p>Lists and arrays are reversed lazily via {@code ReversedIterator}; anything
 * else falls back to materializing and reversing the forward {@link #stream()}.
 *
 * <p>NOTE(review): the method name is misspelled ("reveresed" should be
 * "reversed"); renaming would break existing callers, so it is left as-is.
 *
 * @return a new stream yielding the elements in reverse order
 */
default Stream<T> reveresedStream() {
  Object streamable = getStreamable();
  // List: wrap in a reversed-view iterator, no copying of elements.
  if (streamable instanceof List) {
    return StreamSupport.stream(new ReversedIterator((List) streamable).spliterator(), false);
  }
  // Array: view it as a fixed-size List first, then reverse-iterate.
  if (streamable instanceof Object[]) {
    List arrayList = Arrays.asList((Object[]) streamable);
    return StreamSupport.stream(new ReversedIterator(arrayList).spliterator(), false);
  }
  // Fallback: SeqUtils.reverse presumably buffers the stream to reverse it — TODO confirm cost.
  return SeqUtils.reverse(stream());
}
/**
 * Build TrendContext with external services ordered by average response time.
 *
 * <p>Statistics are grouped by the URL of their owning external service, ordered by
 * average response time (sum of response times / sum of page views), and the first
 * {@code limit} groups are added to the returned context.
 *
 * <p>NOTE(review): the comparator sorts ascending, so {@code limit} keeps the groups
 * with the SMALLEST averages; the name "topBy..." suggests {@code .reversed()} may have
 * been intended — confirm before changing. Also, a statistic whose service id is absent
 * from {@code transactions} will NPE in the grouping step — confirm upstream guarantees.
 *
 * @param period aggregation period in seconds (converted to millis for the context)
 * @param limit maximum number of service groups to include
 * @param timeRange time range covered by the statistics
 * @param transactions the known external services (id → URL lookup source)
 * @param transactionsStatistic per-period statistics to group and rank
 * @return a TrendContext keyed by service URL
 */
public static TrendContext<String> topByAvgResponseTime(
    Long period,
    Integer limit,
    TimeRange timeRange,
    Iterable<ExternalService> transactions,
    Iterable<ExternalServiceStatistic> transactionsStatistic) {
  TrendContext<String> trendContext = new TrendContext<>(period * 1000, timeRange);
  // Index services by id so each statistic can be mapped to its service URL.
  Map<Long, ExternalService> externalServiceMap =
      StreamSupport.stream(transactions.spliterator(), false)
          .collect(Collectors.toMap(ExternalService::getId, (t) -> t));
  Map<String, List<ExternalServiceStatistic>> transactionStatisticMap =
      StreamSupport.stream(transactionsStatistic.spliterator(), false)
          .collect(
              Collectors.groupingBy(
                  statistic ->
                      externalServiceMap.get(statistic.getExternalServiceId()).getUrl()));
  transactionStatisticMap
      .entrySet()
      .stream()
      .sorted(
          Comparator.comparing(
              (Map.Entry<String, List<ExternalServiceStatistic>> entry) ->
                  averageResponseTime(entry.getValue())))
      .limit(limit)
      .forEach(entry -> trendContext.addStatistics(entry.getKey(), timeRange, entry.getValue()));
  return trendContext;
}

/**
 * Average response time for one service's statistics: total response time divided by
 * total page views (via the project's calculateRate). Null fields are skipped.
 */
private static Double averageResponseTime(List<ExternalServiceStatistic> statistics) {
  double totalResponseTime =
      statistics
          .stream()
          .filter(statistic -> statistic.getSumResponseTime() != null)
          .mapToDouble(ExternalServiceStatistic::getSumResponseTime)
          .sum();
  long totalPv =
      statistics
          .stream()
          .filter(statistic -> statistic.getPv() != null)
          .mapToLong(ExternalServiceStatistic::getPv)
          .sum();
  return calculateRate(totalResponseTime, totalPv);
}
/**
 * Zips two streams element-wise into a stream of pairs.
 *
 * @param s1 source of first pair components
 * @param s2 source of second pair components
 * @param size known size of the result, or a negative value when unknown
 * @return a sequential stream of pairs, ending when either input ends
 */
public static <A, B> Stream<Pair<A, B>> zip(Stream<A> s1, Stream<B> s2, int size) {
  PairIterator<A, B, Pair<A, B>> pairs =
      new PairIterator<>(s1.iterator(), s2.iterator(), Pair<A, B>::new);
  int flags = Spliterator.IMMUTABLE | Spliterator.NONNULL;
  // Use a sized spliterator when the caller knows the length; unknown-size otherwise.
  Spliterator<Pair<A, B>> spliterator =
      (size < 0)
          ? Spliterators.spliteratorUnknownSize(pairs, flags)
          : Spliterators.spliterator(pairs, size, flags);
  return StreamSupport.stream(spliterator, false);
}
/**
 * Returns a new sequential stream over this object's streamable contents.
 *
 * <p>A Stream is returned as-is, an Iterable is wrapped directly; any other object
 * goes through dynamic dispatch and finally through decomposition.
 *
 * @return New Stream
 */
default Stream<T> stream() {
  Object streamable = getStreamable();
  if (streamable instanceof Stream) {
    return (Stream) streamable;
  }
  if (streamable instanceof Iterable) {
    return StreamSupport.stream(((Iterable) streamable).spliterator(), false);
  }
  // Last resort: try dynamic invocation, else decompose the object into its parts.
  return new InvokeDynamic()
      .stream(streamable)
      .orElseGet(
          () ->
              (Stream)
                  StreamSupport.stream(
                      AsDecomposable.asDecomposable(streamable).unapply().spliterator(), false));
}
/**
 * Reduces this pipeline aggregation into each bucket of the given aggregation.
 *
 * <p>For a multi-bucket aggregation, the reduced sub-aggregation is appended to every
 * bucket; for a single-bucket aggregation it is appended once. Any other aggregation
 * type is a programming error.
 *
 * @param aggregation the aggregation to augment; must be multi- or single-bucket
 * @param reduceContext reduction context passed through to {@code doReduce}
 * @return a copy of {@code aggregation} with the reduced sub-aggregation added
 * @throws IllegalStateException if {@code aggregation} is not a bucket aggregation
 */
@SuppressWarnings("unchecked")
@Override
public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
  if (aggregation instanceof InternalMultiBucketAggregation) {
    @SuppressWarnings("rawtypes")
    InternalMultiBucketAggregation multiBucketsAgg = (InternalMultiBucketAggregation) aggregation;
    List<? extends Bucket> buckets = multiBucketsAgg.getBuckets();
    List<Bucket> newBuckets = new ArrayList<>();
    for (Bucket rawBucket : buckets) {
      InternalMultiBucketAggregation.InternalBucket bucket =
          (InternalMultiBucketAggregation.InternalBucket) rawBucket;
      List<InternalAggregation> aggs =
          appendReduced(
              bucket.getAggregations(), doReduce(bucket.getAggregations(), reduceContext));
      newBuckets.add(multiBucketsAgg.createBucket(new InternalAggregations(aggs), bucket));
    }
    return multiBucketsAgg.create(newBuckets);
  } else if (aggregation instanceof InternalSingleBucketAggregation) {
    InternalSingleBucketAggregation singleBucketAgg = (InternalSingleBucketAggregation) aggregation;
    List<InternalAggregation> aggs =
        appendReduced(
            singleBucketAgg.getAggregations(),
            doReduce(singleBucketAgg.getAggregations(), reduceContext));
    return singleBucketAgg.create(new InternalAggregations(aggs));
  } else {
    throw new IllegalStateException(
        "Aggregation ["
            + aggregation.getName()
            + "] must be a bucket aggregation ["
            + aggregation.type().name()
            + "]");
  }
}

/**
 * Copies the given aggregations into a mutable list of InternalAggregation and appends
 * {@code toAdd}. Extracted to remove the duplicated stream-collect-add logic from both
 * branches of {@link #reduce}.
 */
private static List<InternalAggregation> appendReduced(
    Iterable<?> aggregations, InternalAggregation toAdd) {
  List<InternalAggregation> aggs =
      StreamSupport.stream(aggregations.spliterator(), false)
          .map(p -> (InternalAggregation) p)
          .collect(Collectors.toList());
  aggs.add(toAdd);
  return aggs;
}
private static MatchTableImpl createEmptyTable( VariantGraphRanking ranking, VariantGraph graph, Iterable<Token> witness) { // -2 === ignore the start and the end vertex return new MatchTableImpl( StreamSupport.stream(witness.spliterator(), false).toArray(Token[]::new), IntStream.range(0, Math.max(0, ranking.apply(graph.getEnd()) - 1)).toArray()); }
@Override public void saveAllMembersAndSubsystems(Collection<Member> members) { LocalDateTime now = LocalDateTime.now(); // process members Map<MemberId, Member> unprocessedOldMembers = new HashMap<>(); StreamSupport.stream(memberRepository.findAll().spliterator(), false) .forEach(member -> unprocessedOldMembers.put(member.createKey(), member)); for (Member member : members) { Member oldMember = unprocessedOldMembers.get(member.createKey()); if (oldMember == null) { // brand new item member.getStatusInfo().setTimestampsForNew(now); for (Subsystem subsystem : member.getAllSubsystems()) { subsystem.getStatusInfo().setTimestampsForNew(now); subsystem.setMember(member); } member = memberRepository.save(member); } else { handleOldMember(now, member, oldMember); member = memberRepository.save(oldMember); } unprocessedOldMembers.remove(member.createKey()); } // now unprocessedOldMembers should all be removed (either already removed, or will be now) removeUnprocessedOldMembers(now, unprocessedOldMembers); }
/**
 * Browses the assets of a component, returning only those the current user is
 * permitted to see. Returns an empty result when the component does not exist.
 */
@Override
public BrowseResult<Asset> browseComponentAssets(
    final Repository repository, final String componentId) {
  checkNotNull(repository);
  checkNotNull(componentId);
  try (StorageTx storageTx = repository.facet(StorageFacet.class).txSupplier().get()) {
    storageTx.begin();
    Component component = storageTx.findComponent(new DetachedEntityId(componentId));
    if (component == null) {
      return new BrowseResult<>(0, Collections.emptyList());
    }
    VariableResolverAdapter resolver = variableResolverAdapterManager.get(component.format());
    // Keep only assets the caller may BROWSE in this repository.
    List<Asset> permittedAssets =
        StreamSupport.stream(storageTx.browseAssets(component).spliterator(), false)
            .filter(
                asset ->
                    contentPermissionChecker.isPermitted(
                        repository.getName(),
                        asset.format(),
                        BreadActions.BROWSE,
                        resolver.fromAsset(asset)))
            .collect(Collectors.toList());
    return new BrowseResult<>(permittedAssets.size(), permittedAssets);
  }
}
/** Asserts that the routing nodes contain exactly the given node ids, in any order. */
private void assertNodesPresent(RoutingNodes routingNodes, String... nodes) {
  Set<String> actualNodeIds =
      StreamSupport.stream(routingNodes.spliterator(), false)
          .map(node -> node.nodeId())
          .collect(Collectors.toSet());
  assertThat(actualNodeIds, containsInAnyOrder(nodes));
}
/**
 * Returns a sequential stream over this object's style spans, sized by
 * {@code getSpanCount()} and backed by {@link #iterator()}.
 */
default Stream<StyleSpan<S>> stream() {
  Spliterator<StyleSpan<S>> spliterator =
      new Spliterator<StyleSpan<S>>() {
        private final Iterator<StyleSpan<S>> delegate = iterator();

        @Override
        public boolean tryAdvance(Consumer<? super StyleSpan<S>> action) {
          if (!delegate.hasNext()) {
            return false;
          }
          action.accept(delegate.next());
          return true;
        }

        @Override
        public Spliterator<StyleSpan<S>> trySplit() {
          // Sequential only — never splits.
          return null;
        }

        @Override
        public long estimateSize() {
          return getSpanCount();
        }

        @Override
        public int characteristics() {
          return Spliterator.IMMUTABLE | Spliterator.SIZED;
        }
      };
  return StreamSupport.stream(spliterator, false);
}
/**
 * Streams {@code obj} and/or its descendants using the given iterator builder.
 *
 * @param iteratorBuilder produces the child iterator for each visited element
 * @param includeParent whether the root object itself is included in the stream
 * @param obj the root object; a null root yields an empty stream
 * @return a sequential stream of traversed elements
 */
public static <T> Stream<T> traverse(
    Function<T, Iterator<T>> iteratorBuilder, boolean includeParent, @Nullable T obj) {
  if (obj == null) {
    return Stream.empty();
  }
  return StreamSupport.stream(
      new TraversingSpliterator<>(iteratorBuilder, includeParent, obj), false);
}
/**
 * Gets the quantity of the Mesos resource specified by {@code type}.
 *
 * @param resources Mesos resources.
 * @param type Type of resource to quantify.
 * @return Aggregate Mesos resource value, or 0.0 when none match.
 */
public static Double quantityOfMesosResource(Iterable<Resource> resources, ResourceType type) {
  return StreamSupport.stream(resources.spliterator(), false)
      // Drop unsupported resources first, then keep only the requested type.
      .filter(SUPPORTED_RESOURCE::apply)
      .filter(resource -> fromResource(resource).equals(type))
      .map(QUANTIFY_MESOS_RESOURCE)
      .reduce(REDUCE_VALUES)
      .orElse(0.0);
}
/** Collects the GSS nodes of every nonterminal in the grammar graph into one list. */
@Override
public Iterable<GSSNode> getGSSNodes() {
  return grammarGraph
      .getNonterminals()
      .stream()
      .flatMap(
          nonterminal -> StreamSupport.stream(nonterminal.getGSSNodes().spliterator(), false))
      .collect(Collectors.toList());
}
/**
 * Searches for the carts corresponding to the query.
 *
 * @param query the search query string
 * @return all matching carts as a list
 */
@Transactional(readOnly = true)
public List<Cart> search(String query) {
  log.debug("REST request to search Carts for query {}", query);
  Iterable<Cart> hits = cartSearchRepository.search(queryStringQuery(query));
  return StreamSupport.stream(hits.spliterator(), false).collect(Collectors.toList());
}
/**
 * Returns a parallel stream over the elements produced by {@code source}.
 *
 * <p>Side effect: subscribes a fresh blocking iterator to {@code source}; elements
 * arrive through that subscription as the stream is consumed.
 */
public Stream<T> parallelStream() {
  BlockingIterable.SubscriberIterator<T> it = createIterator();
  source.subscribe(it);
  // Unknown size: the source may produce any number of elements.
  Spliterator<T> sp = Spliterators.spliteratorUnknownSize(it, 0);
  // onClose(it): the iterator is registered as the close handler — presumably it
  // implements Runnable and cancels the subscription on close; TODO confirm.
  return StreamSupport.stream(sp, true).onClose(it);
}
/**
 * Joins the string forms of {@code elements} with {@code sep} between consecutive items.
 *
 * <p>Null elements are rendered as the string {@code "null"} (via {@link String#valueOf})
 * instead of throwing, generalizing the previous behavior which NPE'd on null elements.
 *
 * @param elements the values to join; must not be null (may be empty)
 * @param sep the separator placed between consecutive elements
 * @return the joined string; empty when {@code elements} has no elements
 * @throws IllegalArgumentException if {@code elements} is null
 */
public static <T> String listToString(Iterable<T> elements, String sep) {
  if (elements == null) {
    throw new IllegalArgumentException("elements cannot be null.");
  }
  return StreamSupport.stream(elements.spliterator(), false)
      .map(String::valueOf)
      .collect(Collectors.joining(sep));
}
// (Event is in progress && Event has reserved stands) public ResponseDTO findAllUserEvents(Integer page, Integer limit) { List<BigInteger> eventIds = StreamSupport.stream( expoEventRepository .findAll(ExpoEventPredicates.eventsIsInProgressAndRemovedIsFalse()) .spliterator(), false) .filter( expoEvent -> { ExpoHall expoHall = expoHallService.findOne(expoEvent.getHallId()); return StreamSupport.stream( expoStandRepository .findAll( ExpoStandPredicates.eqHallIdAndRemovedIsFalse(expoHall.getId())) .spliterator(), false) .anyMatch(expoStand -> expoStandService.isStandReserved(expoStand.getId())); }) .map(AbstractDocument::getId) .collect(Collectors.toList()); /*Page<ExpoEvent> expoEventPage = expoEventRepository.findAll( ExpoEventPredicates.eventIsInProgress(), new PageRequest(--page, limit, Sort.Direction.DESC, "lastModifiedDate") );*/ Page<ExpoEvent> expoEventPage = expoEventRepository.findAll( eventIds, new PageRequest(--page, limit, Sort.Direction.DESC, "lastModifiedDate")); List<ExpoEventDTO> eventDTOs = mapper.mapAsList(expoEventPage.getContent(), ExpoEventDTO.class); eventDTOs.forEach( eventDTO -> { if (eventDTO.getLogo() != null) { eventDTO .getLogo() .setBytes( ImageResizeUtil.resize( eventDTO.getLogo().getBytes(), thumbnailSize, eventDTO.getLogo().getContentType())); } eventDTO.setExpoHall(getExpoHall(eventDTO.getHallId())); }); return ResponseDTO.getBuilder().data(eventDTOs).total(expoEventPage.getTotalElements()).build(); }
/**
 * Registers on the validation resource the attributes whose values must not change:
 * readonly attributes that are not computed from an expression.
 */
private void initReadonlyValidation(ValidationResource validationResource) {
  List<AttributeMetaData> readonlyAttrs =
      StreamSupport.stream(getEntityMetaData().getAtomicAttributes().spliterator(), false)
          .filter(attr -> attr.isReadonly())
          .filter(attr -> attr.getExpression() == null)
          .collect(Collectors.toList());
  validationResource.setReadonlyAttrs(readonlyAttrs);
}
/**
 * Iterates the underlying results as {@link Bindings}, honoring the configured
 * skip/limit (either may be null, in which case 0 is passed through).
 */
@Override
public Iterator<Bindings> iterator() {
  int offset = (skip != null) ? skip : 0;
  int max = (limit != null) ? limit : 0;
  return StreamSupport.stream(
          Spliterators.spliteratorUnknownSize(iterator(offset, max), 0), false)
      .map(v -> (Bindings) new SimpleBindings(v))
      .iterator();
}
/** Builds the JSON view of a form option set, wrapping each contained option. */
public FormOptionSetJson(final FormOptionSet formOptionSet) {
  this.formOptionSet = formOptionSet;
  this.occurrences = new OccurrencesJson(formOptionSet.getOccurrences());
  this.multiselection = new OccurrencesJson(formOptionSet.getMultiselection());
  // One JSON wrapper per option, preserving iteration order.
  this.options =
      StreamSupport.stream(formOptionSet.spliterator(), false)
          .map(option -> new FormOptionSetOptionJson(option))
          .collect(Collectors.toList());
}
/**
 * On startup, resets every item stuck in STARTED or PAUSED back to NOT_DOWNLOADED
 * and persists the change.
 */
@PostConstruct
public void resetItemWithIncorrectState() {
  log.info("Reset des Started et Paused");
  // setStatus returns the item itself, so the save can be chained directly.
  StreamSupport.stream(
          itemRepository.findByStatus(Status.STARTED, Status.PAUSED).spliterator(), false)
      .forEach(item -> itemRepository.save(item.setStatus(Status.NOT_DOWNLOADED)));
}
/** SEARCH /_search/prices/:query -> search for the price corresponding to the query. */
@RequestMapping(
    value = "/_search/prices/{query}",
    method = RequestMethod.GET,
    produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
public List<Price> search(@PathVariable String query) {
  Iterable<Price> hits = priceSearchRepository.search(queryString(query));
  return StreamSupport.stream(hits.spliterator(), false).collect(Collectors.toList());
}
/**
 * Generates a single NOT-IN predicate restricting {@code field} to values outside
 * the given expressions.
 */
@Override
protected List<Predicate> doGenerate(
    CriteriaBuilder criteriaBuilder,
    Root<?> entity,
    String field,
    Iterable<Expression<Object>> expressions) {
  Expression[] values =
      StreamSupport.stream(expressions.spliterator(), false).toArray(Expression[]::new);
  Predicate notIn = criteriaBuilder.not(entity.get(field).in(values));
  return Arrays.asList(notIn);
}
/**
 * Streams the nodes of this call target's AST.
 *
 * @param includeInlinedNodes when true and inlining data exists, also walks inlined
 *     call sites; otherwise only the root node's subtree is visited
 * @return a sequential stream of nodes
 */
public Stream<Node> nodeStream(boolean includeInlinedNodes) {
  TruffleInlining inliner = getInlining();
  boolean walkInlined = includeInlinedNodes && inliner != null;
  Iterator<Node> iterator =
      walkInlined
          ? inliner.makeNodeIterator(this)
          : NodeUtil.makeRecursiveIterator(this.getRootNode());
  return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, 0), false);
}
/** SEARCH /_search/cardTypes/:query -> search for the cardType corresponding to the query. */
@RequestMapping(
    value = "/_search/cardTypes/{query:.+}",
    method = RequestMethod.GET,
    produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
public List<CardType> searchCardTypes(@PathVariable String query) {
  log.debug("REST request to search CardTypes for query {}", query);
  Iterable<CardType> hits = cardTypeSearchRepository.search(queryStringQuery(query));
  return StreamSupport.stream(hits.spliterator(), false).collect(Collectors.toList());
}
/**
 * SEARCH /_search/labels?query=:query : search for the label corresponding to the query.
 *
 * @param query the query of the label search
 * @return the result of the search
 */
@RequestMapping(
    value = "/_search/labels",
    method = RequestMethod.GET,
    produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
public List<Label> searchLabels(@RequestParam String query) {
  log.debug("REST request to search Labels for query {}", query);
  Iterable<Label> hits = labelSearchRepository.search(queryStringQuery(query));
  return StreamSupport.stream(hits.spliterator(), false).collect(Collectors.toList());
}
/**
 * Builds the initial evolution state: the given genotypes are wrapped as phenotypes
 * of this generation, and the population is topped up with freshly generated
 * phenotypes until it reaches the configured population size.
 */
private EvolutionStart<G, C> evolutionStart(
    final Iterable<Genotype<G>> genotypes, final long generation) {
  final Stream<Phenotype<G, C>> seeded =
      StreamSupport.stream(genotypes.spliterator(), false)
          .map(gt -> Phenotype.of(gt, generation, _fitnessFunction, _fitnessScaler));
  final Stream<Phenotype<G, C>> filler = Stream.generate(() -> newPhenotype(generation));
  final Population<G, C> population =
      Stream.concat(seeded, filler).limit(getPopulationSize()).collect(toPopulation());
  return EvolutionStart.of(population, generation);
}
/** Verifies the interval pair filter passes exactly the expected number of records. */
@Test(dataProvider = "testData")
public void testIntervalPairFilter(
    final List<Interval> intervals, final long expectedPassingRecords) {
  final IntervalKeepPairFilter filter = new IntervalKeepPairFilter(intervals);
  // A record "passes" when the filter does NOT filter it out.
  final long passingCount =
      StreamSupport.stream(builder.spliterator(), false)
          .filter(record -> !filter.filterOut(record))
          .count();
  Assert.assertEquals(passingCount, expectedPassingRecords);
}
public List<ObjectId> getIds(User user) { DBObject group = MongoUtils.group(new BasicDBObject(ID, MongoUtils.valueOf(MongoMeasurement.SENSOR, ID))); DBObject match = MongoUtils.match(MongoMeasurement.USER, ref(user)); AggregationOutput result = getMongoDB().getDatastore().getCollection(MongoMeasurement.class).aggregate(match, group); result.getCommandResult().throwOnError(); return StreamSupport.stream(result.results().spliterator(), false) .map(x -> (ObjectId) x.get(ID)) // .map(x -> new Key<>(MongoSensor.class, x)) .collect(Collectors.toList()); }