/**
 * Reads all methods matching the action annotations for building agent actions.
 *
 * <p>Walks the class hierarchy recursively. For each class annotated as an action class, the
 * access annotation decides whether the filter acts as a whitelist (keep unfiltered methods) or a
 * blacklist (keep filtered methods) over the declared methods.
 *
 * @param p_class class to inspect (may be null, which ends the recursion)
 * @param p_root root class of the recursion, used for method filtering
 * @return stream of all matching methods with inheritance
 */
private static Stream<Method> methods(final Class<?> p_class, final Class<?> p_root) {
  // base case: reached the top of the hierarchy (superclass of Object is null);
  // previously the Stream.concat branch below recursed without this guard and
  // could pass null into CCommon.isActionClass
  if (p_class == null) return Stream.of();

  final Pair<Boolean, IAgentAction.EAccess> l_classannotation = CCommon.isActionClass(p_class);
  // class is not annotated as an action class -> continue with the superclass only
  if (!l_classannotation.getLeft()) return methods(p_class.getSuperclass(), p_root);

  // whitelist keeps methods that are NOT filtered, blacklist keeps filtered ones
  final Predicate<Method> l_filter =
      IAgentAction.EAccess.WHITELIST.equals(l_classannotation.getRight())
          ? i -> !CCommon.isActionFiltered(i, p_root)
          : i -> CCommon.isActionFiltered(i, p_root);

  return Stream.concat(
      Arrays.stream(p_class.getDeclaredMethods())
          .parallel()
          .map(
              i -> {
                // make non-public methods invokable as actions
                i.setAccessible(true);
                return i;
              })
          .filter(i -> !Modifier.isAbstract(i.getModifiers()))
          .filter(i -> !Modifier.isInterface(i.getModifiers()))
          .filter(i -> !Modifier.isNative(i.getModifiers()))
          .filter(i -> !Modifier.isStatic(i.getModifiers()))
          .filter(l_filter),
      // recurse into the superclass; null is handled by the guard above
      methods(p_class.getSuperclass(), p_root));
}
/**
 * Appends the given hint to the assignment's hint list.
 *
 * @param assignment assignment to extend (mutated in place)
 * @param hint hint to append
 * @return the same assignment instance, for chaining
 */
public static Assignment addHint(Assignment assignment, Hint hint) {
  List<Hint> extended =
      Stream.concat(assignment.getHints().stream(), Stream.of(hint)).collect(toList());
  assignment.setHints(extended);
  return assignment;
}
private static Map<String, Method> addMethods( Map<String, Method> methods, Class<?> clazz, Predicate<Method> filter) { requireNonNull(methods); requireNonNull(filter); // clazz nullable (why?) if (clazz == Object.class) { return methods; } if (clazz == null) { return methods; } Stream.of(clazz.getDeclaredMethods()) .filter(filter) .forEach( m -> { methods.putIfAbsent(m.getName(), m); // Put only the most recent // concrete version of the // method }); addMethods(methods, clazz.getSuperclass(), filter); // Recursively add // the superclass // methods Stream.of(clazz.getInterfaces()) .forEach( i -> { addMethods(methods, i, filter); // Recursively add the extended // interfaces (because they can // contain default methods) }); return methods; }
/**
 * Creates a backend data provider over the in-memory {@code data} list: items are filtered by the
 * query's optional predicate, sorted by the query's sort orders, paged by offset/limit, and
 * logged to stderr before being returned.
 */
@Override
protected BackEndDataProvider<StrBean, SerializablePredicate<StrBean>> createDataProvider() {
  dataProvider =
      new BackEndDataProvider<>(
          query -> {
            // a missing filter accepts every bean
            Stream<StrBean> matching =
                data.stream().filter(bean -> query.getFilter().orElse(any -> true).test(bean));

            if (!query.getSortOrders().isEmpty()) {
              // chain the comparators in the order the query specifies
              Comparator<StrBean> comparator =
                  query
                      .getSortOrders()
                      .stream()
                      .map(this::getComparator)
                      .reduce(Comparator::thenComparing)
                      .get();
              matching = matching.sorted(comparator);
            }

            // page the result, then log each bean before handing the page back
            List<StrBean> page =
                matching
                    .skip(query.getOffset())
                    .limit(query.getLimit())
                    .collect(Collectors.toList());
            page.forEach(bean -> System.err.println(bean.toString()));
            return page.stream();
          },
          query ->
              (int)
                  data.stream()
                      .filter(bean -> query.getFilter().orElse(any -> true).test(bean))
                      .count());
  return dataProvider;
}
public void mainCollectorExample() { Stream songsStream = getSongStream().map(s -> s.toUpperCase()); System.out.println(songsStream.count()); Set<String> setSongs = getSongStream().filter(s -> s.startsWith("it")).collect(toSet()); System.out.println("Set count:" + setSongs.size()); songsStream.close(); int sum = getSongStream().mapToInt(song -> song.toString().length()).sum(); System.out.println("sum is :" + sum); // lets understand the difference between lazy and eager evaluation getSongStream() .filter( song -> { System.out.println("coming song in lazy:" + song); return song.startsWith("al"); }); getSongStream() .filter( song -> { System.out.println("coming song in eager:" + song); return song.startsWith("al"); }) .count(); Stream<String> letters = Stream.of("b", "c", "d", null).filter(letter -> letter.isEmpty()); // Optional<String> firstLetter = Optional.of(letters.findFirst()); }
/** Verifies the unary and binary function wrappers of the handler. */
@Test
public void testFunctions() {
  // function(): applies the wrapped unary function inside the Anoa context
  final AtomicLong counter = new AtomicLong(10L);
  Assert.assertEquals(Anoa.of(10L), handler.function(counter::getAndAdd).apply(handler.of(1L)));
  Assert.assertEquals(11L, counter.get());

  // a checked function that throws maps to an empty Anoa tagged with Meta.OTHER
  Assert.assertEquals(
      Anoa.empty(Stream.of(Meta.OTHER)),
      handler
          .functionChecked(
              ignored -> {
                throw new IOException();
              })
          .apply(handler.of(1L)));

  // biFunction(): the two-argument variant follows the same contract
  final AtomicLong biCounter = new AtomicLong(10L);
  Assert.assertEquals(
      Anoa.of(10L),
      handler.biFunction((Long x, Long y) -> biCounter.getAndAdd(x + y)).apply(Anoa.of(1L), 1L));
  Assert.assertEquals(12L, biCounter.get());

  Assert.assertEquals(
      Anoa.empty(Stream.of(Meta.OTHER)),
      handler
          .biFunctionChecked(
              (left, right) -> {
                throw new IOException();
              })
          .apply(Anoa.of(1L), 1L));
}
/** Files.lines on a missing file must fail with NoSuchFileException. */
public void testConstructException() {
  Path missing = testFolder.resolve("notExist");
  try (Stream<String> lines = Files.lines(missing, Charset.forName("UTF-8"))) {
    lines.forEach(line -> fail("File is not even exist!"));
  } catch (IOException ioe) {
    // the concrete subtype signals the missing file
    assertTrue(ioe instanceof NoSuchFileException);
  }
}
/**
 * Returns whether the given {@link IntArrayND} equals the given object.
 *
 * <p>Two arrays are equal when the other object is also an {@link IntArrayND} of the same size
 * holding the same value at every coordinate.
 *
 * @param array The array
 * @param object The object
 * @return Whether the array equals the object
 */
static boolean equals(IntArrayND array, Object object) {
  if (array == object) {
    return true;
  }
  // also covers object == null
  if (!(object instanceof IntArrayND)) {
    return false;
  }
  IntArrayND other = (IntArrayND) object;
  if (!array.getSize().equals(other.getSize())) {
    return false;
  }
  // compare element-wise, walking in the array's preferred iteration order
  Stream<MutableIntTuple> coordinates =
      Coordinates.coordinates(array.getPreferredIterationOrder(), array.getSize());
  for (IntTuple coordinate : (Iterable<MutableIntTuple>) coordinates::iterator) {
    if (array.get(coordinate) != other.get(coordinate)) {
      return false;
    }
  }
  return true;
}
/** Demo: filtering, sorting and counting a fixed set of people with streams. */
public static void main(String[] args) {
  Personne[] personnes = {
    new Personne("thibault", "Rougier", 2001),
    new Personne("thomas", "Niesseron", 1987),
    new Personne("thifaine", "Mitenne", 1959),
    new Personne("maxime", "Forest", 1995),
    new Personne("jules", "Forest", 1995)
  };

  // people born after 1985, first names only
  System.out.println("--- Nes apres 1985 : ");
  Stream.of(personnes)
      .filter(p -> p.getAnnee() > 1985)
      .forEach(p -> System.out.print(p.getPrenom() + ", "));

  // people born before 2000, printed sorted by last name while counting them
  System.out.println("\n--- Nes avant 2000 :");
  long nombre =
      Stream.of(personnes)
          .filter(p -> p.getAnnee() < 2000)
          .sorted(Comparator.comparing(Personne::getNom))
          .peek(p -> System.out.print(p.getNom() + " "))
          .count();
  System.out.println("\n Ils sont " + nombre);

  // everybody, sorted by last name + first name
  System.out.println("--- Tous tries sur nom + prenom : ");
  Stream.of(personnes)
      .sorted(Comparator.comparing(p -> p.getNom() + p.getPrenom()))
      .forEach(p -> System.out.print("(" + p.getNom() + ", " + p.getPrenom() + ") "));
}
/**
 * Streams the input file (ISO-8859-1), skips the first {@code startLine} lines, extracts the
 * system name (regex group 1) from each matching line, collapses consecutive duplicates via
 * {@code currentSystem}, and prints a "jumped to" message per system change.
 *
 * <p>Fix: the original ran {@code PATTERN.matcher(line)} twice per line (once in a filter, once
 * in a map); the pipeline now creates one {@link Matcher} per line and reuses it.
 */
public void filterFile() {
  try (Stream<String> lines =
      Files.lines(Paths.get(INPUT_FILE_NAME), Charset.forName("ISO-8859-1"))) {
    lines
        .skip(startLine)
        // run the regex once per line instead of once in filter and again in map
        .map(PATTERN::matcher)
        .filter(Matcher::matches)
        .map(m -> m.group(1))
        // keep only lines where the system actually changed (stateful: updates the field)
        .filter(
            system -> {
              if (system.equals(currentSystem)) {
                return false;
              }
              currentSystem = system;
              return true;
            })
        .map(system -> "jumped to " + system)
        .forEach(System.out::println);
  } catch (IOException ioe) {
    System.out.println(ioe.toString());
  }
}
/**
 * Builds the references contributed by a single upper-case identifier; built-in core library
 * types contribute no references.
 */
private static Stream<ElmReference> getReferencesFromSingleId(ElmUpperCaseId element) {
  return ElmCoreLibrary.isBuiltIn(element.getText())
      ? Stream.empty()
      : Stream.of(new ElmTypeReference(element));
}
/**
 * Creates an instance of {@code clazz} via its first public constructor, resolving each
 * constructor parameter either from {@code preInstancedDependencies} or by recursive
 * instantiation.
 *
 * <p>Fixes: the unchecked {@code Optional.get()} now fails with a descriptive exception when the
 * class has no public constructor, and the deprecated {@code Class.newInstance()} (which rethrows
 * checked exceptions unchecked) is replaced by invoking the discovered constructor directly.
 *
 * @param clazz class to instantiate
 * @param matchingClasses candidate classes passed through to recursive calls
 * @param preInstancedDependencies already-built objects preferred as constructor arguments
 * @return the new instance
 * @throws RuntimeException wrapping any reflective failure
 */
@SuppressWarnings("unchecked")
private static <T> T instantiate(
    Class<T> clazz, List<Class<?>> matchingClasses, Object... preInstancedDependencies) {
  try {
    logger.info("Creating instance of: " + clazz);
    Constructor<?> clazzConstructor =
        Stream.of(clazz.getConstructors())
            .findFirst()
            .orElseThrow(
                () -> new IllegalStateException("No public constructor in " + clazz));
    Class<?>[] parameterTypes = clazzConstructor.getParameterTypes();
    if (parameterTypes.length == 0) {
      // zero-arg path: invoke the constructor directly instead of the deprecated
      // Class.newInstance()
      return (T) clazzConstructor.newInstance();
    }
    // resolve each parameter: prefer a pre-instanced dependency, otherwise recurse
    Object[] paramInstances =
        Stream.of(parameterTypes)
            .map(
                paramType ->
                    Stream.of(preInstancedDependencies)
                        .filter(paramType::isInstance)
                        .findFirst()
                        .orElseGet(
                            () ->
                                instantiate(paramType, matchingClasses, preInstancedDependencies)))
            .toArray();
    return (T) clazzConstructor.newInstance(paramInstances);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * @param collector to perform aggregation / reduction operation on the results from active stage
 *     (e.g. to Collect into a List or String)
 * @param fn Function that receives the results of all currently active tasks as input
 * @return A new builder object that can be used to define the next stage in the dataflow
 */
@SuppressWarnings({"unchecked", "rawtypes"})
default <T, R> SimpleReactStream<R> allOf(final Collector collector, final Function<T, R> fn) {
  // snapshot of all currently active futures; allOf completes when every one does
  CompletableFuture[] array = lastActiveArray(getLastActive());
  CompletableFuture cf = CompletableFuture.allOf(array);
  // failure path: record the error, then block on the remaining results so the
  // stage still yields a value for the downstream pipeline
  Function<Exception, T> f =
      (Exception e) -> {
        BlockingStreamHelper.capture(e, getErrorHandler());
        return BlockingStreamHelper.block(
            this, Collectors.toList(), new StreamWrapper(Stream.of(array), true));
      };
  CompletableFuture onFail = cf.exceptionally(f);
  // success path: aggregate all task results with the supplied collector and apply fn,
  // submitted on this stream's task executor
  CompletableFuture onSuccess =
      onFail.thenApplyAsync(
          (result) -> {
            return new StageWithResults(this.getTaskExecutor(), null, result)
                .submit(
                    () ->
                        (R)
                            fn.apply(
                                BlockingStreamHelper.aggregateResults(
                                    collector,
                                    Stream.of(array).collect(Collectors.toList()),
                                    getErrorHandler())));
          },
          getTaskExecutor());
  // NOTE(review): the failure path uses Collectors.toList() rather than the supplied
  // collector — presumably intentional; confirm against BlockingStreamHelper's contract.
  return (SimpleReactStream<R>) withLastActive(new StreamWrapper(onSuccess, isEager()));
}
/** Demonstrates the two-arg and three-arg forms of Stream.reduce. */
@Test
public void reduce() {
  // primitive reduction: 1+2+3+4 with identity 0
  IntStream stream = IntStream.of(1, 2, 3, 4);
  assertEquals(10, stream.reduce(0, (a, b) -> a + b));

  // boxed variant; the result is deliberately discarded here
  Stream<Integer> stream2 = Arrays.asList(1, 2, 3, 4).stream();
  stream2.reduce(0, (a, b) -> a + b);
  // obviously the operations can be more complex :)
  Stream<Integer> stream3 = Arrays.asList(1, 2, 3, 4).stream();
  // reduce is a superset of map :D
  // NOTE(review): this three-arg reduce mutates its identity ArrayList inside the
  // accumulator, violating reduce's requirement of an immutable identity — it happens
  // to work on a sequential stream, but a parallel stream could corrupt the result;
  // Collectors.toList() would be the safe equivalent.
  System.err.println(
      stream3.reduce(
          new ArrayList<Integer>(),
          (List<Integer> a, Integer b) -> {
            a.add(b);
            return a;
          },
          (a, b) -> {
            // combiner (used only in parallel): merge two partial lists
            List<Integer> c = new ArrayList<>(a);
            c.addAll(b);
            return c;
          }));
}
/**
 * Given a list, this method will generate all combinations of multiset of this list. Example :
 *
 * <pre>combinationsMultiset(Arrays.asList("a", "c", "q"))</pre>
 *
 * <pre>
 * [a, c, q]
 * [c, q]
 * [a, q]
 * [a, c]
 * [q]
 * [c]
 * [a]
 * []
 * </pre>
 *
 * @param l The list.
 * @return All combination of multiset of the given list.
 */
private static <E> Stream<List<E>> combinationsMultiset(List<E> l) {
  // base case: the only combination of the empty list is the empty list itself
  if (l.isEmpty()) {
    return Stream.of(Collections.emptyList());
  }
  E head = l.get(0);
  List<E> tail = l.subList(1, l.size());
  // every combination of the tail appears twice: as-is, and with the head prepended
  return combinationsMultiset(tail).flatMap(combo -> Stream.of(combo, pipe(head, combo)));
}
public void example() { System.out.println( Stream.of(IntStream.of(1), IntStream.of(2)).flatMapToInt((IntStream s) -> s).sum()); // unchanged, it's not using the varargs overload System.out.println(Stream.of(IntStream.of(1)).flatMap(s -> s.boxed()).mapToInt(i -> i).sum()); }
/**
 * Reads a game grid file and returns the matrix of its elements, composed of:
 *
 * <ul>
 *   <li><code>NotEmptyElement</code> when the digit lies within [1, 9]
 *   <li><code>EmptyElement</code> otherwise
 * </ul>
 *
 * <br>
 * <i>Note: this method is overridden in GridParsed for its particular use case of managing
 * placeable elements</i>.
 *
 * <p>Fixes: the redundant explicit {@code lines.close()} inside the try-with-resources block is
 * removed (the stream is closed automatically), and the boxed {@code Integer} digit value is now
 * a primitive {@code int}.
 *
 * @param path the path to the game grid file
 * @return the corresponding element matrix
 * @see guillaume.sudotris.metier.grid.GridParsed#initFromFile(Path)
 */
public static Element[][] parseFileToMatrix(Path path) {
  final Element[][] matrix = new Element[Sudotris.LINES][Sudotris.COLUMNS];

  try (Stream<String> lines = Files.lines(path)) {
    // single-element arrays emulate mutable row/column counters inside the lambdas
    final byte[] lineIndex = {0};
    final byte[] columnIndex = {0};

    lines.forEach(
        line -> {
          line.replaceAll("\\s+", "")
              .chars()
              .forEach(
                  letter -> {
                    // digits 1..9 become filled cells, everything else stays empty
                    final int value = Character.getNumericValue(letter);
                    if (value >= 1 && value <= 9) {
                      matrix[lineIndex[0]][columnIndex[0]] =
                          new NotEmptyElement(lineIndex[0], columnIndex[0], value);
                    } else {
                      matrix[lineIndex[0]][columnIndex[0]] =
                          new EmptyElement(lineIndex[0], columnIndex[0]);
                    }
                    columnIndex[0]++;
                  });
          // next file line: reset the column, advance the row
          columnIndex[0] = 0;
          lineIndex[0]++;
        });
  } catch (IOException e) {
    e.printStackTrace();
  }

  return matrix;
}
/**
 * {@inheritDoc}
 *
 * <p>Without arguments every edge is iterated. Otherwise the id list must be uniform: either all
 * {@code Edge} instances (returned again as-is) or all id values of one concrete type (used to
 * filter the full edge iterator by id).
 */
@Override
public Iterator<Edge> edges(final Object... edgeIds) {
  try {
    if (0 == edgeIds.length) {
      return new HadoopEdgeIterator(this);
    } else {
      // base the conversion function on the first item in the id list as the expectation is that
      // these id values will be a uniform list
      if (edgeIds[0] instanceof Edge) {
        // based on the first item assume all Edges in the argument list
        if (!Stream.of(edgeIds).allMatch(id -> id instanceof Edge))
          throw Graph.Exceptions.idArgsMustBeEitherIdOrElement();
        // no need to get the vertices again, so just flip it back - some implementation may want
        // to treat this as a refresh operation. that's not necessary for hadoopgraph.
        return Stream.of(edgeIds).map(id -> (Edge) id).iterator();
      } else {
        // all ids must share the first element's class
        final Class<?> firstClass = edgeIds[0].getClass();
        if (!Stream.of(edgeIds).map(Object::getClass).allMatch(firstClass::equals))
          throw Graph.Exceptions
              .idArgsMustBeEitherIdOrElement(); // todo: change exception to be ids of the same
        // type
        return IteratorUtils.filter(
            new HadoopEdgeIterator(this), vertex -> ElementHelper.idExists(vertex.id(), edgeIds));
      }
    }
  } catch (final IOException e) {
    throw new IllegalStateException(e.getMessage(), e);
  }
}
/**
 * Walks {@code start} following symlinks and asserts that the traversal raises a
 * FileSystemLoopException whose offending file matches one of the expected {@code causes}.
 */
private void validateFileSystemLoopException(Path start, Path... causes) {
  try (Stream<Path> walk = Files.walk(start, FileVisitOption.FOLLOW_LINKS)) {
    try {
      // force a full traversal; a loop must surface as an UncheckedIOException
      int count = walk.mapToInt(p -> 1).reduce(0, Integer::sum);
      fail("Should got FileSystemLoopException, but got " + count + "elements.");
    } catch (UncheckedIOException uioe) {
      IOException ioe = uioe.getCause();
      if (!(ioe instanceof FileSystemLoopException)) {
        fail("Unexpected UncheckedIOException cause " + ioe.toString());
        return;
      }
      FileSystemLoopException fsle = (FileSystemLoopException) ioe;
      // the loop's file must be one of the expected causes
      boolean match = false;
      for (Path cause : causes) {
        if (fsle.getFile().equals(cause.toString())) {
          match = true;
          break;
        }
      }
      assertTrue(match);
    }
  } catch (IOException ex) {
    fail("Unexpected IOException " + ex);
  }
}
// stream has already been operated upon or closed @Test(expected = IllegalStateException.class) public void test7() { Stream<String> stream = stringCollection.stream().filter(s -> s.startsWith("a")); stream.anyMatch(s -> true); stream.noneMatch(s -> true); }
/** Verifies the unary and binary consumer wrappers of the handler. */
@Test
public void testConsumers() {
  // consumer(): runs the side effect and passes the value through unchanged
  final AtomicLong total = new AtomicLong(0L);
  Assert.assertEquals(Anoa.of(1L), handler.consumer(total::addAndGet).apply(handler.of(1L)));
  Assert.assertEquals(1L, total.get());

  // a checked consumer that throws yields an empty Anoa tagged with Meta.OTHER
  Assert.assertEquals(
      Anoa.empty(Stream.of(Meta.OTHER)),
      handler
          .consumerChecked(
              ignored -> {
                throw new IOException();
              })
          .apply(handler.of(1L)));

  // biConsumer(): the two-argument variant follows the same contract
  final AtomicLong biTotal = new AtomicLong(0L);
  Assert.assertEquals(
      Anoa.of(1L),
      handler.biConsumer((Long x, Long y) -> biTotal.addAndGet(x + y)).apply(Anoa.of(1L), 1L));
  Assert.assertEquals(2L, biTotal.get());

  Assert.assertEquals(
      Anoa.empty(Stream.of(Meta.OTHER)),
      handler
          .biConsumerChecked(
              (left, right) -> {
                throw new IOException();
              })
          .apply(Anoa.of(1L), 1L));
}
/**
 * Builds a {@code FileConfig} entity from the given form: stamps the audit fields, copies the
 * form properties onto the entity, and encodes the newline-separated permission lines.
 *
 * @param form the create form carrying the user input
 * @return the populated entity, or empty when {@code getEntity} resolves nothing
 */
protected OptionalEntity<FileConfig> getFileConfig(final CreateForm form) {
  final String username = systemHelper.getUsername();
  final long currentTime = systemHelper.getCurrentTimeAsLong();
  return getEntity(form, username, currentTime)
      .map(
          entity -> {
            entity.setUpdatedBy(username);
            entity.setUpdatedTime(currentTime);
            // copy form fields onto the entity, excluding the common conversion rule
            // and the permissions (permissions are handled separately below)
            copyBeanToBean(
                form,
                entity,
                op ->
                    op.exclude(
                        Stream.concat(
                                Stream.of(Constants.COMMON_CONVERSION_RULE),
                                Stream.of(Constants.PERMISSIONS))
                            .toArray(n -> new String[n])));
            final PermissionHelper permissionHelper = ComponentUtil.getPermissionHelper();
            // encode each non-blank permission line, dropping blanks and duplicates
            entity.setPermissions(
                split(form.permissions, "\n")
                    .get(
                        stream ->
                            stream
                                .map(s -> permissionHelper.encode(s))
                                .filter(StringUtil::isNotBlank)
                                .distinct()
                                .toArray(n -> new String[n])));
            return entity;
          });
}
/**
 * Collects the game objects whose position lies inside the rectangle anchored at {@code pos}
 * with the given dimensions.
 *
 * @param pos corner of the search area
 * @param width width of the search area
 * @param height height of the search area
 * @param list candidate objects to test
 * @return the objects positioned inside the area
 */
public List<GameObject> getGameObjectsInRange(
    Vector2f pos, int width, int height, Collection<GameObject> list) {
  Rectangle range = new Rectangle(pos.x, pos.y, width, height);
  return list.stream()
      .filter(obj -> range.contains(obj.getPos().x, obj.getPos().y))
      .collect(Collectors.toList());
}
/**
 * Lists the direct children of the given directory.
 *
 * @param dir directory to list
 * @return the entries of {@code dir}, in the order provided by {@link Files#list}
 * @throws IOException if the directory cannot be opened
 */
public static List<Path> listDir(Path dir) throws IOException {
  List<Path> entries = new LinkedList<>();
  // try-with-resources closes the directory stream
  try (Stream<Path> children = Files.list(dir)) {
    children.forEachOrdered(entries::add);
  }
  return entries;
}
/**
 * CDI lifecycle hook run after deployment validation: registers type converters and routes on
 * every Camel context, forces eager bean instantiation, starts the contexts, and finally clears
 * the bookkeeping collections.
 */
private void afterDeploymentValidation(
    @Observes AfterDeploymentValidation adv, BeanManager manager) {
  // resolve all CamelContext beans up front
  Collection<CamelContext> contexts = new ArrayList<>();
  for (Bean<?> context : manager.getBeans(CamelContext.class, ANY)) {
    contexts.add(getReference(manager, CamelContext.class, context));
  }

  // Add type converters to Camel contexts
  CdiTypeConverterLoader loader = new CdiTypeConverterLoader();
  for (Class<?> converter : converters) {
    for (CamelContext context : contexts) {
      loader.loadConverterMethods(context.getTypeConverterRegistry(), converter);
    }
  }

  // Add routes to Camel contexts
  boolean deploymentException = false;
  Set<Bean<?>> routes = new HashSet<>(manager.getBeans(RoutesBuilder.class, ANY));
  routes.addAll(manager.getBeans(RouteContainer.class, ANY));
  for (Bean<?> context : manager.getBeans(CamelContext.class, ANY)) {
    for (Bean<?> route : routes) {
      // a route is attached when it shares more than one qualifier with the context
      Set<Annotation> qualifiers = new HashSet<>(context.getQualifiers());
      qualifiers.retainAll(route.getQualifiers());
      if (qualifiers.size() > 1) {
        deploymentException |= !addRouteToContext(route, context, manager, adv);
      }
    }
  }
  // Let's return to avoid starting misconfigured contexts
  if (deploymentException) {
    return;
  }

  // Trigger eager beans instantiation (calling toString is necessary to force
  // the initialization of normal-scoped beans).
  // FIXME: This does not work with OpenWebBeans for bean whose bean type is an
  // interface as the Object methods does not get forwarded to the bean instances!
  eagerBeans.forEach(type -> getReferencesByType(manager, type.getJavaClass(), ANY).toString());
  manager
      .getBeans(Object.class, ANY, STARTUP)
      .stream()
      .forEach(bean -> getReference(manager, bean.getBeanClass(), bean).toString());

  // Start Camel contexts
  for (CamelContext context : contexts) {
    // skip contexts that are already running
    if (ServiceStatus.Started.equals(context.getStatus())) {
      continue;
    }
    logger.info("Camel CDI is starting Camel context [{}]", context.getName());
    try {
      context.start();
    } catch (Exception exception) {
      // report startup failures as deployment problems instead of failing hard
      adv.addDeploymentProblem(exception);
    }
  }

  // Clean-up
  Stream.of(converters, camelBeans, eagerBeans, cdiBeans).forEach(Set::clear);
  Stream.of(producerBeans, producerQualifiers).forEach(Map::clear);
}
/**
 * Scans the classloader's URLs for candidate application class names.
 *
 * <p>Jar/zip files on the classpath are listed via {@code zipContents}; directory roots are
 * walked for {@code .class} files. The scanner's own package is excluded and only classes
 * implementing {@code App} are kept. A non-URLClassLoader yields an empty stream.
 *
 * @return stream of fully-qualified class names implementing {@code App}
 */
private static Stream<String> apps() {
  if (cl instanceof URLClassLoader) {
    URLClassLoader ucl = (URLClassLoader) cl;
    return Stream.of(ucl.getURLs())
        // NOTE(review): propagating(...) presumably rethrows checked exceptions
        // from the lambda unchecked — confirm against its declaration
        .map(propagating(url -> Paths.get(url.toURI())))
        .flatMap(
            propagating(
                path -> {
                  if (Files.isRegularFile(path)) {
                    // a jar/zip on the classpath: list its entries
                    return zipContents(path);
                  } else if (Files.isDirectory(path)) {
                    // a directory root: walk .class files below it
                    // (flatMap closes each returned stream after consuming it)
                    return Files.walk(path)
                        .map(subpath -> path.relativize(subpath))
                        .map(subpath -> subpath.toString())
                        .filter(subpath -> subpath.endsWith(".class"))
                        .map(Scanner::toClassName);
                  } else {
                    return Stream.empty();
                  }
                }))
        .filter(x -> !x.startsWith("com.cakemanny.app."))
        .filter(implementsInterface(App.class));
  } else {
    return Stream.empty();
  }
}
/** Verifies the first()/last() collectors, including short-circuiting on huge/infinite streams. */
@Test
public void testFirstLast() {
  // fresh 0..999 stream per invocation (the check helpers may consume it repeatedly)
  Supplier<Stream<Integer>> s = () -> IntStreamEx.range(1000).boxed();
  // first() must short-circuit after consuming a single element
  checkShortCircuitCollector("first", Optional.of(0), 1, s, MoreCollectors.first());
  // short-circuits even on a huge flat-mapped stream ...
  checkShortCircuitCollector(
      "firstLong",
      Optional.of(0),
      1,
      () -> Stream.of(1).flatMap(x -> IntStream.range(0, 1000000000).boxed()),
      MoreCollectors.first(),
      true);
  // ... and on an infinite iterate stream
  checkShortCircuitCollector(
      "first",
      Optional.of(1),
      1,
      () -> Stream.iterate(1, x -> x + 1),
      MoreCollectors.first(),
      true);
  // parallel infinite stream still yields the first element
  assertEquals(
      1, (int) StreamEx.iterate(1, x -> x + 1).parallel().collect(MoreCollectors.first()).get());

  checkCollector("last", Optional.of(999), s, MoreCollectors.last());
  // empty input produces Optional.empty() for both collectors
  checkCollectorEmpty("first", Optional.empty(), MoreCollectors.first());
  checkCollectorEmpty("last", Optional.empty(), MoreCollectors.last());
}
/** {@inheritDoc} */
@Override
public void runLearning() {
  this.initLearning();
  efBayesianNetwork = new EF_BayesianNetwork(dag);

  // choose sequential or parallel batch processing of the data stream
  final Stream<DataInstance> instances =
      parallelMode ? dataStream.parallelStream(batchSize) : dataStream.stream();

  dataInstanceCount = new AtomicDouble(0);
  // count every instance as it flows by, then accumulate the sufficient statistics
  sumSS =
      instances
          .peek(instance -> dataInstanceCount.getAndAdd(1.0))
          .map(efBayesianNetwork::getSufficientStatistics)
          .reduce(
              efBayesianNetwork.createZeroSufficientStatistics(),
              SufficientStatistics::sumVector);
}
/**
 * Takes a screenshot of the enemy base and template-matches every image in the bundled
 * "elixirs" classpath folder against it, accumulating an attackable-elixir score.
 *
 * @return whether the accumulated score is non-negative
 * @throws BotException on bot-level failures
 */
public Boolean isCollectorFullBase() throws BotException {
  // single-element array: mutable accumulator usable inside the lambda below
  final int[] attackableElixirs = {0};
  final BufferedImage image = platform.screenshot(ENEMY_BASE);
  try {
    final URI uri = getClass().getResource("elixirs").toURI();
    Utils.withClasspathFolder(
        uri,
        (path) -> {
          final List<Rectangle> matchedElixirs = new ArrayList<>();
          // walk only the folder itself (depth 1), skipping sub-directories
          try (Stream<Path> walk = Files.walk(path, 1)) {
            for (final Iterator<Path> it = walk.iterator(); it.hasNext(); ) {
              final Path next = it.next();
              if (Files.isDirectory(next)) {
                continue;
              }
              final BufferedImage tar =
                  ImageIO.read(Files.newInputStream(next, StandardOpenOption.READ));
              // grayscale template match at original resolution (threshold 7, similarity 0.8)
              final List<RegionMatch> doFindAll =
                  TemplateMatcher.findMatchesByGrayscaleAtOriginalResolution(image, tar, 7, 0.8);
              attackableElixirs[0] += countAttackableElixirs(doFindAll, matchedElixirs, next);
            }
          } catch (final IOException e) {
            logger.log(Level.SEVERE, e.getMessage(), e);
          }
        });
  } catch (final URISyntaxException e) {
    logger.log(Level.SEVERE, e.getMessage(), e);
  }
  // NOTE(review): starting from 0, this is always true unless countAttackableElixirs
  // can return negative values — verify that contract.
  return attackableElixirs[0] >= 0;
}
/**
 * Concatenates all input streams into one union stream, applying any registered checkpoint
 * procedure after each pairwise merge, and optionally de-duplicating the result.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public Stream<?> doApply(List<Stream<?>> streamsList) {
  // counter starts at 2: the first concat result is looked up under checkpoint
  // index 2 — presumably indices below that denote the original inputs; confirm
  // against how checkPointProcedures is populated
  AtomicInteger ctr = new AtomicInteger(2);
  Stream<?> unionizedStream =
      streamsList
          .stream()
          .reduce(
              (lStream, rStream) -> {
                // concatenate the running union with the next stream
                Stream<?> newStream = Stream.concat(lStream, rStream);
                int currentStreamIdx = ctr.getAndIncrement();
                // apply any checkpoint procedure registered for this merge index
                for (int j = 0; j < checkPointProcedures.size(); j++) {
                  Tuple2<Integer, Object> postProc = checkPointProcedures.get(j);
                  if ((Integer) postProc._1() == currentStreamIdx) {
                    SerFunction f = (SerFunction) postProc._2();
                    if (f != null) {
                      newStream = (Stream) f.apply(newStream);
                    }
                  }
                }
                return newStream;
              })
          .get(); // NOTE(review): throws NoSuchElementException if streamsList is empty
  if (this.distinct) {
    unionizedStream = unionizedStream.distinct();
  }
  return unionizedStream;
}