public static void main(String[] args) throws FileNotFoundException { ArrayList<String> names = new ArrayList<>(); File f = new File("people.csv"); Scanner scanner = new Scanner(f); scanner.nextLine(); while (scanner.hasNext()) { String line = scanner.nextLine(); String[] columns = line.split(","); String name = columns[1] + " " + columns[2]; names.add(name); } String searchTerm = "ali"; ArrayList<String> results = new ArrayList<>(); for (String name : names) { if (name.toLowerCase().contains(searchTerm)) { results.add(name); } } System.out.println(results); // To use stream: results = names .stream() .filter( (name) -> { return name.toLowerCase().contains(searchTerm); }) .collect(Collectors.toCollection(ArrayList<String>::new)); System.out.println(results); }
/**
 * Continuously polls OPC tag values and appends them to data.csv, one row per
 * second, with one column per tag (in mapping iteration order).
 *
 * <p>Runs until interrupted or an exception escapes; the writers are now closed
 * on exit (the original leaked them).
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args)
    throws InterruptedException, AlreadyConnectedException, JIException, IOException,
        NotConnectedException, DuplicateGroupException, AddFailedException {
  // try-with-resources: the original never closed the FileWriter/CSVWriter,
  // so buffered data could be lost if the loop exits via an exception.
  try (FileWriter fileWriter = new FileWriter("data.csv");
      CSVWriter writer = new CSVWriter(fileWriter, ',', '\0')) {
    // "sep=," is an Excel hint declaring the column separator.
    fileWriter.write("sep=,\n");
    fileWriter.flush();
    // Header row: one column per known tag. toArray(String[]::new) replaces the
    // original collect-to-LinkedList-then-toArray detour.
    writer.writeNext(
        Tag.TAG_TO_ID_MAPPING
            .entrySet()
            .stream()
            .map(entry -> entry.getKey().toString())
            .toArray(String[]::new));
    final OPCDataReader opcDataReader = new OPCDataReader(Tag.TAG_TO_ID_MAPPING).startReading();
    Thread.sleep(2000); // give the reader time to receive initial values
    while (true) {
      final Map<Tag, Double> actualValues = opcDataReader.getActualValues();
      System.out.println(actualValues);
      // One row per poll; columns aligned with the header's tag order.
      final String[] data =
          Tag.TAG_TO_ID_MAPPING
              .entrySet()
              .stream()
              .map(entry -> String.valueOf(actualValues.get(entry.getKey())))
              .toArray(String[]::new);
      writer.writeNext(data);
      writer.flush();
      Thread.sleep(1000);
    }
  }
}
/**
 * Adapts an arbitrary monad-like value into the target comprehender's type.
 *
 * <p>Known wrappers (Optional, Stream and primitive streams, CompletableFuture,
 * StreamT) are unwrapped explicitly; anything else is delegated to a
 * dynamically selected comprehender. The instanceof chain is order-sensitive.
 *
 * @param comp the target comprehender
 * @param apply the value to unwrap/convert
 * @return the value converted into the comprehender's type
 */
static <T> T unwrapOtherMonadTypes(Comprehender<T> comp, Object apply) {
  // Already the target type: cast straight through.
  if (comp.instanceOfT(apply)) return (T) apply;
  // Optional: present -> of(value), empty -> comprehender's empty.
  if (apply instanceof Optional) {
    if (((Optional) apply).isPresent()) return comp.of(((Optional) apply).get());
    return comp.empty();
  }
  // Streams (object and each primitive flavor) are materialized into a list
  // first; primitive streams are boxed.
  if (apply instanceof Stream) {
    return comp.of(((Stream) apply).collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof IntStream) {
    return comp.of(
        ((IntStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof DoubleStream) {
    return comp.of(
        ((DoubleStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof LongStream) {
    return comp.of(
        ((LongStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  // CompletableFuture: blocks for the result. NOTE(review): join() can throw
  // CompletionException — presumably acceptable here; confirm with callers.
  if (apply instanceof CompletableFuture) {
    return comp.of(((CompletableFuture) apply).join());
  }
  if (apply instanceof StreamT) return comp.of(((StreamT) apply).unwrap());
  // Fallback: pick a comprehender dynamically for any other type.
  return (T) new ComprehenderSelector()
      .selectComprehender(apply)
      .resolveForCrossTypeFlatMap(comp, apply);
}
@Override public void loadData(String fileName, String sheetName) throws RestLoaderException, IOException { // Initialize init(); // Read log.info("Reading data from the file.."); List<FileData<PatientVisitDTO>> patientVisits = excelFileReader.readData(fileName, sheetName); log.info("Data read complete.."); // Transform log.info("Transforming the data read.."); patientVisits = patientVisitDataTransformer.populateModelList(patientVisits); log.info("Data transformation complete.."); // Load log.info("Loading the data into PIM.."); List<FileData<PatientVisitDTO>> post = patientVisits .stream() .filter( data -> DataLoaderConstants.POST_OPERATION.equalsIgnoreCase(data.getOperationType())) .collect(Collectors.toCollection(ArrayList::new)); List<FileData<PatientVisitDTO>> put = patientVisits .stream() .filter( data -> DataLoaderConstants.PUT_OPERATION.equalsIgnoreCase(data.getOperationType())) .collect(Collectors.toCollection(ArrayList::new)); Map<String, List<FileData<PatientVisitDTO>>> groupedPostData = convertToMap(post); post = doPost(groupedPostData); List<FileData<PatientVisitDTO>> putList = new ArrayList<>(); put.forEach( dataSheet -> { try { addPathParam( env.getProperty(DataLoaderConstants.PATIENT_ID_KEY), dataSheet.getEntity().getPatientNumber()); addPathParam( env.getProperty(DataLoaderConstants.PATIENT_VISIT_ID_KEY), dataSheet.getEntity().getId()); putList.add(doPut(dataSheet)); } catch (RestLoaderException e) { } }); patientVisits = Stream.concat(post.stream(), putList.stream()).collect(Collectors.toList()); log.info("Data loading complete.."); // Write log.info("Writing the results back to the file.."); excelFileWriter.writeResult(fileName, patientVisits, sheetName); log.info("Writing results complete.."); }
/**
 * Resolves field mappings for one shard: determines which mapping types apply,
 * then collects the matching field mappings per type.
 *
 * @param request the per-index field-mappings request (type patterns + fields)
 * @param shardId the shard to resolve against; must not be null
 * @return the field mappings keyed by index name, then type, then field
 * @throws TypeMissingException if explicit type patterns match no index type
 */
@Override
protected GetFieldMappingsResponse shardOperation(
    final GetFieldMappingsIndexRequest request, ShardId shardId) {
  assert shardId != null;
  IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
  Collection<String> typeIntersection;
  // No requested types means "all types in the index"; otherwise keep only the
  // index types matching the request's (possibly wildcarded) patterns.
  if (request.types().length == 0) {
    typeIntersection = indexService.mapperService().types();
  } else {
    typeIntersection =
        indexService
            .mapperService()
            .types()
            .stream()
            .filter(type -> Regex.simpleMatch(request.types(), type))
            .collect(Collectors.toCollection(ArrayList::new));
    // Explicitly requested types that match nothing are an error.
    if (typeIntersection.isEmpty()) {
      throw new TypeMissingException(shardId.getIndex(), request.types());
    }
  }
  MapBuilder<String, Map<String, FieldMappingMetaData>> typeMappings = new MapBuilder<>();
  for (String type : typeIntersection) {
    DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
    Map<String, FieldMappingMetaData> fieldMapping =
        findFieldMappingsByType(documentMapper, request);
    // Types with no matching fields are omitted from the response.
    if (!fieldMapping.isEmpty()) {
      typeMappings.put(type, fieldMapping);
    }
  }
  return new GetFieldMappingsResponse(
      singletonMap(shardId.getIndexName(), typeMappings.immutableMap()));
}
static ApplyChangesOptions fromInputAndConfigFiles( ApplyChangesInput input, FileSystem fileSystem, DataFormats dataFormats, Path defaultConfigPath) { ApplyChangesOptions options = fromInput(input, fileSystem, dataFormats); List<Path> configPaths = input .getConfigPaths() .stream() .map(fileSystem::getPath) .collect(Collectors.toCollection(ArrayList::new)); configPaths.add(defaultConfigPath); for (Path configPath : configPaths) { if (Files.exists(configPath)) { // TODO: eventually maybe don't assume YAML options = options.fallingBackTo(ApplyChangesOptions.fromYaml(configPath)); } } return options; }
/**
 * Exercises Files.find with several predicates, checking both the filtered
 * results and that every path under testFolder is always visited.
 *
 * @throws IOException if traversal fails
 */
public void testFind() throws IOException {
  // Predicate accepting everything: the result must equal the visited set.
  PathBiPredicate pred = new PathBiPredicate((path, attrs) -> true);
  try (Stream<Path> s = Files.find(testFolder, Integer.MAX_VALUE, pred)) {
    Set<Path> result = s.collect(Collectors.toCollection(TreeSet::new));
    assertEquals(pred.visited(), all);
    assertEquals(result.toArray(new Path[0]), pred.visited());
  }
  // Only symbolic links pass the filter, but every path is still visited.
  pred = new PathBiPredicate((path, attrs) -> attrs.isSymbolicLink());
  try (Stream<Path> s = Files.find(testFolder, Integer.MAX_VALUE, pred)) {
    s.forEach(path -> assertTrue(Files.isSymbolicLink(path)));
    assertEquals(pred.visited(), all);
  }
  // Name-prefix filter: the fixture presumably contains exactly one file whose
  // name starts with "e", named "empty" — TODO confirm against folder setup.
  pred = new PathBiPredicate((path, attrs) -> path.getFileName().toString().startsWith("e"));
  try (Stream<Path> s = Files.find(testFolder, Integer.MAX_VALUE, pred)) {
    s.forEach(path -> assertEquals(path.getFileName().toString(), "empty"));
    assertEquals(pred.visited(), all);
  }
  // Combined filter expected to match nothing: the stream must be empty.
  pred =
      new PathBiPredicate(
          (path, attrs) -> path.getFileName().toString().startsWith("l") && attrs.isRegularFile());
  try (Stream<Path> s = Files.find(testFolder, Integer.MAX_VALUE, pred)) {
    s.forEach(path -> fail("Expect empty stream"));
    assertEquals(pred.visited(), all);
  }
}
/**
 * Demo of common Stream operations and Collectors over a participant list.
 * Results are assigned but unused; this method exists to showcase the APIs.
 */
public static void main(String[] args) {
  List<Participante> participantes = ParticipantesTdc.listaTodos();

  // Simple operations (original comment: "Operações Simples")
  participantes.stream().count();
  boolean participantInscrito = participantes.stream().anyMatch(Participante::isInscrito);
  Optional<String> optionalEmail2 =
      participantes.stream().map(Participante::getEmail).findAny();

  // Collectors: list, linked list, set, grouping, and grouping+mapping
  List<String> emailsList =
      participantes.stream().map(Participante::getEmail).collect(Collectors.toList());
  List<String> emailsLinkedList =
      participantes
          .stream()
          .map(Participante::getEmail)
          .collect(Collectors.toCollection(LinkedList::new));
  Set<String> emailsSet =
      participantes.stream().map(Participante::getEmail).collect(Collectors.toSet());
  // Participants bucketed by shirt size.
  Map<TamanhoCamisa, List<Participante>> participantesPorTamanho =
      participantes.stream().collect(Collectors.groupingBy(Participante::getTamanhoCamisa));
  // Emails bucketed by shirt size (downstream mapping collector).
  Map<TamanhoCamisa, List<String>> emailsPorTamanho =
      participantes
          .stream()
          .collect(
              Collectors.groupingBy(
                  Participante::getTamanhoCamisa,
                  Collectors.mapping(Participante::getEmail, Collectors.toList())));
}
/**
 * Returns the FakeNode neighbours reachable from the node with the given id,
 * following both real and fake segments that originate at it.
 *
 * <p>Side effects: lazily creates and caches FakeNode wrappers in fakeNodes,
 * and overwrites each destination node's cost with the first matching cost
 * function for the connecting segment.
 *
 * @param id the origin node id
 * @return fake nodes at the destinations of all outgoing segments
 */
private ArrayList<FakeNode> getNeighbours(long id) {
  ArrayList<FakeNode> retNodes = new ArrayList<>();
  // Outgoing real segments...
  ArrayList<Segment> neighbours =
      this.segments
          .stream()
          .filter(
              (Segment s) -> {
                return s.mOrigin == id;
              })
          .collect(Collectors.toCollection(ArrayList<Segment>::new));
  // ...plus outgoing fake (temporary) segments.
  neighbours.addAll(
      this.fakeSegments
          .stream()
          .filter(
              (Segment s) -> {
                return s.mOrigin == id;
              })
          .collect(Collectors.toCollection(ArrayList<Segment>::new)));
  FakeNode node;
  for (Segment s : neighbours) {
    // Reuse the cached wrapper for this destination, or create and cache one.
    if (fakeNodes.containsKey(s.mDestination)) {
      node = fakeNodes.get(s.mDestination);
    } else {
      node = new FakeNode(nodes.get(s.mDestination));
      fakeNodes.put(s.mDestination, node);
    }
    // Cost functions attached to this segment: real costs first, fake appended.
    ArrayList<CostFunction> functions =
        costs
            .stream()
            .filter(
                (CostFunction c) -> {
                  return c.getSegmentId() == s.mId;
                })
            .collect(Collectors.toCollection(ArrayList<CostFunction>::new));
    functions.addAll(
        fakeCosts
            .stream()
            .filter(
                (CostFunction c) -> {
                  return c.getSegmentId() == s.mId;
                })
            .collect(Collectors.toCollection(ArrayList<CostFunction>::new)));
    // NOTE(review): get(0) throws IndexOutOfBoundsException if no cost function
    // matches this segment — confirm every segment always has at least one.
    node.node.cost = functions.get(0);
    retNodes.add(fakeNodes.get(s.mDestination));
  }
  return retNodes;
}
/**
 * Looks up the chat rooms the given user belongs to and wraps each as a DTO.
 * The two trailing counters in the DTO constructor are initialized to 0.
 *
 * @param token the caller's auth token (unused here; kept for the interface)
 * @param userId the user whose rooms are listed
 * @return a mutable list of chat-room DTOs
 */
@Override
public ArrayList<ChatRoomDTO> findByUserId(Token token, UserId userId) {
  ArrayList<ChatRoomDTO> rooms = new ArrayList<>();
  chatRoomRepository
      .findChatRoomsByUserId(userId.id)
      .forEach(room -> rooms.add(new ChatRoomDTO(room.getId(), room.getName(), 0, 0)));
  return rooms;
}
/**
 * Returns the modules whose parent id equals the given pid, preserving the
 * iteration order of getModules().
 *
 * @param pid the parent id to match (modules with a null parent never match)
 * @return a mutable, insertion-ordered set of matching modules
 */
public Set<Module> getModulesByPid(String pid) {
  // Constructor reference replaces the original's redundant lambda supplier
  // (() -> new LinkedHashSet<>()); the intermediate local was also redundant.
  return getModules()
      .stream()
      .filter(module -> pid.equals(module.getParentId()))
      .collect(Collectors.toCollection(LinkedHashSet::new));
}
/**
 * Returns the in-range orthogonal neighbours (left, right, up, down) of the
 * given point.
 *
 * @param point the point whose neighbours are generated
 * @return a mutable list of the neighbours accepted by isInRange
 */
private List<Point> generatePoints(Point point) {
  int x = point.getX();
  int y = point.getY();
  // The four orthogonal candidates, then clipped to the valid range.
  List<Point> candidates =
      Arrays.asList(
          new Point(x - 1, y), new Point(x + 1, y), new Point(x, y - 1), new Point(x, y + 1));
  return candidates
      .stream()
      .filter(this::isInRange)
      .collect(Collectors.toCollection(ArrayList::new));
}
/**
 * Wraps every Player in the given command sources as a MinecraftPlayer,
 * dropping non-player sources (e.g. console or command blocks).
 *
 * @param collection the command sources to convert; must not be null
 * @return a mutable HashSet of MinecraftPlayer wrappers (name + unique id)
 */
@Nonnull
protected Set<MinecraftPlayer> commandSourceCollectionToMinecraftPlayer(
    @Nonnull Collection<? extends CommandSource> collection) {
  return collection
      .stream()
      // Only actual Player instances have a unique id to expose.
      .filter(source -> source instanceof Player)
      .map(player -> new MinecraftPlayer(player.getName(), ((Player) player).getUniqueId()))
      .collect(Collectors.toCollection(HashSet::new));
}
public LineListTool(final List<T> raw, final Function<T, List<String>> extractor) { Set<String> set = raw.stream() .flatMap(w -> extractor.apply(w).stream()) .collect(Collectors.toCollection(LinkedHashSet::new)); lines = new ArrayList<>(set); indices = IntStream.range(0, lines.size()).boxed().collect(Collectors.toMap(lines::get, identity())); }
/**
 * Returns the modules whose parent id equals the given pid and to which the
 * given access level grants module-action access, preserving the iteration
 * order of getModules().
 *
 * @param pid the parent id to match (modules with a null parent never match)
 * @param level the access level checked via hasAccessModuleAction
 * @return a mutable, insertion-ordered set of matching modules
 */
public Set<Module> getModulesByPid(String pid, String level) {
  // pid.equals(...) matches the single-argument overload's null-safe style; the
  // original module.getParentId().equals(pid) NPEs on modules without a parent.
  // LinkedHashSet::new replaces the redundant lambda supplier.
  return getModules()
      .stream()
      .filter(
          module ->
              pid.equals(module.getParentId()) && hasAccessModuleAction(module.getId(), level))
      .collect(Collectors.toCollection(LinkedHashSet::new));
}
/**
 * Returns a new MapArray containing only the pairs accepted by the predicate.
 *
 * @param func predicate over each pair's key and value
 * @return the filtered MapArray, or null if the error handler returns
 */
public MapArray<K, V> where(BiFunction<K, V, Boolean> func) {
  try {
    // Collect the accepted pairs into a fresh MapArray.
    return pairs
        .stream()
        .filter(pair -> func.apply(pair.key, pair.value))
        .collect(Collectors.toCollection(MapArray::new));
  } catch (Exception ex) {
    // Delegates to the project's error handler, which throws; the return below
    // only satisfies the compiler. (Renamed from the misleading "ignore" — the
    // exception is in fact rethrown, not ignored.)
    throwRuntimeException(ex);
    return null;
  }
}
/**
 * Builds a constraint from the given role groups, optional content, and logic.
 * Null entries in roleGroups are skipped; the rest are copied into this
 * instance's roleGroups collection.
 *
 * @param roleGroups role groups to copy (null entries ignored)
 * @param content optional content for the constraint
 * @param constraintLogic the logic combining the role groups
 */
public RestrictConstraint(
    final List<String[]> roleGroups,
    final Optional<String> content,
    final ConstraintLogic constraintLogic) {
  this.content = content;
  // Plain loop instead of the original's collect-into-field via
  // Collectors.toCollection(() -> this.roleGroups); same elements, same order.
  for (final String[] group : roleGroups) {
    if (group != null) {
      this.roleGroups.add(group);
    }
  }
  this.constraintLogic = constraintLogic;
}
/**
 * Builds an adjacency map from a space-separated list of two-character edge
 * tokens ("XY" meaning X -> Y), keyed by first character with an ordered set
 * of second characters.
 */
public Graph(String line) {
  // Reversing the whole trimmed string turns every token "XY" into "YX" (and
  // reverses token order, which grouping ignores), so appending it makes the
  // edge set symmetric — i.e. the graph undirected. NOTE(review): this relies
  // on every token being exactly two characters; confirm the input format.
  map =
      Arrays.stream(
              (line.trim() + " " + new StringBuilder(line.trim()).reverse().toString())
                  .split(" "))
          .collect(
              Collectors.groupingBy(
                  x -> ((String) x).charAt(0),
                  Collectors.mapping(
                      x -> ((String) x).charAt(1),
                      // LinkedHashSet keeps neighbour insertion order, de-duped.
                      Collectors.toCollection(LinkedHashSet<Character>::new))));
}
/**
 * Loads the association values persisted for the given saga instance.
 *
 * @param entityManager the entity manager to query with
 * @param sagaType the saga's class, converted to its persisted type name
 * @param sagaIdentifier the saga instance id
 * @return a mutable set of the saga's association values
 */
protected Set<AssociationValue> loadAssociationValues(
    EntityManager entityManager, Class<?> sagaType, String sagaIdentifier) {
  // Fetch the persisted association entries for this saga instance...
  final List<AssociationValueEntry> entries =
      entityManager
          .createNamedQuery(FIND_ASSOCIATIONS_NAMED_QUERY, AssociationValueEntry.class)
          .setParameter("sagaType", getSagaTypeName(sagaType))
          .setParameter("sagaId", sagaIdentifier)
          .getResultList();
  // ...and unwrap each entry into its AssociationValue.
  final Set<AssociationValue> values = new HashSet<>();
  for (final AssociationValueEntry entry : entries) {
    values.add(entry.getAssociationValue());
  }
  return values;
}
/**
 * Distinct, sorted first names of all persons surnamed "Maier", collected into
 * a LinkedList. (Method name is German: "all distinct first names of the
 * Maiers, sorted, as LinkedList".)
 */
@Test
public void alleVerschiedenenVornamenDerMaiersSortiertAlsLinkedList() {
  LinkedList<String> alleVornamenDerMaiers =
      newPersonenStream()
          .filter(where(Person::getNachname).is("Maier"))
          .map(Person::getVorname)
          .distinct()
          .sorted()
          .collect(Collectors.toCollection(LinkedList::new));
  // Fixture expectation: 559 distinct names; alphabetical order puts "Agnes"
  // first and "Yvonne" last.
  assertThat(alleVornamenDerMaiers, hasSize(559));
  assertThat(alleVornamenDerMaiers.get(0), is("Agnes"));
  assertThat(alleVornamenDerMaiers.get(558), is("Yvonne"));
}
/**
 * Filters out predictions with empty or too-short text, persists only the
 * surviving ones, and returns the ORIGINAL (unfiltered) list.
 *
 * <p>NOTE(review): returning {@code preds} rather than {@code filtered} means
 * callers receive entries that were never persisted — confirm intentional.
 * Also, the log message mentions "large/small bodies" but only short/empty
 * bodies are actually filtered.
 *
 * @param sec the caller's security context, forwarded to createPrediction
 * @param preds the candidate predictions
 * @return the input list, unchanged
 */
public List<Prediction> createPredictions(SecurityContext sec, List<Prediction> preds) {
  int originalSize = preds.size();
  // Keep predictions with non-empty text longer than MIN_PREDICTION_LENGTH.
  List<Prediction> filtered =
      preds
          .stream()
          .filter(p -> p.getText() != null && !p.getText().isEmpty())
          .filter(p -> p.getText().length() > MIN_PREDICTION_LENGTH)
          .collect(Collectors.toCollection(ArrayList::new));
  int newSize = filtered.size();
  if (newSize != originalSize)
    LOGGER.info("Filtered predictions with large/small bodies: {}", originalSize - newSize);
  // Persist only the filtered predictions.
  filtered.stream().forEach(p -> createPrediction(sec, p));
  return preds;
}
/**
 * Returns the distinct chains that the stones at this object's point belong
 * to, in encounter order. Stones without a chain are skipped.
 *
 * @return a mutable list of distinct chains
 */
public List<ChainOfStone> getChainsOfStones() {
  final List<Stone> stones = getStonesByPoint(point);
  final List<ChainOfStone> chains = new ArrayList<>();
  // Collect each stone's chain, skipping stones that belong to none.
  for (final Stone stone : stones) {
    final ChainOfStone chain = stone.getChainOfStone();
    if (chain != null) {
      chains.add(chain);
    }
  }
  GameLogger.getInstance().logg("dlugosc listy ");
  // De-duplicate while preserving first-seen order.
  return chains.stream().distinct().collect(Collectors.toCollection(ArrayList::new));
}
/**
 * Removes the given user's transaction from the branch's queue.
 *
 * @param branchId the branch whose queue is modified
 * @param userId the user to remove
 * @throws NoSuchUserInQueueException if the user has no queued transaction
 */
@Override
public void dequeue(Long branchId, Long userId) {
  List<QueueTransaction> queue = queues.getOrDefault(branchId, new LinkedList<>());
  // Objects.equals: the original compared ids with ==/!=, which is reference
  // comparison for boxed Longs and only "works" for values in the Long cache
  // (-128..127). Objects.equals is correct whether getUserId() is Long or long.
  if (queue.stream().noneMatch(t -> java.util.Objects.equals(t.getUserId(), userId))) {
    throw new NoSuchUserInQueueException();
  }
  // Rebuild the queue without this user's transaction(s).
  queues.computeIfPresent(
      branchId,
      (k, v) ->
          queue
              .stream()
              .filter(t -> !java.util.Objects.equals(t.getUserId(), userId))
              .collect(Collectors.toCollection(LinkedList::new)));
}
/**
 * Serializes a collection of entities as a JSON array stored under the given
 * label. A null collection writes nothing.
 *
 * <p>NOTE(review): pushLabel runs before the null check, so the label is
 * pushed even when nothing is written — confirm the label stack stays balanced
 * for null input.
 *
 * @param label the key under which the array is stored
 * @param objects the entities to serialize; may be null
 */
@Override
public void writeObjectArray(
    final String label, final Collection<? extends SerializableEntity> objects) {
  this.pushLabel(label);
  if (null == objects) {
    return;
  }
  // Serialize each entity and accumulate directly into a JSONArray.
  final JSONArray jsonObjects =
      objects.stream().map(this::serializeObject).collect(Collectors.toCollection(JSONArray::new));
  this.object.put(label, jsonObjects);
}
/**
 * GET /ordenCompras -> get all the ordenCompras.
 *
 * @param pageable the pagination request
 * @return 200 with the page's DTOs and pagination headers
 * @throws URISyntaxException if the pagination header URI cannot be built
 */
@RequestMapping(
    value = "/ordenCompras",
    method = RequestMethod.GET,
    produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
@Transactional(readOnly = true)
public ResponseEntity<List<OrdenCompraDTO>> getAllOrdenCompras(Pageable pageable)
    throws URISyntaxException {
  Page<OrdenCompra> page = ordenCompraRepository.findAll(pageable);
  HttpHeaders headers = PaginationUtil.generatePaginationHttpHeaders(page, "/api/ordenCompras");
  // Collectors.toList(): a random-access body list; the original's LinkedList
  // collector bought nothing for a sequentially-built, read-only response.
  List<OrdenCompraDTO> body =
      page.getContent()
          .stream()
          .map(ordenCompraMapper::ordenCompraToOrdenCompraDTO)
          .collect(Collectors.toList());
  return new ResponseEntity<>(body, headers, HttpStatus.OK);
}
/**
 * GET /fases -> get all the fases.
 *
 * @param offset the 1-based page number, or null for the default
 * @param limit the page size, or null for the default
 * @return 200 with the page's DTOs and pagination headers
 * @throws URISyntaxException if the pagination header URI cannot be built
 */
@RequestMapping(
    value = "/fases", method = RequestMethod.GET, produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
@Transactional(readOnly = true)
public ResponseEntity<List<FaseDTO>> getAll(
    @RequestParam(value = "page", required = false) Integer offset,
    @RequestParam(value = "per_page", required = false) Integer limit)
    throws URISyntaxException {
  Page<Fase> page = faseRepository.findAll(PaginationUtil.generatePageRequest(offset, limit));
  HttpHeaders headers =
      PaginationUtil.generatePaginationHttpHeaders(page, "/api/fases", offset, limit);
  // Collectors.toList(): a random-access body list; the original's LinkedList
  // collector bought nothing for a sequentially-built, read-only response.
  List<FaseDTO> body =
      page.getContent().stream().map(faseMapper::faseToFaseDTO).collect(Collectors.toList());
  return new ResponseEntity<>(body, headers, HttpStatus.OK);
}
private List<Long> loadPossibleTargetsIds() { WebElement table = webDriver.findElement(By.id(TABLE_ID)); List<WebElement> authors = table.findElements(By.className(AUTHOR_CELL_CLASS_NAME)); List<String> targetsUrls = authors .stream() .skip(1) // skip first player who is game admin or something .limit(MAX_AMOUNT_OF_TARGETS_IN_A_ROW) .map(e -> e.findElements(By.tagName("a"))) .filter(list -> list.size() == 1) // no <a> or more than one found? skip it. .map(list -> list.get(0)) .map(e -> e.getAttribute("href")) .collect(Collectors.toList()); return targetsUrls .stream() .map(this::extractTargetIdFromUrl) .filter(id -> id != 0) .collect(Collectors.toCollection(LinkedList::new)); }
/** Showcases the common java.util.stream.Collectors factory methods. */
@Test
public void testCollectors() {
  // Empty fixture lists — this test only demonstrates that the pipelines
  // compile and run; the results are not asserted.
  ArrayList<Person> people = new ArrayList<>();
  ArrayList<Integer> things = new ArrayList<>();
  ArrayList<Employee> employees = new ArrayList<>();
  ArrayList<Student> students = new ArrayList<>();
  // Accumulate names into a List
  List<String> list = people.stream().map(Person::getName).collect(Collectors.toList());
  // Accumulate names into a TreeSet
  Set<String> list2 =
      people.stream().map(Person::getName).collect(Collectors.toCollection(TreeSet::new));
  // Convert elements to strings and concatenate them, separated by commas
  String joined = things.stream().map(Object::toString).collect(Collectors.joining(", "));
  // Find highest-paid employee
  Optional<Employee> highestPaid =
      employees.stream().collect(Collectors.maxBy(Comparator.comparingInt(Employee::getSalary)));
  // Group employees by department
  Map<Department, List<Employee>> byDept =
      employees.stream().collect(Collectors.groupingBy(Employee::getDepartment));
  // Find highest-paid employee by department
  Map<Department, Optional<Employee>> highestPaidByDept =
      employees
          .stream()
          .collect(
              Collectors.groupingBy(
                  Employee::getDepartment,
                  Collectors.maxBy(Comparator.comparingInt(Employee::getSalary))));
  // Partition students into passing and failing
  Map<Boolean, List<Student>> passingFailing =
      students.stream().collect(Collectors.partitioningBy(s -> s.getGrade() >= PASS_THRESHOLD));
}
/**
 * Flattens out the provided metrics, collates them by "sample", and merges
 * those collations.
 *
 * @param resultsToReduce per-shard metric collections to combine
 * @return one merged metrics object per distinct trio
 */
private static Collection<MendelianViolationMetrics> mergeMetrics(
    final Collection<Collection<MendelianViolationMetrics>> resultsToReduce) {
  // Flatten the nested collections into one list.
  final Collection<MendelianViolationMetrics> allMetrics = new ArrayList<>();
  resultsToReduce.forEach(allMetrics::addAll);
  // Collate by trio identity: family|father|mother|offspring.
  final Map<String, List<MendelianViolationMetrics>> sampleToMetricsMap =
      allMetrics
          .stream()
          .collect(
              Collectors.groupingBy(
                  m ->
                      String.format(
                          "%s|%s|%s|%s", m.FAMILY_ID, m.FATHER, m.MOTHER, m.OFFSPRING)));
  // Merge each collation into a single metrics object. The explicit generic
  // type witnesses on the original toCollection call were redundant — the
  // compiler infers them from the constructor reference.
  return sampleToMetricsMap
      .values()
      .stream()
      .map(a -> (MendelianViolationMetrics) new MendelianViolationMetrics().merge(a))
      .collect(Collectors.toCollection(ArrayList::new));
}
/**
 * GET /account -> get the current user.
 *
 * @return 200 with the authenticated user's DTO (password omitted), or 500
 *     when no user is available
 */
@RequestMapping(
    value = "/account",
    method = RequestMethod.GET,
    produces = MediaType.APPLICATION_JSON_VALUE)
@Timed
public ResponseEntity<UserDTO> getAccount() {
  return Optional.ofNullable(userService.getUserWithAuthorities())
      .map(
          user ->
              new ResponseEntity<>(
                  new UserDTO(
                      user.getLogin(),
                      null, // password is never exposed in the DTO
                      user.getFirstName(),
                      user.getLastName(),
                      user.getEmail(),
                      user.getLangKey(),
                      // Authority entities flattened to their names.
                      user.getAuthorities()
                          .stream()
                          .map(Authority::getName)
                          .collect(Collectors.toCollection(LinkedList::new))),
                  HttpStatus.OK))
      .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
}