/**
 * Get all the commands available to this CommandManager. This is potentially expensive.
 *
 * @param startsWith Limit to commands that start with this string. "" or "?" will return all
 *     commands.
 * @param allowAdmin Allow admin-only commands?
 * @return Map of available commands.
 */
public Map<String, Command> getAllCommands(final String startsWith, final boolean allowAdmin) {
  final String sw = startsWith.toLowerCase();
  final boolean allCommands = startsWith.isEmpty() || startsWith.equals("?");
  // First get our own commands
  final Map<String, Command> result =
      knownCommands
          .entrySet()
          .parallelStream()
          .filter(e -> allowAdmin || !e.getValue().isAdminOnly())
          .filter(
              e ->
                  allCommands
                      || e.getKey().startsWith(sw)
                      || e.getKey().startsWith(HIDDEN_PREFIX + sw))
          .collect(Collectors.toMap(Entry::getKey, Entry::getValue));
  // Now all our submanagers' commands
  for (CommandManager subManager : subManagers) {
    result.putAll(
        subManager
            .getAllCommands(startsWith, allowAdmin)
            .entrySet()
            .parallelStream()
            .filter(entry -> !result.containsKey(entry.getKey()))
            .collect(Collectors.toMap(Entry::getKey, Entry::getValue)));
  }
  return result;
}
protected List<String> serviceNamesNotFoundInZipkin(List<io.zipkin.Span> spans) {
  List<String> serviceNamesFoundInAnnotations =
      spans
          .stream()
          .filter(span -> span.annotations != null)
          .map(span -> span.annotations)
          .flatMap(Collection::stream)
          .filter(annotation -> annotation.endpoint != null)
          .map(annotation -> annotation.endpoint)
          .map(endpoint -> endpoint.serviceName)
          .distinct()
          .collect(Collectors.toList());
  List<String> serviceNamesFoundInBinaryAnnotations =
      spans
          .stream()
          .filter(span -> span.binaryAnnotations != null)
          .map(span -> span.binaryAnnotations)
          .flatMap(Collection::stream)
          .filter(binaryAnnotation -> binaryAnnotation.endpoint != null)
          .map(binaryAnnotation -> binaryAnnotation.endpoint)
          .map(endpoint -> endpoint.serviceName)
          .distinct()
          .collect(Collectors.toList());
  List<String> names = new ArrayList<>();
  names.addAll(serviceNamesFoundInAnnotations);
  names.addAll(serviceNamesFoundInBinaryAnnotations);
  return names.contains(getAppName()) ? Collections.emptyList() : names;
}
private void initializeExplicitConstructor(
    TurinTypeContructorDefinitionNode constructor, SymbolResolver resolver) {
  List<? extends FormalParameter> allParams = constructor.getParameters();
  List<FormalParameter> paramsWithoutDefaultValues =
      allParams
          .stream()
          .filter((p) -> !p.hasDefaultValue())
          .collect(Collectors.<FormalParameter>toList());
  List<String> paramSignatures =
      paramsWithoutDefaultValues
          .stream()
          .map((p) -> p.getType().jvmType().getSignature())
          .collect(Collectors.toList());
  boolean hasDefaultParameters = allParams.stream().anyMatch(FormalParameter::hasDefaultValue);
  if (hasDefaultParameters) {
    paramSignatures.add("Ljava/util/Map;");
  }
  JvmConstructorDefinition constructorDefinition =
      new JvmConstructorDefinition(
          jvmType().getInternalName(), "(" + String.join("", paramSignatures) + ")V");
  constructors.add(
      new InternalConstructorDefinition(
          new ReferenceTypeUsage(this), allParams, constructorDefinition));
}
@Test
public void vote() throws Exception {
  ObjectMapper mapper = new ObjectMapper();
  Restaurant restaurantB =
      mapper.readValue(new StringReader(testRestaurantJson1), Restaurant.class);
  restaurantRepository.saveAndFlush(restaurantB);
  Restaurant restaurantA =
      mapper.readValue(new StringReader(testRestaurantJson2), Restaurant.class);
  restaurantRepository.saveAndFlush(restaurantA);
  doVote(restaurantB.getRestaurantName(), "qqq");
  doVote(restaurantB.getRestaurantName(), "www");
  doVote(restaurantB.getRestaurantName(), "ddd");
  doVote(restaurantA.getRestaurantName(), "kkk");
  doVote(restaurantA.getRestaurantName(), "lll");
  List<Vote> votes = votesRepository.findAll();
  // check that all votes are here
  assertEquals(MAX_TOTAL_VOTES, votes.size());
  Map<Long, Long> res =
      votes.stream().collect(Collectors.groupingBy(Vote::getRestaurantId, Collectors.counting()));
  // check that the votes are counted correctly per restaurant
  assertEquals(3L, res.get(restaurantB.getId()).longValue());
  assertEquals(2L, res.get(restaurantA.getId()).longValue());
}
public void initComponents() {
  competitors =
      DB.getCompetitors(turniej.getId())
          .stream()
          .filter(c -> c.getGoesFinal())
          .collect(Collectors.toList());
  competitorMap = competitors.stream().collect(Collectors.toMap(c -> c.getId(), c -> c));
  singleGames =
      DB.getSingleGames(turniej.getId(), true)
          .stream()
          .filter(
              sg ->
                  competitorMap.containsKey(sg.getCompetitorW())
                      && competitorMap.containsKey(sg.getCompetitorB()))
          .collect(Collectors.toList());
  // Filtering above because the database also returns games where a finalist
  // played against a non-finalist; this could be fixed in the database instead.
  for (Competitor c : competitors) {
    competitorGames.put(c, new LinkedList<>());
  }
  for (SingleGame sg : singleGames) {
    competitorGames.get(competitorMap.get(sg.getCompetitorW())).add(sg);
    competitorGames.get(competitorMap.get(sg.getCompetitorB())).add(sg);
  }
  removeAll();
  table = new JTable(new MyTableModel());
  add(new JScrollPane(table));
  updateTables();
}
@RequestMapping(value = "/api/group", method = RequestMethod.POST, produces = "application/json") public List<BroadcastGroup> createGroup( @RequestParam("name") String name, @RequestParam("setSize") Integer setSize) { if (broadcastRepository.findOne(name) == null) { AtomicInteger counter = new AtomicInteger(0); List<String> userIds = session .getUsers() .stream() .filter(slackUser -> !slackUser.isBot()) .map(SlackPersona::getId) .collect(Collectors.toList()); List<BroadcastSet> broadcastSets = Lists.partition(userIds, setSize) .stream() .map( set -> new BroadcastSet( counter.incrementAndGet(), set, new ArrayList<BroadcastMessage>())) .collect(Collectors.toList()); broadcastRepository.save(new BroadcastGroup(name, broadcastSets, new Date())); } else { log.error("Group with name {} already exist", name); } return broadcastRepository.findAll(); }
static <T> T unwrapOtherMonadTypes(Comprehender<T> comp, Object apply) {
  if (comp.instanceOfT(apply)) {
    return (T) apply;
  }
  if (apply instanceof Optional) {
    if (((Optional) apply).isPresent()) {
      return comp.of(((Optional) apply).get());
    }
    return comp.empty();
  }
  if (apply instanceof Stream) {
    return comp.of(((Stream) apply).collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof IntStream) {
    return comp.of(
        ((IntStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof DoubleStream) {
    return comp.of(
        ((DoubleStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof LongStream) {
    return comp.of(
        ((LongStream) apply).boxed().collect(Collectors.toCollection(MaterializedList::new)));
  }
  if (apply instanceof CompletableFuture) {
    return comp.of(((CompletableFuture) apply).join());
  }
  if (apply instanceof StreamT) {
    return comp.of(((StreamT) apply).unwrap());
  }
  return (T)
      new ComprehenderSelector().selectComprehender(apply).resolveForCrossTypeFlatMap(comp, apply);
}
public Theme(
    String name,
    List<String> cssRelativePaths,
    List<String> headJsRelativePaths,
    List<String> bodyJsRelativePaths) {
  this.name = name;
  String uriPrefix = UriUtils.getPublicUri(this) + "/";
  this.cssTagSuffixes =
      cssRelativePaths
          .stream()
          .map(
              relativePath ->
                  uriPrefix + relativePath + "\" rel=\"stylesheet\" type=\"text/css\" />")
          .collect(Collectors.toList());
  this.headJsTagSuffixes =
      headJsRelativePaths
          .stream()
          .map(relativePath -> uriPrefix + relativePath + "\" type=\"text/javascript\"></script>")
          .collect(Collectors.toList());
  this.bodyJsTagSuffixes =
      bodyJsRelativePaths
          .stream()
          .map(relativePath -> uriPrefix + relativePath + "\" type=\"text/javascript\"></script>")
          .collect(Collectors.toList());
}
public static void main(String[] args) {
  // stream.collect(collector): values
  // Collectors.groupingBy(classifier)
  Stream<Locale> locales = Stream.of(Locale.getAvailableLocales());
  Map<String, List<Locale>> languageToLocales =
      locales.collect(Collectors.groupingBy(Locale::getDisplayLanguage));
  System.out.println(languageToLocales);
  System.out.println("--------------");

  // stream.collect(collector): values
  // Collectors.groupingBy(classifier, downstream)
  locales = Stream.of(Locale.getAvailableLocales());
  Map<String, Long> languageToLocalesCounting =
      locales.collect(Collectors.groupingBy(Locale::getDisplayLanguage, Collectors.counting()));
  System.out.println(languageToLocalesCounting);
  System.out.println("--------------");

  // stream.collect(collector): values
  // Collectors.partitioningBy(predicate)
  locales = Stream.of(Locale.getAvailableLocales());
  Map<Boolean, List<Locale>> englishAndOtherLocales =
      locales.collect(Collectors.partitioningBy(l -> l.getLanguage().equals("en")));
  List<Locale> englishLocales = englishAndOtherLocales.get(true);
  System.out.println(englishLocales);
  System.out.println("--------------");
}
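// A minimal companion sketch, not part of the original demo: Collectors.groupingBy also accepts
// Collectors.mapping as a downstream collector, which transforms each grouped element before
// collecting it. Here the locales grouped by display language are reduced to their country codes.
// The method name is illustrative.
public static void groupingWithMappingSketch() {
  Map<String, Set<String>> languageToCountries =
      Stream.of(Locale.getAvailableLocales())
          .collect(
              Collectors.groupingBy(
                  Locale::getDisplayLanguage,
                  Collectors.mapping(Locale::getCountry, Collectors.toSet())));
  System.out.println(languageToCountries);
}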
public void moveToGroup(List<BibEntry> entries, NamedCompound undoAll) {
  List<GroupTreeNode> groupsContainingEntries =
      node.getNode()
          .getRoot()
          .getContainingGroups(entries, false)
          .stream()
          .filter(containingNode -> containingNode.getGroup().supportsRemove())
          .collect(Collectors.toList());
  List<AbstractGroup> affectedGroups =
      groupsContainingEntries.stream().map(GroupTreeNode::getGroup).collect(Collectors.toList());
  affectedGroups.add(node.getNode().getGroup());
  if (!WarnAssignmentSideEffects.warnAssignmentSideEffects(affectedGroups, panel.frame())) {
    return; // user aborted operation
  }
  // first remove
  for (GroupTreeNode group : groupsContainingEntries) {
    Optional<EntriesGroupChange> undoRemove = group.getGroup().remove(entries);
    if (undoRemove.isPresent()) {
      undoAll.addEdit(UndoableChangeEntriesOfGroup.getUndoableEdit(node, undoRemove.get()));
    }
  }
  // then add
  Optional<EntriesGroupChange> undoAdd = node.addEntriesToGroup(entries);
  if (undoAdd.isPresent()) {
    undoAll.addEdit(UndoableChangeEntriesOfGroup.getUndoableEdit(node, undoAdd.get()));
  }
}
@Transactional
public void update(final Employee employee) {
  // The following filters prevent null values and empty assignment objects from being persisted
  employee.setAssignments(
      employee
          .getAssignments()
          .parallelStream()
          .filter(assignment -> assignment.getPosition() != null)
          .collect(Collectors.toList()));
  for (final Assignment assign : employee.getAssignments()) {
    assign.setEmployee(employee);
    assign.setDepartment(assign.getDepartment());
    for (final HeadOfDepartments hod : assign.getDeptSet()) {
      hod.setAssignment(assign);
    }
  }
  employee.setJurisdictions(
      employee
          .getJurisdictions()
          .parallelStream()
          .filter(
              jurisdiction ->
                  jurisdiction.getBoundaryType() != null && jurisdiction.getBoundary() != null)
          .collect(Collectors.toList()));
  for (final Jurisdiction jurisdiction : employee.getJurisdictions()) {
    jurisdiction.setEmployee(employee);
    jurisdiction.setBoundaryType(jurisdiction.getBoundaryType());
    jurisdiction.setBoundary(jurisdiction.getBoundary());
  }
  employeeRepository.saveAndFlush(employee);
}
@Transactional
public void create(final Employee employee) {
  employee.setPwdExpiryDate(
      new DateTime().plusDays(applicationProperties.userPasswordExpiryInDays()).toDate());
  employee.setPassword(passwordEncoder.encode(EisConstants.DEFAULT_EMPLOYEE_PWD));
  // The following filters prevent null values and empty assignment objects from being persisted
  employee.setAssignments(
      employee
          .getAssignments()
          .parallelStream()
          .filter(assignment -> assignment.getPosition() != null)
          .collect(Collectors.toList()));
  for (final Assignment assign : employee.getAssignments()) {
    assign.setEmployee(employee);
    assign.setDepartment(assign.getDepartment());
    for (final HeadOfDepartments hod : assign.getDeptSet()) {
      hod.setAssignment(assign);
    }
  }
  employee.setJurisdictions(
      employee
          .getJurisdictions()
          .parallelStream()
          .filter(
              jurisdiction ->
                  jurisdiction.getBoundaryType() != null && jurisdiction.getBoundary() != null)
          .collect(Collectors.toList()));
  for (final Jurisdiction jurisdiction : employee.getJurisdictions()) {
    jurisdiction.setEmployee(employee);
    jurisdiction.setBoundaryType(jurisdiction.getBoundaryType());
    jurisdiction.setBoundary(jurisdiction.getBoundary());
  }
  employee.getRoles().add(roleService.getRoleByName(EisConstants.ROLE_EMPLOYEE));
  employeeRepository.save(employee);
}
private final void bindResourceToLocalContainer(
    final Class<?> resource, final Class<?> container) {
  final Set<Method> nonAbstractMethods =
      Sets.newHashSet(resource.getMethods())
          .stream()
          .filter(method -> !Modifier.isAbstract(method.getModifiers()))
          .collect(Collectors.toSet());
  // The resource is expected to declare only abstract methods
  Preconditions.checkState(
      nonAbstractMethods.isEmpty(),
      "Found non-abstract methods in " + resource + ": " + nonAbstractMethods);
  final Set<Method> abstractMethods =
      Sets.newHashSet(resource.getMethods())
          .stream()
          .filter(method -> Modifier.isAbstract(method.getModifiers()))
          .collect(Collectors.toSet());
  for (final Method resourceMethod : abstractMethods) {
    final Method containerMethod = findMatchingMethod(container, resourceMethod);
    if (containerMethod != null) {
      this.resourceToContainer.put(resourceMethod, containerMethod);
    }
  }
  bindResourceToContainer(resource, injector.getInstance(container));
}
public static void main(String[] args)
    throws InterruptedException, AlreadyConnectedException, JIException, IOException,
        NotConnectedException, DuplicateGroupException, AddFailedException {
  final FileWriter fileWriter = new FileWriter("data.csv");
  fileWriter.write("sep=,\n");
  fileWriter.flush();
  CSVWriter writer = new CSVWriter(fileWriter, ',', '\0');
  // Header row: one column per configured tag
  writer.writeNext(
      Tag.TAG_TO_ID_MAPPING
          .entrySet()
          .stream()
          .map(tagStringEntry -> tagStringEntry.getKey().toString())
          .collect(Collectors.toCollection(LinkedList::new))
          .toArray(new String[0]));
  final OPCDataReader opcDataReader = new OPCDataReader(Tag.TAG_TO_ID_MAPPING).startReading();
  Thread.sleep(2000);
  while (true) {
    final Map<Tag, Double> actualValues = opcDataReader.getActualValues();
    System.out.println(actualValues);
    // One data row per second, in the same column order as the header
    final String[] data =
        Tag.TAG_TO_ID_MAPPING
            .entrySet()
            .stream()
            .map(tagStringEntry -> "" + actualValues.get(tagStringEntry.getKey()))
            .collect(Collectors.toCollection(LinkedList::new))
            .toArray(new String[0]);
    writer.writeNext(data);
    writer.flush();
    Thread.sleep(1000);
  }
}
/**
 * Ngrams counter.
 *
 * @param nGramSpec the n gram spec
 * @return the counter
 */
default Counter<Tuple> ngrams(@NonNull NGramSpec nGramSpec) {
  return nGramSpec
      .getValueCalculator()
      .adjust(
          new HashMapCounter<>(
              stream()
                  .flatMap(
                      doc ->
                          doc.ngrams(
                                  nGramSpec.getAnnotationType(),
                                  nGramSpec.getMin(),
                                  nGramSpec.getMax())
                              .stream()
                              .filter(nGramSpec.getFilter())
                              .map(
                                  hString ->
                                      $(
                                          hString
                                              .get(nGramSpec.getAnnotationType())
                                              .stream()
                                              .map(nGramSpec.getToStringFunction())
                                              .collect(Collectors.toList())))
                              .collect(Collectors.toList()))
                  .countByValue()));
}
@Override
public Set<ArtifactSpec> resolveAll(final Set<ArtifactSpec> specs) {
  resetListeners();
  final MavenResolvedArtifact[] artifacts;
  try {
    artifacts =
        this.resolver
            .resolve(specs.stream().map(ArtifactSpec::mavenGav).collect(Collectors.toList()))
            .withTransitivity()
            .as(MavenResolvedArtifact.class);
  } finally {
    completeTransferListener();
  }
  return Arrays.stream(artifacts)
      .map(
          artifact -> {
            final MavenCoordinate coord = artifact.getCoordinate();
            return new ArtifactSpec(
                "compile",
                coord.getGroupId(),
                coord.getArtifactId(),
                coord.getVersion(),
                coord.getPackaging().getId(),
                coord.getClassifier(),
                artifact.asFile());
          })
      .collect(Collectors.toSet());
}
@Test
public void test() throws InterruptedException {
  ObstructionFreeStm stm = new ObstructionFreeStm();
  List<Memory<Integer>> memories =
      IntStream.range(0, nMemories).mapToObj(i -> stm.create(0)).collect(Collectors.toList());
  List<Worker> workers =
      IntStream.range(0, nTransactions).mapToObj(i -> new Worker(stm)).collect(Collectors.toList());
  List<Integer> workingOrders = new ArrayList<>();
  IntStream.range(0, nMemories * 2 + 1)
      .forEach(mi -> IntStream.range(0, nTransactions).forEach(workingOrders::add));
  Collections.shuffle(workingOrders, random);
  for (int workingOrder : workingOrders) {
    workers.get(workingOrder).work(memories);
  }
  stm.transaction(
      transaction -> {
        int sum = 0;
        for (Memory<Integer> memory : memories) {
          int read = stm.get(transaction, memory);
          System.out.println("FINAL MEMORY VALUE = " + read);
          sum += read;
        }
        if (sum != 0) {
          throw new RuntimeException("Final sum is not zero, but is " + sum);
        }
        return true;
      });
}
private void walk(String authority, Path root, String prefix, int count) throws IOException {
  Path cachePath = BASE_PATH.resolve(authority);
  Path dirPath = Paths.get(cachePath.toString(), root.toString());
  Set<String> fileNames =
      Files.walk(dirPath)
          .filter(p -> p.toString().endsWith(".txt"))
          .map(p -> Paths.get(p.toAbsolutePath().toString()))
          .map(cachePath::relativize)
          .map(Object::toString)
          .collect(Collectors.toSet());
  assertFalse(fileNames.isEmpty());
  Set<String> expected =
      IntStream.range(0, count).mapToObj(i -> prefix + i + ".txt").collect(Collectors.toSet());
  assertFalse(expected.isEmpty());
  Set<String> extra = new HashSet<>(fileNames);
  extra.removeAll(expected);
  if (!extra.isEmpty()) {
    System.out.println("Extra entries " + extra);
  }
  assertTrue("Extra entries", extra.isEmpty());
  Set<String> missing = new HashSet<>(expected);
  missing.removeAll(fileNames);
  if (!missing.isEmpty()) {
    System.out.println("Missing entries " + missing);
  }
  assertTrue("Missing entries", missing.isEmpty());
}
@Override
public IPersonAttributes getPerson(final String uid) {
  final AttributeResolutionContext attributeResolutionContext = new AttributeResolutionContext();
  attributeResolutionContext.setPrincipal(uid);
  try {
    this.attributeResolver.resolveAttributes(attributeResolutionContext);
    final Map<String, List<Object>> attributes =
        attributeResolutionContext
            .getResolvedIdPAttributes()
            .entrySet()
            .stream()
            .collect(
                Collectors.toMap(
                    Map.Entry::getKey,
                    p ->
                        p.getValue()
                            .getValues()
                            .stream()
                            .map(IdPAttributeValue::getValue)
                            .collect(Collectors.toList())));
    return new NamedPersonImpl(uid, attributes);
  } catch (final ResolutionException e) {
    throw Throwables.propagate(e);
  }
}
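// A minimal side note, not from the original sources: the two-argument Collectors.toMap used in
// several snippets here throws IllegalStateException if two elements map to the same key. When
// collisions are possible, the three-argument overload takes a merge function. The helper name,
// its parameter, and the key choice below are illustrative.
static Map<Character, String> firstByInitial(List<String> names) {
  return names
      .stream()
      .collect(
          Collectors.toMap(
              name -> name.charAt(0), // key: first character of the name
              Function.identity(), // value: the name itself
              (first, second) -> first)); // merge function: keep the first value on collision
}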
public static void main(String[] args) {
  Scanner sc = new Scanner(System.in);
  String[] numbers = sc.nextLine().split(" ");
  String sort = sc.nextLine();
  if (sort.equals("Ascending")) {
    List<Integer> output =
        Arrays.stream(numbers).map(Integer::parseInt).sorted().collect(Collectors.toList());
    for (Integer item : output) {
      System.out.print(item + " ");
    }
  } else if (sort.equals("Descending")) {
    List<Integer> output =
        Arrays.stream(numbers)
            .map(Integer::parseInt)
            .sorted(Comparator.reverseOrder())
            .collect(Collectors.toList());
    for (Integer item : output) {
      System.out.print(item + " ");
    }
  }
}
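// A small alternative sketch, not in the original: Collectors.joining can replace the print loops
// above by building the output string in the pipeline itself. The method name and the boolean
// flag are illustrative.
static String sortAndJoin(String[] numbers, boolean ascending) {
  Comparator<Integer> order = ascending ? Comparator.naturalOrder() : Comparator.reverseOrder();
  return Arrays.stream(numbers)
      .map(Integer::parseInt)
      .sorted(order)
      .map(String::valueOf)
      .collect(Collectors.joining(" ")); // e.g. "3 2 1" for descending input "1 2 3"
}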
public static void main(String[] args) {
  AccessIdentifiers2 ac = new AccessIdentifiers2();
  System.out.println(ac.getClass().getName());
  // Declared field names
  System.out.println(
      Arrays.stream(ac.getClass().getDeclaredFields())
          .map(Field::getName)
          .collect(Collectors.joining(" || ")));
  // Declared method names
  System.out.println(
      Arrays.stream(ac.getClass().getDeclaredMethods())
          .map(Method::getName)
          .collect(Collectors.joining(" || ")));
  // Declared method return types
  System.out.println(
      Arrays.stream(ac.getClass().getDeclaredMethods())
          .map(Method::getReturnType)
          .map(Class::getName)
          .collect(Collectors.joining(" || ")));
  // Implemented interfaces
  System.out.println(
      Arrays.stream(ac.getClass().getInterfaces())
          .map(Class::getName)
          .collect(Collectors.joining(" || ")));
  System.out.println(ac.getClass().getDeclaredFields().length);
  System.out.println(ac.getClass().getDeclaredMethods().length);
}
private static List<Double> getWeightVectorForClass(
    Map<String, List<LinkedHashMap<String, Object>>> documents,
    String key,
    List<Integer> featureIndexList,
    GraphDatabaseService db) {
  List<Double> weightVector;
  Transaction tx = db.beginTx();
  // Get class id
  Long classId =
      db.findNodesByLabelAndProperty(DynamicLabel.label("Class"), "name", key)
          .iterator()
          .next()
          .getId();
  // Get weight vector for class
  List<Long> longs =
      documents
          .get(key)
          .stream()
          .map(a -> ((Integer) a.get("feature")).longValue())
          .collect(Collectors.toList());
  weightVector =
      featureIndexList
          .stream()
          .map(i -> longs.contains(i.longValue()) ? tfidf(db, i.longValue(), classId) : 0.0)
          .collect(Collectors.toList());
  tx.success();
  tx.close();
  return weightVector;
}
public static void createPackageJar(List<Member> members, PackageVersion version) {
  // These members should be collected before inserting memberships; right now
  // we don't need UploadedPackageMember
  InvocationContext context = InvocationContext.get();
  List<Instance> packageMembers = context.getDbChanges().getAdded();
  List<UploadedPackageMember> uploadMemberships =
      members
          .stream()
          // Prepare UploadedPackageMember from the package's component
          .map(m -> createUploadedMembership(m.getClonedComponent(), m.getMembership(), version))
          // Collect to list
          .collect(Collectors.toList());
  List<UploadedPackageMember> createUploadPackageMembers =
      createUploadPackageMembers(UploadingPackage.getMembers(), version);
  uploadMemberships.addAll(createUploadPackageMembers);
  // Save uploaded package memberships
  context.getDatabase().upsert(uploadMemberships, DMLOperationType.THROW_ERRORS);
  List<Instance> allInstances =
      uploadMemberships.stream().map(m -> m.getComponent()).collect(Collectors.toList());
  prepareJAR(version, allInstances);
  prepatePackageData(version, packageMembers);
}
/**
 * Updates, creates or deletes UDI events using a given list of UDI Event DTOs.
 *
 * @param period {@link LocalDate} period for which UDI Events will be created/updated/deleted.
 * @param updatedUdiEvents {@link List} of {@link UdiEventDto}.
 */
public void updateUdiEvents(LocalDate period, List<UdiEventDto> updatedUdiEvents) {
  LOGGER.debug("Updating udi events for period [{}]", period);
  // fetch existing udi events
  Map<String, UdiEvent> udiEventsPerId =
      udiEventRepository
          .findUdiEventsForPeriod(period)
          .stream()
          .collect(Collectors.toMap(UdiEvent::getId, Function.identity()));
  // 1. Update or create UdiEvents
  for (UdiEventDto udiEventDto : updatedUdiEvents) {
    if (!udiEventsPerId.containsKey(udiEventDto.getId())) {
      createUdiEventFromDto(udiEventDto, period);
    } else {
      updateUdiEventFromDto(udiEventsPerId.get(udiEventDto.getId()), udiEventDto);
    }
  }
  // 2. Delete udi events not present in the list anymore
  Set<String> updatedIds =
      updatedUdiEvents.stream().map(UdiEventDto::getId).collect(Collectors.toSet());
  udiEventsPerId
      .entrySet()
      .stream()
      .filter(udiEventPerId -> !updatedIds.contains(udiEventPerId.getKey()))
      .forEach(
          udiEventPerId -> {
            LOGGER.debug("Deleted udi event with id [{}]", udiEventPerId.getKey());
            udiEventRepository.delete(udiEventPerId.getValue());
          });
}
public static List<Method> getMethods(final Object object) {
  final List<Method> methods =
      ReflectionUtils.getAllMethods(object.getClass())
          .stream()
          .filter(method -> !IGNORE_BY_CLASSNAME.contains(method.getDeclaringClass().getName()))
          .filter(method -> !method.getName().contains("$jacoco"))
          .sorted((method1, method2) -> method1.getName().compareTo(method2.getName()))
          .collect(Collectors.toList());
  final Set<String> uniqueNames = Sets.newHashSet();
  // TODO handle overloaded methods with the same name correctly (currently they are filtered out)
  return methods
      .stream()
      .filter(
          method -> {
            if (uniqueNames.contains(method.getName())) {
              return false;
            }
            uniqueNames.add(method.getName());
            return true;
          })
      .filter(
          method ->
              !method.getDeclaringClass().equals(ConsoleScript.class)
                  || !method.getName().equals("run"))
      .collect(Collectors.toList());
}
private void deleteNotExisting(SynchronizationBusinessDataRequest request) {
  final Long departmentId = request.getDepartmentId();
  final List<Long> newDepartmentIds =
      request.getDepartment().stream().map(Department::getId).collect(Collectors.toList());
  final List<Long> newTransporterIds =
      request.getTransporter().stream().map(Transporter::getId).collect(Collectors.toList());
  final List<Long> newEmployeeIds =
      request.getEmployee().stream().map(Employee::getId).collect(Collectors.toList());
  final List<Long> newTransitIds =
      request.getTransit().stream().map(Transit::getId).collect(Collectors.toList());
  final List<Long> oldDepartmentIds = departmentRepository.selectIds();
  final List<Long> oldTransporterIds = transporterRepository.selectIds();
  final List<Long> oldEmployeeIds = employeeRepository.selectIds();
  final List<Long> oldTransitIds = transitRepository.selectIds();
  // oldDepartmentIds.stream().filter(oldId ->
  //     !newDepartmentIds.contains(oldId)).forEach(departmentRepository::delete);
  // oldTransporterIds.stream().filter(oldId ->
  //     !newTransporterIds.contains(oldId)).forEach(transporterRepository::delete);
  // oldEmployeeIds.stream().filter(oldId ->
  //     !newEmployeeIds.contains(oldId)).forEach(employeeRepository::delete);
  // oldTransitIds.stream().filter(oldId ->
  //     !newTransitIds.contains(oldId)).forEach(transitRepository::delete);
}
@Override
DebugConfig getConfig() {
  DebugConfig config = DebugScope.getConfig();
  List<DebugDumpHandler> dumpHandlers =
      config == null
          ? new ArrayList<>()
          : config.dumpHandlers().stream().collect(Collectors.toList());
  List<DebugVerifyHandler> verifyHandlers =
      config == null
          ? new ArrayList<>()
          : config.verifyHandlers().stream().collect(Collectors.toList());
  GraalDebugConfig debugConfig =
      new GraalDebugConfig(
          GraalDebugConfig.Options.Log.getValue(),
          "" /* unscoped meter */,
          GraalDebugConfig.Options.TrackMemUse.getValue(),
          "" /* unscoped time */,
          GraalDebugConfig.Options.Dump.getValue(),
          GraalDebugConfig.Options.Verify.getValue(),
          null /* no method filter */,
          "" /* unscoped method metering */,
          System.out,
          dumpHandlers,
          verifyHandlers);
  return debugConfig;
}
public Opml export(User user) {
  Opml opml = new Opml();
  opml.setFeedType("opml_1.1");
  opml.setTitle(String.format("%s subscriptions in CommaFeed", user.getName()));
  opml.setCreated(new Date());

  List<FeedCategory> categories = feedCategoryDAO.findAll(user);
  Collections.sort(
      categories,
      (e1, e2) ->
          MoreObjects.firstNonNull(e1.getPosition(), 0)
              - MoreObjects.firstNonNull(e2.getPosition(), 0));

  List<FeedSubscription> subscriptions = feedSubscriptionDAO.findAll(user);
  Collections.sort(
      subscriptions,
      (e1, e2) ->
          MoreObjects.firstNonNull(e1.getPosition(), 0)
              - MoreObjects.firstNonNull(e2.getPosition(), 0));

  // export root categories
  for (FeedCategory cat :
      categories.stream().filter(c -> c.getParent() == null).collect(Collectors.toList())) {
    opml.getOutlines().add(buildCategoryOutline(cat, categories, subscriptions));
  }

  // export root subscriptions
  for (FeedSubscription sub :
      subscriptions.stream().filter(s -> s.getCategory() == null).collect(Collectors.toList())) {
    opml.getOutlines().add(buildSubscriptionOutline(sub));
  }

  return opml;
}
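// A small aside, not from the original source: the subtraction-based comparators above can be
// expressed with Comparator.comparingInt, which states the sort key directly and avoids the
// overflow pitfall of subtraction-based comparisons. The helper name is illustrative; FeedCategory
// and MoreObjects.firstNonNull are taken from the snippet above.
static Comparator<FeedCategory> byPosition() {
  // usage: Collections.sort(categories, byPosition());
  return Comparator.comparingInt(c -> MoreObjects.firstNonNull(c.getPosition(), 0));
}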
@Test
public void testRead() throws Exception {
  Model shapesModel = RDFReaderFactory.createResourceReader(shapeResource).read();
  List<Shape> shapes =
      shapesModel
          .listResourcesWithProperty(RDF.type, SHACL.Shape)
          .toList()
          .stream()
          .map(r -> ShapeReader.create().read(r))
          .collect(Collectors.toList());
  assertThat(shapes).hasSize(1);
  Shape sh = shapes.get(0);
  assertThat(sh.getScopes()).hasSize(ShapeScopeType.values().length);
  List<ShapeScopeType> scopeTypes =
      sh.getScopes()
          .stream()
          .map(ShapeScope::getScopeType)
          .distinct()
          .collect(Collectors.toList());
  // distinct scopes
  assertThat(scopeTypes).hasSize(ShapeScopeType.values().length);
}
@Test
public void testFindAllByIds() {
  List<TestEntity> values =
      Arrays.asList(1, 2)
          .stream()
          .map(
              v -> {
                TestEntity t = new TestEntity();
                t.setStringProperty(String.format("Hello %s time(s)", v));
                return t;
              })
          .collect(Collectors.toList());
  repository.save(values);

  List<TestEntity> actual =
      (List<TestEntity>)
          repository.findAll(values.stream().map(TestEntity::getId).collect(Collectors.toList()));
  assertNotNull("Checking that the result is not null.", actual);
  assertEquals(2, actual.size());

  Map<String, TestEntity> expected =
      values.stream().collect(toMap(TestEntity::getId, Function.identity()));
  actual.forEach(
      testEntity ->
          assertEquals(
              expected.get(testEntity.getId()).getStringProperty(),
              testEntity.getStringProperty()));
}