/**
 * Walks the cache directory for {@code authority} under {@code root} and asserts that the set of
 * .txt files found (relative to the cache root) exactly matches {prefix}{0..count-1}.txt.
 *
 * @param authority cache subdirectory to inspect
 * @param root directory below the authority's cache path to walk
 * @param prefix expected file-name prefix
 * @param count expected number of files
 * @throws IOException if the directory walk fails
 */
private void walk(String authority, Path root, String prefix, int count) throws IOException {
  Path cachePath = BASE_PATH.resolve(authority);
  Path dirPath = Paths.get(cachePath.toString(), root.toString());
  // BUG FIX: Files.walk returns a stream backed by open directory handles; it must be closed
  // (try-with-resources) or the handles leak.
  Set<String> fileNames;
  try (Stream<Path> paths = Files.walk(dirPath)) {
    fileNames =
        paths
            .filter(p -> p.toString().endsWith(".txt"))
            .map(p -> Paths.get(p.toAbsolutePath().toString()))
            .map(cachePath::relativize)
            .map(Object::toString)
            .collect(Collectors.toSet());
  }
  assertFalse(fileNames.isEmpty());
  Set<String> expected =
      IntStream.range(0, count).mapToObj(i -> prefix + i + ".txt").collect(Collectors.toSet());
  assertFalse(expected.isEmpty());
  // Files present on disk but not expected.
  Set<String> extra = new HashSet<>(fileNames);
  extra.removeAll(expected);
  if (!extra.isEmpty()) {
    System.out.println("Extra entries " + extra);
  }
  assertTrue("Extra entries ", extra.isEmpty());
  // Files expected but absent on disk.
  Set<String> missing = new HashSet<>(expected);
  missing.removeAll(fileNames);
  // BUG FIX: the guard previously tested `extra`, so missing entries were never printed.
  if (!missing.isEmpty()) {
    System.out.println("Missing entries " + missing);
  }
  assertTrue("Missing entries", missing.isEmpty());
}
// Adds any missing device ports. private void addMissingPorts(Device device) { try { List<Port> ports = deviceService.getPorts(device.id()); Set<ConnectPoint> existing = ports .stream() .map(p -> new ConnectPoint(device.id(), p.number())) .collect(Collectors.toSet()); Set<ConnectPoint> missing = connectPoints .stream() .filter(cp -> cp.deviceId().equals(device.id())) .filter(cp -> !existing.contains(cp)) .collect(Collectors.toSet()); if (!missing.isEmpty()) { List<PortDescription> newPorts = Stream.concat( ports.stream().map(this::description), missing.stream().map(this::description)) .collect(Collectors.toList()); deviceProviderService.updatePorts(device.id(), newPorts); } } catch (IllegalArgumentException e) { log.warn("Error pushing ports: {}", e.getMessage()); } }
/**
 * Binds each abstract method of {@code resource} to the matching method on {@code container},
 * then binds the resource to a locally injected container instance.
 *
 * @param resource the (fully abstract) resource class whose methods are bound
 * @param container the implementation class providing the matching methods
 * @throws IllegalStateException if the resource declares any non-abstract methods
 */
private void bindResourceToLocalContainer(final Class<?> resource, final Class<?> container) {
  final Set<Method> nonAbstractMethods =
      Arrays.stream(resource.getMethods())
          .filter(method -> !Modifier.isAbstract(method.getModifiers()))
          .collect(Collectors.toSet());
  // BUG FIX: the original condition was `!nonAbstractMethods.isEmpty()`, which contradicts the
  // error message — the intent is that the resource must be fully abstract, so the check must
  // fail when non-abstract methods ARE found.
  Preconditions.checkState(
      nonAbstractMethods.isEmpty(),
      "Found non-abstract methods in " + resource + ": " + nonAbstractMethods);
  final Set<Method> abstractMethods =
      Arrays.stream(resource.getMethods())
          .filter(method -> Modifier.isAbstract(method.getModifiers()))
          .collect(Collectors.toSet());
  // Map each abstract resource method to its container counterpart, when one exists.
  for (final Method resourceMethod : abstractMethods) {
    final Method containerMethod = findMatchingMethod(container, resourceMethod);
    if (containerMethod != null) {
      this.resourceToContainer.put(resourceMethod, containerMethod);
    }
  }
  bindResourceToContainer(resource, injector.getInstance(container));
}
/**
 * Simulates a committed compaction transaction whose log is then corrupted by {@code modifier},
 * and verifies file listing and leftover-removal behaviour for both recoverable and
 * unrecoverable corruption.
 *
 * @param modifier mutates the transaction log or on-disk state to simulate the corruption
 * @param isRecoverable whether removeUnfinishedLeftovers is expected to clean up the new files
 */
private static void testCorruptRecord(
    BiConsumer<LogTransaction, SSTableReader> modifier, boolean isRecoverable) throws IOException {
  ColumnFamilyStore cfs = MockSchema.newCFS(KEYSPACE);
  File dataFolder = new Directories(cfs.metadata).getDirectoryForNewSSTables();
  SSTableReader sstableOld = sstable(dataFolder, cfs, 0, 128);
  SSTableReader sstableNew = sstable(dataFolder, cfs, 1, 128);

  // simulate tracking sstables with a committed transaction except the checksum will be wrong
  LogTransaction log = new LogTransaction(OperationType.COMPACTION);
  assertNotNull(log);

  log.trackNew(sstableNew);
  log.obsoleted(sstableOld);

  // Modify the transaction log or disk state for sstableOld
  modifier.accept(log, sstableOld);

  // complete(null) returns the accumulated error; null here means the commit itself succeeded.
  assertNull(log.complete(null));

  sstableOld.selfRef().release();
  sstableNew.selfRef().release();

  // The files on disk, for old files make sure to exclude the files that were deleted by the
  // modifier
  Set<String> newFiles = sstableNew.getAllFilePaths().stream().collect(Collectors.toSet());
  Set<String> oldFiles =
      sstableOld
          .getAllFilePaths()
          .stream()
          .filter(p -> new File(p).exists())
          .collect(Collectors.toSet());

  // This should filter as in progress since the last record is corrupt
  assertFiles(newFiles, getTemporaryFiles(dataFolder));
  assertFiles(oldFiles, getFinalFiles(dataFolder));

  if (isRecoverable) {
    // the corruption is recoverable but the commit record is unreadable so the
    // transaction is still in progress

    // This should remove new files
    LogTransaction.removeUnfinishedLeftovers(cfs.metadata);

    // make sure to exclude the old files that were deleted by the modifier
    assertFiles(dataFolder.getPath(), oldFiles);
  } else {
    // if an intermediate line was also modified, it should ignore the tx log file

    // This should not remove any files
    LogTransaction.removeUnfinishedLeftovers(cfs.metadata);

    // Everything remains on disk: new files, surviving old files, and the log file itself.
    assertFiles(
        dataFolder.getPath(),
        Sets.newHashSet(Iterables.concat(newFiles, oldFiles, log.logFilePaths())));
  }
}
/**
 * Performs one level of FSA state merging: first merges any node whose outgoing edges carry the
 * same label to multiple targets, then merges groups of nodes that share an identical
 * (label, target) adjacency, finally rebuilding {@code curMergeLevel} for the next pass.
 */
private void mergeIntermediateStates() {
  // Nodes grouped by a shared (label -> target) outgoing adjacency.
  Map<Adjacency, Set<FSANode>> adjToNodes = new HashMap<>();
  Set<FSANode> newCurMergeLevel = new HashSet<>();
  // Maps each original node to the merged node that replaced it during this pass,
  // so later groups can be remapped before merging (see updateBasedOnMerges).
  Map<FSANode, FSANode> formerNodeToMergedNode = new HashMap<>();
  for (FSANode node : curMergeLevel) {
    boolean needToBreak = false;
    for (Set<FSANode> targets : node.getLabelToTargets().values()) {
      if (targets.size() > 1) {
        // Same label leads to multiple targets: merge those targets into one node.
        updateBasedOnMerges(targets, formerNodeToMergedNode);
        FSANode newNode = mergeNodes(targets);
        targets.stream().forEach(n -> formerNodeToMergedNode.put(n, newNode));
        if (newNode.needsToMergeOutgoingEdges()) {
          newCurMergeLevel.add(newNode);
        }
        needToBreak = true;
      }
    }
    if (needToBreak) {
      // NOTE(review): this exits the loop over ALL nodes after the first node that triggered a
      // target merge, not just the inner loop — confirm this early exit is intentional.
      break;
    }
    // No target merges for this node: record its adjacencies for the grouping phase below.
    for (FSAEdge edge : node.getOutgoingEdges()) {
      Adjacency adj = new Adjacency(edge);
      Set<FSANode> nodesWithSameLabelToSameTarget = adjToNodes.get(adj);
      if (nodesWithSameLabelToSameTarget == null) {
        nodesWithSameLabelToSameTarget = new HashSet<>();
        adjToNodes.put(adj, nodesWithSameLabelToSameTarget);
      }
      nodesWithSameLabelToSameTarget.add(node);
    }
  }
  // Sources of edges into the current level may need re-examination at the next level.
  for (FSANode node : curMergeLevel) {
    newCurMergeLevel.addAll(
        node.getIncomingEdges().stream().map(e -> e.getSource()).collect(Collectors.toSet()));
  }
  // Merge every group of nodes that share an identical (label, target) adjacency.
  for (Set<FSANode> nodes : adjToNodes.values()) {
    updateBasedOnMerges(nodes, formerNodeToMergedNode);
    if (nodes.size() > 1) {
      FSANode newNode = mergeNodes(nodes);
      if (!Collections.disjoint(newCurMergeLevel, nodes)) {
        newCurMergeLevel.removeAll(nodes);
        newCurMergeLevel.add(newNode);
      }
      for (FSANode node : nodes) {
        formerNodeToMergedNode.put(node, newNode);
      }
    }
  }
  // remove any nodes that are now contained by other nodes
  curMergeLevel =
      newCurMergeLevel.stream().filter(n -> this.nodes.contains(n)).collect(Collectors.toSet());
}
/**
 * Verifies that after aborting a transaction spanning two data folders, the newly tracked
 * sstables are listed as temporary and removeUnfinishedLeftovers deletes them, leaving only the
 * obsoleted (old) sstables in each folder.
 */
@Test
public void testRemoveUnfinishedLeftovers_abort_multipleFolders() throws Throwable {
  ColumnFamilyStore cfs = MockSchema.newCFS(KEYSPACE);

  File origiFolder = new Directories(cfs.metadata).getDirectoryForNewSSTables();
  File dataFolder1 = new File(origiFolder, "1");
  File dataFolder2 = new File(origiFolder, "2");
  Files.createDirectories(dataFolder1.toPath());
  Files.createDirectories(dataFolder2.toPath());

  // Two sstables per folder: indices 0/2 will be obsoleted, 1/3 newly tracked.
  SSTableReader[] sstables = {
    sstable(dataFolder1, cfs, 0, 128),
    sstable(dataFolder1, cfs, 1, 128),
    sstable(dataFolder2, cfs, 2, 128),
    sstable(dataFolder2, cfs, 3, 128)
  };

  LogTransaction log = new LogTransaction(OperationType.COMPACTION);
  assertNotNull(log);

  LogTransaction.SSTableTidier[] tidiers = {log.obsoleted(sstables[0]), log.obsoleted(sstables[2])};

  log.trackNew(sstables[1]);
  log.trackNew(sstables[3]);

  // One transaction log file is expected per data folder.
  Collection<File> logFiles = log.logFiles();
  Assert.assertEquals(2, logFiles.size());

  // fake an abort
  log.txnFile().abort();

  Arrays.stream(sstables).forEach(s -> s.selfRef().release());

  // test listing
  Assert.assertEquals(
      sstables[1].getAllFilePaths().stream().map(File::new).collect(Collectors.toSet()),
      getTemporaryFiles(dataFolder1));
  Assert.assertEquals(
      sstables[3].getAllFilePaths().stream().map(File::new).collect(Collectors.toSet()),
      getTemporaryFiles(dataFolder2));

  // normally called at startup
  LogTransaction.removeUnfinishedLeftovers(Arrays.asList(dataFolder1, dataFolder2));

  // old tables should be only table left
  assertFiles(dataFolder1.getPath(), new HashSet<>(sstables[0].getAllFilePaths()));
  assertFiles(dataFolder2.getPath(), new HashSet<>(sstables[2].getAllFilePaths()));

  // complete the transaction to avoid LEAK errors
  Arrays.stream(tidiers).forEach(LogTransaction.SSTableTidier::run);
  assertNull(log.complete(null));
}
// Determines which registered players have not joined the game and notifies the
// missing-last-player handler with their names.
private void onePlayerMissing(Set<JoinedPlayer> playersInGame) {
  // Names of all players currently joined.
  Set<String> joinedNames = new HashSet<>();
  for (JoinedPlayer joined : playersInGame) {
    joinedNames.add(joined.getPlayer());
  }
  // Every registered player whose jabber name is not among the joined ones.
  Set<String> absentPlayers = new HashSet<>();
  for (PlayerEntity entity : playerRepository.findAll()) {
    String jabber = entity.getJabber();
    if (!joinedNames.contains(jabber)) {
      absentPlayers.add(jabber);
    }
  }
  missingLastPlayer(absentPlayers);
}
/** get all the unique values for the filters */
public static Object values(Request req, Response res) {
  Values result = new Values();
  // Each store column holds pairs; the first element of each pair is the filter value.
  result.commit = StatsMain.store.commit.stream().map(pair -> pair.a).collect(Collectors.toSet());
  result.instanceType =
      StatsMain.store.instanceType.stream().map(pair -> pair.a).collect(Collectors.toSet());
  result.graphId =
      StatsMain.store.graphId.stream().map(pair -> pair.a).collect(Collectors.toSet());
  result.jobId = StatsMain.store.jobId.stream().map(pair -> pair.a).collect(Collectors.toSet());
  return result;
}
/**
 * Builds an {@link EntityContextDTO} for {@code entityContext} within {@code activity}: sets the
 * entity DTO, the mul-condition (or a synthetic 1..1 condition when {@code mulCondition} is
 * null), the matching pre-condition path DTOs, and one instance-context DTO per entity instance
 * found in the workflow instance.
 */
public static EntityContextDTO createEntityContextDTO(
    Activity activity,
    Entity entityContext,
    MulCondition mulCondition,
    WorkflowInstance workflowInstance) {
  Map<Entity, Set<EntityInstance>> instanceContext = activity.getInstanceContext(workflowInstance);

  EntityContextDTO entityContextDTO = new EntityContextDTO();
  entityContextDTO.setEntity(entityContext.getDTO());
  if (mulCondition == null) {
    // No explicit multiplicity: synthesise a mandatory singleton (1..1) condition rooted at the
    // context entity itself.
    MulConditionDTO mulConditionDTO = new MulConditionDTO();
    mulConditionDTO.setCardinality("1");
    mulConditionDTO.setMin(1);
    mulConditionDTO.setMax(1);
    mulConditionDTO.setRolePath(entityContext.getName());
    entityContextDTO.setMulCondition(mulConditionDTO);
    // Pre-conditions whose path starts at the context entity.
    entityContextDTO.setDefPathConditionSet(
        activity
            .getPreConditionSet()
            .stream()
            .filter(d -> d.getSourceOfPath() == entityContext)
            .map(d -> d.getDTO(entityContext.getDataModel().getSpecification().getSpecId()))
            .collect(Collectors.toSet()));
  } else {
    entityContextDTO.setMulCondition(mulCondition.getDTO());
    // Pre-conditions starting at the mul-condition's source entity whose path reaches the
    // context entity via an adjacent step (and are not self-referencing).
    entityContextDTO.setDefPathConditionSet(
        activity
            .getPreConditionSet()
            .stream()
            .filter(
                d ->
                    d.getSourceOfPath() == mulCondition.getSourceEntity()
                        && d.getSourceOfPath() != d.getTargetOfPath()
                        && d.getPath().getAdjacent() == entityContext)
            .map(d -> d.getDTO(entityContext.getDataModel().getSpecification().getSpecId()))
            .collect(Collectors.toSet()));
  }

  // One instance-context DTO per instance of the context entity in this workflow instance.
  Set<EntityInstanceContextDTO> entityInstanceContextDTOs = new HashSet<EntityInstanceContextDTO>();
  entityContextDTO.setEntityInstanceContextSet(entityInstanceContextDTOs);
  for (EntityInstance entityInstance : instanceContext.get(entityContext)) {
    entityInstanceContextDTOs.add(
        EntityInstanceContextDTO.createEntityInstanceContextDTO(entityContextDTO, entityInstance));
  }

  return entityContextDTO;
}
/** Returns every challenge for which {@code user} has an approved solution. */
public Set<Challenge> getSolvedChallengesOf(User user) {
  Set<Challenge> solvedChallenges = new HashSet<>();
  for (Solution solution : solutionDao.findApprovedChallengeSolutionsByUser(user)) {
    solvedChallenges.add(solution.getChallenge());
  }
  return solvedChallenges;
}
/** Returns the names of all notification engines enabled for the given type/status/time. */
public Set<String> getEnginesEnabledAt(NotificationType type, Status status, Calendar time) {
  Set<String> enabledEngineNames = new HashSet<>();
  for (NotificationEngine engine : this.getEngines()) {
    if (engine.isEnabledAt(type, status, time)) {
      enabledEngineNames.add(engine.getEngine());
    }
  }
  return enabledEngineNames;
}
/** Returns the enrolled-and-enrolling modules of the context that satisfy {@code predicate}. */
protected static Set<IDegreeModuleToEvaluate> getEnroledAndEnroling(
    final EnrolmentContext enrolmentContext, final Predicate<IDegreeModuleToEvaluate> predicate) {
  final Set<IDegreeModuleToEvaluate> matching = new HashSet<>();
  for (IDegreeModuleToEvaluate module : getEnroledAndEnroling(enrolmentContext)) {
    if (predicate.test(module)) {
      matching.add(module);
    }
  }
  return matching;
}
// Asserts that the extracted call expressions match the criteria exactly:
// every criterion was extracted and nothing else was.
private void assertCorrectExpressionExtracted(
    Set<PsiCallExpression> extractions, String... criteria) {
  Set<String> extractedTexts = new HashSet<>();
  for (PsiCallExpression extraction : extractions) {
    extractedTexts.add(extraction.getText());
  }
  allCriteriaExtracted(extractedTexts, criteria);
  noFalsePositives(extractedTexts, criteria);
}
/** Return SUID of root networks */ @GET @Path("/") @Produces(MediaType.APPLICATION_JSON + "; charset=UTF-8") public Collection<Long> getCollectionsAsSUID(@QueryParam("subsuid") Long subsuid) { if (subsuid == null) { // Return all collection SUIDs return getRootNetworks().stream().map(root -> root.getSUID()).collect(Collectors.toSet()); } else { // Return parent collection's SUID final CyNetwork subnetwork = networkManager.getNetwork(subsuid); if (subnetwork == null) { throw new NotFoundException(); } final CyRootNetwork root = cyRootNetworkManager.getRootNetwork(subnetwork); if (root == null) { throw new NotFoundException(); } else { final List<Long> rootId = new ArrayList<>(); rootId.add(root.getSUID()); return rootId; } } }
/** Returns every registered move block that blocks {@code moveable} from reaching {@code target}. */
@Override
public Set<MoveBlock> checkMoveBlocks(Moveable moveable, Field target) {
  Set<MoveBlock> blocking = new HashSet<>();
  for (MoveBlock moveBlock : getMoveBlocks()) {
    if (moveBlock.blocks(moveable, target)) {
      blocking.add(moveBlock);
    }
  }
  return blocking;
}
/**
 * Runs the person search from the view-state bean. A single hit forwards directly to the
 * operations page; otherwise the result list is capped at 50 with a size warning.
 */
public ActionForward searchPerson(
    ActionMapping mapping,
    ActionForm form,
    HttpServletRequest request,
    HttpServletResponse response)
    throws FenixServiceException {
  final SimpleSearchPersonWithStudentBean searchPersonBean =
      (SimpleSearchPersonWithStudentBean) getObjectFromViewState("searchPersonBean");
  request.setAttribute("searchPersonBean", searchPersonBean);

  Collection<Person> matches = searchPersonBean.search();
  request.removeAttribute("sizeWarning");

  // Exactly one hit: go straight to the operations page for that person.
  if (matches.size() == 1) {
    request.setAttribute("personId", matches.iterator().next().getExternalId());
    return showOperations(mapping, form, request, response);
  }

  // Too many hits: cap the result at 50 entries and warn the user to narrow the search.
  if (matches.size() > 50) {
    matches = matches.stream().limit(50).collect(Collectors.toSet());
    request.setAttribute(
        "sizeWarning", BundleUtil.getString(Bundle.ACADEMIC, "warning.need.to.filter.candidates"));
  }

  request.setAttribute("persons", matches);
  return mapping.findForward("searchPersons");
}
/**
 * Notifies listeners of the changed tile coordinates. When the tile provider is a normal-dim
 * Dimension with a border, the notification also covers every tile within the border radius
 * around each changed tile.
 */
private void fireTilesChangedIncludeBorder(Set<Tile> tiles) {
  final boolean includeBorder =
      showBorder
          && (tileProvider instanceof Dimension)
          && (((Dimension) tileProvider).getDim() == DIM_NORMAL)
          && (((Dimension) tileProvider).getBorder() != null);
  if (!includeBorder) {
    // No border handling needed: report only the changed tiles themselves.
    final Set<Point> changedCoords = new HashSet<>();
    for (Tile tile : tiles) {
      changedCoords.add(getTileCoordinates(tile));
    }
    for (TileListener listener : listeners) {
      listener.tilesChanged(this, changedCoords);
    }
    return;
  }
  // Expand each changed tile by the border radius in both axes.
  final Dimension dimension = (Dimension) tileProvider;
  final Set<Point> coordSet = new HashSet<>();
  for (Tile tile : tiles) {
    final int tileX = tile.getX();
    final int tileY = tile.getY();
    final int borderSize = dimension.getBorderSize();
    for (int dx = -borderSize; dx <= borderSize; dx++) {
      for (int dy = -borderSize; dy <= borderSize; dy++) {
        coordSet.add(getTileCoordinates(tileX + dx, tileY + dy));
      }
    }
  }
  for (TileListener listener : listeners) {
    listener.tilesChanged(this, coordSet);
  }
}
/** Returns the ids of all networks attached to the given cluster. */
private Set<Guid> findClusterNetworkIds(Guid clusterId) {
  Set<Guid> networkIds = new HashSet<>();
  for (Network network : networkDao.getAllForCluster(clusterId)) {
    networkIds.add(network.getId());
  }
  return networkIds;
}
// Asserts that the routing nodes contain exactly the given node ids (in any order).
private void assertNodesPresent(RoutingNodes routingNodes, String... nodes) {
  final Set<String> presentNodeIds =
      StreamSupport.stream(routingNodes.spliterator(), false)
          .map(routingNode -> routingNode.nodeId())
          .collect(Collectors.toSet());
  assertThat(presentNodeIds, containsInAnyOrder(nodes));
}
/**
 * Manually exercises time-record generation for one employee: snapshots the existing records,
 * runs ensureRecords, and logs the newly generated records plus the elapsed time.
 */
@Test
public void ensureRecordsTest() {
  int empId = 11303;
  List<PayPeriod> payPeriods = periodService.getOpenPayPeriods(PayPeriodType.AF, empId, SortOrder.ASC);

  // Print existing records
  // Copy-construct each record so the snapshot is independent of the generation step below.
  Set<TimeRecord> existingRecords =
      timeRecordService
          .getTimeRecords(Collections.singleton(empId), payPeriods, TimeRecordStatus.getAll())
          .stream()
          .map(TimeRecord::new)
          .collect(Collectors.toSet());
  logger.info("-------- EXISTING RECORDS --------");
  printRecords(existingRecords);

  Stopwatch sw = Stopwatch.createStarted();
  // Generate records
  manager.ensureRecords(empId);
  logger.info("generation took {} ms", sw.stop().elapsed(TimeUnit.MILLISECONDS));

  // Print difference
  Set<TimeRecord> newRecords =
      new TreeSet<>(
          timeRecordService.getTimeRecords(
              Collections.singleton(empId), payPeriods, TimeRecordStatus.getAll()));
  logger.info("-------- NEW RECORDS --------");
  printRecords(Sets.difference(newRecords, existingRecords));
}
/**
 * Resolves the given artifact specs (with transitive dependencies) through Maven and returns a
 * spec for every resolved artifact, each bound to its local file.
 */
@Override
public Set<ArtifactSpec> resolveAll(final Set<ArtifactSpec> specs) {
  resetListeners();
  final MavenResolvedArtifact[] resolvedArtifacts;
  try {
    resolvedArtifacts =
        this.resolver
            .resolve(specs.stream().map(ArtifactSpec::mavenGav).collect(Collectors.toList()))
            .withTransitivity()
            .as(MavenResolvedArtifact.class);
  } finally {
    // Always finish the transfer listener, even when resolution throws.
    completeTransferListener();
  }
  final Set<ArtifactSpec> resolvedSpecs = new HashSet<>();
  for (final MavenResolvedArtifact artifact : resolvedArtifacts) {
    final MavenCoordinate coordinate = artifact.getCoordinate();
    resolvedSpecs.add(
        new ArtifactSpec(
            "compile",
            coordinate.getGroupId(),
            coordinate.getArtifactId(),
            coordinate.getVersion(),
            coordinate.getPackaging().getId(),
            coordinate.getClassifier(),
            artifact.asFile()));
  }
  return resolvedSpecs;
}
/** Returns every registered rule preventing {@code moveable} from ending its move on {@code target}. */
@Override
public Set<MoveUnableToEnd> checkMoveUnableToEnd(Moveable moveable, Field target) {
  Set<MoveUnableToEnd> preventing = new HashSet<>();
  for (MoveUnableToEnd candidate : getMoveUnableToEnd()) {
    if (candidate.unableToEnd(moveable, target)) {
      preventing.add(candidate);
    }
  }
  return preventing;
}
/**
 * Fetches the container documents at {@code documentLinks} in one joined GET operation, extracts
 * each container's template service link, and hands the unique links on to
 * loadNamesFromTemplates. Any failure fails the task.
 */
private void getContainerTemplates(final State currentState, Collection<String> documentLinks) {
  OperationJoin.create(
          documentLinks.stream().map(documentLink -> Operation.createGet(this, documentLink)))
      .setCompletion(
          (ops, exs) -> {
            // Fail the task if any GET in the join reported an exception.
            if (null != exs && !exs.isEmpty()) {
              failTask(exs);
              return;
            }

            try {
              // Collect the distinct template links referenced by the fetched containers.
              Set<String> containerTemplateServiceLinks =
                  ops.values()
                      .stream()
                      .map(
                          operation ->
                              operation.getBody(ContainerService.State.class)
                                  .containerTemplateServiceLink)
                      .collect(Collectors.toSet());
              loadNamesFromTemplates(currentState, containerTemplateServiceLinks);
            } catch (Throwable t) {
              // Completion handlers must not throw; convert any error into a task failure.
              failTask(t);
            }
          })
      .sendWith(this);
}
/** Extracts all hashtag tokens (space-separated words starting with '#') from the body. */
public Set<Hashtag> hashtags() {
  Set<Hashtag> tags = new HashSet<>();
  for (String word : body.split(" ")) {
    if (word.startsWith("#")) {
      tags.add(new Hashtag(word));
    }
  }
  return tags;
}
// -- Private private Set<Long> calendarIds(String ids) { return Arrays.asList(ids.split("\\+")) .stream() .map(i -> toLong(i)) .filter(i -> i > 0) .collect(Collectors.toSet()); }
private Class<?> loadImportedClass(String simpleName) throws ClassNotFoundException { Class<?> clazz = null; try { // check to see if we generated it clazz = super.loadClass(env.getGeneratedCodePackage() + "." + simpleName); } catch (ClassNotFoundException e) { } Set<String> names = env.getImports().stream().map(i -> i.resolveClass(simpleName)).collect(Collectors.toSet()); for (String name : names) { Class<?> next; try { next = super.loadClass(name); } catch (ClassNotFoundException e) { continue; } if (clazz != null) { throw new ClassNotFoundException("Ambiguous import. " + Arrays.asList(next, clazz)); } else { clazz = next; } } if (clazz == null) { throw new ClassNotFoundException("Could not find " + simpleName + " in " + env.getImports()); } return clazz; }
// Collects the root (collection) network of every registered network;
// duplicates collapse because the result is a set.
private final Set<CyRootNetwork> getRootNetworks() {
  return networkManager
      .getNetworkSet()
      .stream()
      .map(cyRootNetworkManager::getRootNetwork)
      .collect(Collectors.toSet());
}
/**
 * Returns every key of {@code map} whose value equals {@code value} (null-safe comparison).
 *
 * @param map the map to search
 * @param value the value to match; may be null
 * @return the (possibly empty) set of matching keys
 */
public static <T, E> Set<T> getKeysByValue(Map<T, E> map, E value) {
  final Set<T> matchingKeys = new HashSet<>();
  for (Map.Entry<T, E> entry : map.entrySet()) {
    if (Objects.equals(entry.getValue(), value)) {
      matchingKeys.add(entry.getKey());
    }
  }
  return matchingKeys;
}
/**
 * Updates, creates or deletes UDI events using a given list of UDI Event DTOs.
 *
 * @param period {@link LocalDate} period for which UDI Events will be created/updated/deleted.
 * @param updatedUdiEvents {@link List} of {@link UdiEventDto}.
 */
public void updateUdiEvents(LocalDate period, List<UdiEventDto> updatedUdiEvents) {
  LOGGER.debug("Updating udi events for period [{}]", period);
  // fetch existing udi events
  // Index persisted events by id for constant-time lookup while merging.
  Map<String, UdiEvent> udiEventsPerId =
      udiEventRepository
          .findUdiEventsForPeriod(period)
          .stream()
          .collect(Collectors.toMap(UdiEvent::getId, Function.identity()));

  // 1. Update or create UdiEvents
  for (UdiEventDto udiEventDto : updatedUdiEvents) {
    if (!udiEventsPerId.containsKey(udiEventDto.getId())) {
      createUdiEventFromDto(udiEventDto, period);
    } else {
      updateUdiEventFromDto(udiEventsPerId.get(udiEventDto.getId()), udiEventDto);
    }
  }

  // 2. Delete udi events not present in the list anymore
  Set<String> updatedIds =
      updatedUdiEvents.stream().map(UdiEventDto::getId).collect(Collectors.toSet());
  udiEventsPerId
      .entrySet()
      .stream()
      .filter(udiEventPerId -> !updatedIds.contains(udiEventPerId.getKey()))
      .forEach(
          udiEventPerId -> {
            LOGGER.debug("Deleted udi event with id [{}]", udiEventPerId.getKey());
            udiEventRepository.delete(udiEventPerId.getValue());
          });
}
/**
 * Builds the area response for the requested circles: constructs abstract and concrete diagrams,
 * computes the per-zone area map, and drops the zone with infinite area from the result.
 */
public JSONAreaResponse(JSONAreaRequest jar) {
  // Can't be a map as there may be multiple concrete circles per abstract
  List<Pair<AbstractContour, Circle2D>> cs = new Vector<>();
  for (JSONCircle circle : jar.circles) {
    Circle2D c = new Circle2D(circle.x, circle.y, circle.radius);
    cs.add(new Pair<>(new AbstractContour(circle.label), c));
  }

  List<ConcreteCircle> concreteCircles = new Vector<>();
  for (Pair<AbstractContour, Circle2D> pair : cs) {
    concreteCircles.add(new ConcreteCircle(pair.car, pair.cdr));
  }

  // The abstract diagram is defined by the set of abstract contours only.
  AbstractDiagram ad = new AbstractDiagram(cs.stream().map(x -> x.car).collect(Collectors.toSet()));
  ConcreteDiagram d = new ConcreteDiagram(ad, concreteCircles);

  circles = jar.circles;
  areas = d.getZoneAreaMap();
  duration = 0;

  // Find a zone with infinite area and remove it from the result.
  // NOTE(review): only the first infinite zone found is removed — confirm at most one
  // such zone (presumably the unbounded outside zone) can occur.
  Optional<AbstractZone> key = Optional.empty();
  for (AbstractZone z : areas.keySet()) {
    if (Double.isInfinite(areas.get(z))) {
      key = Optional.of(z);
      break;
    }
  }
  if (key.isPresent()) {
    areas.remove(key.get());
  }
}