/** * Create a disMaxJunc query rule based on the given search terms as well as the information from * given ontology terms * * @param ontologyTerms * @param searchTerms * @return disMaxJunc queryRule */ public QueryRule createDisMaxQueryRuleForAttribute( Set<String> searchTerms, Collection<OntologyTerm> ontologyTerms) { List<String> queryTerms = new ArrayList<String>(); if (searchTerms != null) { searchTerms .stream() .filter(searchTerm -> StringUtils.isNotBlank(searchTerm)) .forEach(searchTerm -> queryTerms.add(parseQueryString(searchTerm))); } // Handle tags with only one ontologyterm ontologyTerms .stream() .filter(ontologyTerm -> !ontologyTerm.getIRI().contains(",")) .forEach( ot -> { queryTerms.addAll(parseOntologyTermQueries(ot)); }); QueryRule disMaxQueryRule = createDisMaxQueryRuleForTerms(queryTerms); // Handle tags with multiple ontologyterms ontologyTerms .stream() .filter(ontologyTerm -> ontologyTerm.getIRI().contains(",")) .forEach( ot -> { disMaxQueryRule.getNestedRules().add(createShouldQueryRule(ot.getIRI())); }); return disMaxQueryRule; }
/**
 * Demonstrates the Java 8 Stream API on a small task collection: filter + sum, parallel
 * reduction, grouping, and a multi-step primitive-stream pipeline.
 */
public static void main(String[] args) {
  final Collection<Task> tasks =
      Arrays.asList(
          new Task(Status.OPEN, 5), new Task(Status.OPEN, 13), new Task(Status.CLOSED, 8));

  // How many points do the OPEN tasks add up to? Before Java 8 this required an explicit
  // foreach loop over the task collection; with Java 8 streams — sequences of elements that
  // support sequential and parallel processing — it is a short pipeline.
  final long totalPointsOfOpenTasks =
      tasks
          .stream()
          .filter(task -> task.getStatus() == Status.OPEN)
          .mapToInt(Task::getPoints)
          .sum();

  System.out.println("Total points: " + totalPointsOfOpenTasks);

  /**
   * Stream operations divide into intermediate and terminal operations.
   *
   * <p>An intermediate operation returns a new stream — executing one (e.g. filter) does not
   * perform the actual filtering; it creates a new stream that will yield the matching elements
   * of the original stream.
   *
   * <p>A terminal operation (e.g. forEach or sum) traverses the stream and produces a result or
   * a side effect; after a terminal operation runs, the pipeline is consumed and cannot be
   * reused. In almost all cases, terminal operations traverse the stream eagerly.
   */
  // Streams also make parallel processing easy: for the tasks collection above, the total
  // points of all tasks can be computed in parallel as follows.
  // Calculate total points of all tasks
  final double totalPoints =
      tasks
          .stream()
          .parallel()
          .map(task -> task.getPoints()) // or map( Task::getPoints )
          .reduce(0, Integer::sum);

  System.out.println("Total points (all tasks): " + totalPoints);

  // Collections often need to be grouped by some property of their elements; the stream API
  // makes this a one-liner with Collectors.groupingBy.
  // Group tasks by their status
  final Map<Status, List<Task>> map =
      tasks.stream().collect(Collectors.groupingBy(Task::getStatus));
  System.out.println(map);

  // Final example: compute each task's share of the total points as a percentage string.
  // Calculate the weight of each tasks (as percent of total points)
  final Collection<String> result =
      tasks
          .stream() // Stream< String >
          .mapToInt(Task::getPoints) // IntStream
          .asLongStream() // LongStream
          .mapToDouble(points -> points / totalPoints) // DoubleStream
          .boxed() // Stream< Double >
          .mapToLong(weigth -> (long) (weigth * 100)) // LongStream
          .mapToObj(percentage -> percentage + "%") // Stream< String>
          .collect(Collectors.toList()); // List< String >

  System.out.println(result); // [19%, 50%, 30%]
}
/**
 * Filters the given images, keeping only those that satisfy every predicate.
 *
 * @param images the images to filter
 * @param predicates the predicates an image must satisfy (all of them) to be kept
 * @return a new list containing the images that match all predicates
 */
public static List<ImageInfo> filterImageInfo(
    Collection<ImageInfo> images, List<Predicate<ImageInfo>> predicates) {
  // FIX: the original re-streamed and re-collected an intermediate list once per predicate
  // (one pass and one allocation per predicate). Combining the predicates with Predicate::and
  // filters everything in a single pass with identical results, including the original
  // left-to-right short-circuit order of the predicates.
  Predicate<ImageInfo> allPredicates = predicates.stream().reduce(image -> true, Predicate::and);
  return images.stream().filter(allPredicates).collect(Collectors.toList());
}
@Test public void testStubDeploymentConfig() { IImageStream is = givenAnImageStreamTo(project.getName(), DOCKER_TAG); IResource resource = job.stubDeploymentConfig(factory, RESOURCE_NAME, DOCKER_TAG, is); assertTrue(resource instanceof IDeploymentConfig); IDeploymentConfig dc = (IDeploymentConfig) resource; assertEquals( "Exp. replicas to match incoming params", parameters.getReplicas(), dc.getReplicas()); assertEquals( "Exp. the selector key to be the resourceName", RESOURCE_NAME, dc.getReplicaSelector().get(DeployImageJob.SELECTOR_KEY)); IContainer container = dc.getContainer(RESOURCE_NAME); assertNotNull("Exp. to find a container with the resource name", container); Collection<IDeploymentTrigger> triggers = dc.getTriggers(); assertTrue( "Exp. a config change trigger", triggers .stream() .filter(t -> DeploymentTriggerType.CONFIG_CHANGE.equals(t.getType())) .findFirst() .isPresent()); // assert ict matches container spec Optional<IDeploymentTrigger> icTrigger = triggers .stream() .filter(t -> DeploymentTriggerType.IMAGE_CHANGE.equals(t.getType())) .findFirst(); assertTrue(icTrigger.isPresent()); IDeploymentImageChangeTrigger imageChangeTrigger = (IDeploymentImageChangeTrigger) icTrigger.get(); Collection<String> names = imageChangeTrigger.getContainerNames(); assertEquals(1, names.size()); assertEquals( "Exp. the container and trigger names to match", container.getName(), names.iterator().next()); assertTrue(imageChangeTrigger.isAutomatic()); assertEquals(ResourceKind.IMAGE_STREAM_TAG, imageChangeTrigger.getKind()); assertEquals( "Exp. the trigger to point to the imagestream name", new DockerImageURI(null, null, is.getName(), DOCKER_TAG.getTag()), imageChangeTrigger.getFrom()); assertEquals( "Exp. the trigger to point to the imagestream name", is.getNamespace(), imageChangeTrigger.getNamespace()); }
/**
 * Computes aggregate artifact and byte counts across all channels, holding the read lock while
 * the channel list is consulted.
 */
@Override
public ChannelStatistics getStatistics() {
  final ChannelStatistics stats = new ChannelStatistics();
  try (Locked l = lock(this.readLock)) {
    final Collection<ChannelInformation> channels = list();
    long totalArtifacts = 0;
    long totalBytes = 0;
    for (final ChannelInformation channel : channels) {
      totalArtifacts += channel.getState().getNumberOfArtifacts();
      totalBytes += channel.getState().getNumberOfBytes();
    }
    stats.setTotalNumberOfArtifacts(totalArtifacts);
    stats.setTotalNumberOfBytes(totalBytes);
  }
  return stats;
}
/**
 * Selects triples from the configured graph that match the given statement patterns.
 *
 * <p>Builds a SPARQL SELECT over the graph in which each statement in {@code stmts} contributes
 * one "subject predicate object ." pattern, then maps every solution row back into an
 * {@code RDFStatement}.
 *
 * <p>NOTE(review): this mutates the caller's statements — each statement's object is replaced by
 * its escaped form via {@code setObject} while the query string is built.
 *
 * @param stmts statement patterns to query for; the first statement's subject/predicate/object
 *     strings are reused as the variable names looked up in each result row
 * @return the list of fetched statements, or {@code null} when {@code stmts} is null or empty
 */
public List<RDFStatement> selectTriples(Collection<? extends RDFStatement> stmts) {
  if (stmts == null || stmts.isEmpty()) {
    return null;
  }
  // Variable names come from the first statement of the collection.
  RDFStatement rdfs = stmts.stream().findFirst().get();
  // NOTE(review): original comment here read "doesnt get value" — the object lookup below may
  // not resolve as expected; confirm against the query semantics.
  String si = rdfs.getSubject(), pi = rdfs.getPredicate(), oi = rdfs.getObject();
  // Serialize every statement into the WHERE block of the query.
  StringBuilder sb = new StringBuilder();
  stmts
      .stream()
      .forEach(
          s -> {
            // Escape the object in place before serializing the statement.
            s.setObject(RDFUtils.escapeString(s.getObject()));
            sb.append(s.getSubject())
                .append(" ")
                .append(s.getPredicate())
                .append(" ")
                .append(s.getObject())
                .append(" .\n");
          });
  String query =
      String.format(
          this.defaultPrefices + "SELECT * FROM <%s> WHERE { %s }", this.graphName, sb.toString());
  Query sparqlQuery = QueryFactory.create(query);
  VirtuosoQueryExecution vqe = VirtuosoQueryExecutionFactory.create(sparqlQuery, this.graph);
  List<RDFStatement> stmtsList = new ArrayList<>();
  ResultSet rs = vqe.execSelect();
  while (rs.hasNext()) {
    QuerySolution qs = rs.nextSolution();
    // Look up each variable by the names captured from the first statement; missing bindings
    // become the literal string "null".
    RDFNode s = qs.get(si);
    RDFNode p = qs.get(pi);
    RDFNode o = qs.get(oi);
    RDFStatement stmt =
        new RDFStatement(
            s != null ? s.toString() : "null",
            p != null ? p.toString() : "null",
            o != null ? RDFUtils.escapeString(o.toString()) : "null");
    stmtsList.add(stmt);
    logger.info("fetched: {}", stmt.toString());
  }
  return stmtsList;
}
/**
 * Struts action: runs the person search held in the "searchPersonBean" view state and forwards
 * to the appropriate results page.
 *
 * <p>A single hit short-circuits straight to the operations page for that person; more than 50
 * hits are truncated to 50 and a size warning is placed on the request.
 *
 * @param mapping the action mapping used to resolve forwards
 * @param form the submitted form (the search bean itself comes from the view state)
 * @param request the current request; receives the "searchPersonBean", "persons", and possibly
 *     "sizeWarning" or "personId" attributes
 * @param response the current response
 * @return forward to the operations page for a unique match, otherwise to "searchPersons"
 * @throws FenixServiceException propagated from downstream processing
 */
public ActionForward searchPerson(
    ActionMapping mapping,
    ActionForm form,
    HttpServletRequest request,
    HttpServletResponse response)
    throws FenixServiceException {
  final SimpleSearchPersonWithStudentBean searchPersonBean =
      (SimpleSearchPersonWithStudentBean) getObjectFromViewState("searchPersonBean");
  request.setAttribute("searchPersonBean", searchPersonBean);

  Collection<Person> persons = searchPersonBean.search();

  // Clear any warning left over from a previous search on this request.
  request.removeAttribute("sizeWarning");
  if (persons.size() == 1) {
    // Exactly one match: jump directly to that person's operations page.
    request.setAttribute("personId", persons.iterator().next().getExternalId());
    return showOperations(mapping, form, request, response);
  }
  if (persons.size() > 50) {
    // Cap the result set at 50 and warn the user to narrow the search.
    persons = persons.stream().limit(50).collect(Collectors.toSet());
    request.setAttribute(
        "sizeWarning", BundleUtil.getString(Bundle.ACADEMIC, "warning.need.to.filter.candidates"));
  }
  request.setAttribute("persons", persons);
  return mapping.findForward("searchPersons");
}
/*
 * (non-Javadoc)
 * @see de.mq.portfolio.exchangerate.support.ExchangeRateDatebaseRepository#exchangerates(java.util.Collection)
 */
/**
 * Looks up the stored exchange rates for every given currency pair in both directions
 * (source→target and target→source) and returns the combined, de-duplicated results.
 */
@Override
public final Collection<ExchangeRate> exchangerates(
    final Collection<ExchangeRate> exchangerates) {
  Assert.notNull(exchangerates);

  // Query set: the given pairs plus their inverses.
  final Collection<ExchangeRate> rates = new HashSet<>(exchangerates);
  for (final ExchangeRate exchangeRate : exchangerates) {
    rates.add(new ExchangeRateImpl(exchangeRate.target(), exchangeRate.source()));
  }

  final Collection<ExchangeRate> results = new HashSet<>();
  for (final ExchangeRate rate : rates) {
    results.addAll(
        mongoOperations.find(
            Query.query(
                Criteria.where(SOURCE_FIELD_NAME)
                    .is(rate.source())
                    .and(TARGET_FIELD_NAME)
                    .is(rate.target())),
            ExchangeRateImpl.class));
  }
  return Collections.unmodifiableCollection(results);
}
/**
 * Computes the weighted entropy of the child nodes: each child's entropy weighted by its share
 * of the parent's total count.
 *
 * @param parent the parent node supplying the total count
 * @param children the child nodes to aggregate over
 * @return the sum over children of (child.total / parent.total) * child.entropy
 */
private double calcEntropy(Node parent, Collection<Node> children) {
  final int total = parent.getTotal();
  // FIX: sum on a primitive DoubleStream instead of boxing every term into a Double and
  // folding with reduce; DoubleStream.sum also uses compensated summation for better accuracy.
  return children.stream()
      .mapToDouble(child -> ((double) child.getTotal() / (double) total) * child.getEntropy())
      .sum();
}
/**
 * Resolves every known module path to its workspace project.
 *
 * @return the projects for all registered module paths, in the map's value iteration order
 */
public List<IProject> getModulesProjects() {
  return info.modulesNameToPath.values().stream()
      .map(modulePath -> workspace.getRoot().getProject(modulePath))
      .collect(Collectors.toList());
}
/**
 * Polls the cluster state for up to ~3 minutes (60 iterations x 3s) until every replica of
 * collection1/shard1 — excluding replicas hosted on deliberately downed nodes — is ACTIVE and
 * resides on a live node. Fails the test on timeout.
 *
 * @throws Exception if interrupted while sleeping between polls
 */
private void waitTillNodesActive() throws Exception {
  for (int i = 0; i < 60; i++) {
    Thread.sleep(3000);
    ZkStateReader zkStateReader = cloudClient.getZkStateReader();
    ClusterState clusterState = zkStateReader.getClusterState();
    DocCollection collection1 = clusterState.getCollection("collection1");
    Slice slice = collection1.getSlice("shard1");
    Collection<Replica> replicas = slice.getReplicas();
    boolean allActive = true;

    // Replicas living on nodes we took down are expected to be inactive; skip them.
    Collection<String> nodesDownNames =
        nodesDown.stream().map(n -> n.coreNodeName).collect(Collectors.toList());
    Collection<Replica> replicasToCheck =
        replicas
            .stream()
            .filter(r -> !nodesDownNames.contains(r.getName()))
            .collect(Collectors.toList());

    for (Replica replica : replicasToCheck) {
      // A replica counts as ready only when its node is live AND its state is ACTIVE.
      if (!clusterState.liveNodesContain(replica.getNodeName())
          || replica.getState() != Replica.State.ACTIVE) {
        allActive = false;
        break;
      }
    }
    if (allActive) {
      return;
    }
  }
  printLayout();
  fail("timeout waiting to see all nodes active");
}
/**
 * Batch insert statements into the graph.
 *
 * <p>Each statement is serialized as its own {@code INSERT INTO GRAPH <g> { ... };} command and
 * all commands are executed in a single Virtuoso update request.
 *
 * <p>NOTE(review): this mutates the caller's statements — each statement's object is replaced by
 * its escaped form via {@code setObject} before serialization.
 *
 * @param statements the statements to insert; null or empty input is rejected
 * @return true if and only if the query execution was successful
 */
public boolean batchInsertStatements(Collection<? extends RDFStatement> statements) {
  if (statements == null || statements.isEmpty()) {
    return false;
  }
  try {
    StringBuilder sb = new StringBuilder();
    statements
        .stream()
        .forEach(
            s -> {
              // Escape the object in place so s.toString() emits the escaped form.
              s.setObject(RDFUtils.escapeString(s.getObject()));
              sb.append("INSERT INTO GRAPH <")
                  .append(this.graphName)
                  .append("> { ")
                  .append(s.toString())
                  .append(" };\n");
            });
    VirtuosoUpdateRequest vur = VirtuosoUpdateFactory.create(sb.toString(), this.graph);
    vur.exec();
    return true;
  } catch (Exception ex) {
    // Best-effort: log and fall through to the failure return instead of propagating.
    logger.error("Exception while batch inserting statements", ex);
  }
  return false;
}
// find all elements using pattern private static void testSelect() { Collection<String> collection = Lists.newArrayList("2", "14", "3", "13", "43"); MutableCollection<String> mutableCollection = FastList.newListWith("2", "14", "3", "13", "43"); Iterable<String> iterable = collection; // find all elements using pattern List<String> jdk = collection .stream() .filter((s) -> s.contains("1")) .collect(Collectors.toList()); // using JDK Iterable<String> guava = Iterables.filter(iterable, (s) -> s.contains("1")); // using guava Collection<String> apache = CollectionUtils.select(iterable, (s) -> s.contains("1")); // using Apache MutableCollection<String> gs = mutableCollection.select((s) -> s.contains("1")); // using GS System.out.println( "select = " + jdk + ":" + guava + ":" + apache + ":" + gs); // print select = [14, 13]:[14, 13]:[14, 13]:[14, 13] }
/**
 * Updates pheromone on every edge of the tour: evaporate a fixed fraction, then deposit an
 * amount inversely proportional to the total tour length.
 */
public void updatePheromone() {
  final double tourLength = edges.stream().mapToDouble(PheromoneEdge::weight).sum();
  // The deposit is constant for the whole loop, so compute it once.
  final double deposit = 1.0 / tourLength;
  for (PheromoneEdge edge : edges) {
    edge.volatilize(0.2);
    edge.accumulate(deposit);
  }
}
/**
 * Maps every {@code Rank} entity to its compact DTO, personalized for the given user.
 *
 * @param ranks the rank entities to convert
 * @param currentUserId the id of the user the DTOs are resolved against
 * @return compact DTOs in the iteration order of {@code ranks}
 */
public static List<RankCompactDTO> mapFromRanksEntitiesC(
    Collection<Rank> ranks, long currentUserId) {
  return ranks.stream()
      .map(rankEntity -> mapFromRankEntity(rankEntity, currentUserId))
      .collect(Collectors.toList());
}
/**
 * Removes from this collection every non-null element that is also contained in {@code c}.
 *
 * <p>NOTE(review): {@code remove(o)} is invoked exactly once per non-null element of {@code c},
 * matching the original behavior. If this collection can hold duplicates, a single remove per
 * element removes only one occurrence, which differs from the {@code Collection.removeAll}
 * contract — confirm element uniqueness of the backing collection.
 *
 * @param c the collection of elements to remove; null entries are skipped
 * @return {@code true} if this collection changed as a result of the call
 */
@Override
public boolean removeAll(Collection<?> c) {
  // FIX: the original folded remove() side effects through a stream reduce — a stream pipeline
  // with mutating map steps. A plain loop expresses the mutation directly; |= keeps calling
  // remove for every element (no short-circuit), exactly like the reduce did.
  boolean modified = false;
  for (Object o : c) {
    if (o != null) {
      modified |= remove(o);
    }
  }
  return modified;
}
/** Records each deleted parent of the sub-model as a deleted model-hierarchy link. */
@Override
public void modelHierarchyDeleted(int subModel, Collection<Integer> deletedParents) {
  for (Integer parent : deletedParents) {
    deletedModelHierarchies.add(new ModelHierarchyInfo(parent, subModel));
  }
}
/**
 * Fetches the container documents behind {@code documentLinks} and passes their distinct
 * container-template service links on to {@code loadNamesFromTemplates}.
 *
 * <p>All GETs are issued as one joined operation; any failure in the join — or while processing
 * the responses — fails the task.
 *
 * @param currentState the current task state, forwarded to the next stage
 * @param documentLinks links of the container documents to resolve
 */
private void getContainerTemplates(final State currentState, Collection<String> documentLinks) {
  OperationJoin.create(
          documentLinks.stream().map(documentLink -> Operation.createGet(this, documentLink)))
      .setCompletion(
          (ops, exs) -> {
            if (null != exs && !exs.isEmpty()) {
              failTask(exs);
              return;
            }
            try {
              // Collect into a Set to de-duplicate template links shared by containers.
              Set<String> containerTemplateServiceLinks =
                  ops.values()
                      .stream()
                      .map(
                          operation ->
                              operation.getBody(ContainerService.State.class)
                                  .containerTemplateServiceLink)
                      .collect(Collectors.toSet());
              loadNamesFromTemplates(currentState, containerTemplateServiceLinks);
            } catch (Throwable t) {
              failTask(t);
            }
          })
      .sendWith(this);
}
/** Records each deleted parent of the sub-class as a deleted class-hierarchy link. */
@Override
public void classHierarchyDeleted(String subClass, Collection<String> deletedParents) {
  for (String parent : deletedParents) {
    deletedClassHierarchies.add(new ClassHierarchyInfo(parent, subClass));
  }
}
/**
 * Loads every non-expired ticket from the Ehcache store, unwraps the cached elements back into
 * tickets, and returns them decoded.
 */
@Override
public Collection<Ticket> getTickets() {
  final Collection<Element> cachedElements =
      this.ehcacheTicketsCache.getAll(this.ehcacheTicketsCache.getKeysWithExpiryCheck()).values();
  final Collection<Ticket> tickets =
      cachedElements
          .stream()
          .map(element -> (Ticket) element.getObjectValue())
          .collect(Collectors.toList());
  return decodeTickets(tickets);
}
/**
 * Returns every game object from {@code list} whose position lies inside the axis-aligned
 * rectangle anchored at {@code pos} with the given dimensions.
 *
 * @param pos top-left corner of the query rectangle
 * @param width rectangle width
 * @param height rectangle height
 * @param list candidate game objects
 * @return the objects whose position falls within the rectangle
 */
public List<GameObject> getGameObjectsInRange(
    Vector2f pos, int width, int height, Collection<GameObject> list) {
  final Rectangle range = new Rectangle(pos.x, pos.y, width, height);
  return list.stream()
      .filter(gameObject -> range.contains(gameObject.getPos().x, gameObject.getPos().y))
      .collect(Collectors.toList());
}
/**
 * Checks whether the module contains more than one module-info file and, if so, reports an
 * error on {@code element} with a quick fix that navigates to a duplicate file.
 *
 * @param element the java module declaration being highlighted
 * @param file the file containing {@code element}
 * @return the duplicate-file error highlight, or {@code null} when no duplicate exists (or the
 *     element is not inside a module)
 */
@Nullable
static HighlightInfo checkFileDuplicates(@NotNull PsiJavaModule element, @NotNull PsiFile file) {
  Module module = ModuleUtilCore.findModuleForPsiElement(element);
  if (module != null) {
    Project project = file.getProject();
    Collection<VirtualFile> others =
        FilenameIndex.getVirtualFilesByName(project, MODULE_INFO_FILE, new ModulesScope(module));
    if (others.size() > 1) {
      String message = JavaErrorMessages.message("module.file.duplicate");
      HighlightInfo info =
          HighlightInfo.newHighlightInfo(HighlightInfoType.ERROR)
              .range(range(element))
              .description(message)
              .create();
      // Offer a "go to duplicate" quick fix pointing at the first other module-info file.
      others
          .stream()
          .map(f -> PsiManager.getInstance(project).findFile(f))
          .filter(f -> f != file)
          .findFirst()
          .ifPresent(
              duplicate ->
                  QuickFixAction.registerQuickFixAction(
                      info,
                      new GoToSymbolFix(
                          duplicate, JavaErrorMessages.message("module.open.duplicate.text"))));
      return info;
    }
  }
  return null;
}
/**
 * Checks whether this skill's requirements are met: the user must have a usable (non-cat) tail
 * part, plus either any Dark affinity or at least 20 Seduction.
 */
@Override
public boolean requirements(Combat c, Character user, Character target) {
  // All body parts the user has under the "tail" slot.
  Collection<BodyPart> tails = user.body.get("tail");
  // NOTE(review): `p != TailPart.cat` is a reference comparison against the TailPart.cat
  // instance; this assumes cat tails are always represented by that shared instance — confirm.
  boolean hasFuckableTail = tails.stream().anyMatch(p -> p.isType("tail") && p != TailPart.cat);
  return hasFuckableTail && (user.get(Attribute.Dark) >= 1 || user.get(Attribute.Seduction) >= 20);
}
/**
 * Creates a subscription whose topics are stored as a single comma-separated string, each topic
 * trimmed of surrounding whitespace.
 *
 * @param subscriber the subscribing user
 * @param frequency the delivery frequency
 * @param severity the minimum severity
 * @param topics the topics to subscribe to
 */
public Subscription(
    User subscriber, Frequency frequency, Severity severity, Collection<String> topics) {
  this(
      subscriber,
      frequency,
      severity,
      String.join(",", topics.stream().map(String::trim).collect(Collectors.toList())));
}
/**
 * Returns all known OpenStack ports attached to the given network.
 *
 * @param networkId id of the network whose ports are requested
 * @return the ports whose network id equals {@code networkId}
 */
@Override
public Collection<OpenstackPort> ports(String networkId) {
  return restHandler.getPorts().stream()
      .filter(port -> port.networkId().equals(networkId))
      .collect(Collectors.toList());
}
/**
 * Returns the type model for the requested metamodel type, creating and registering a new model
 * if no existing one matches both the type name and the metamodel annotation name.
 *
 * @param am the metamodel annotation mirror
 * @param te the annotated type element
 * @return the matching or newly created type model
 */
private ProcessorMetaModelType getTypeModel(final AnnotationMirror am, final TypeElement te) {
  return metamodelTypes
      .stream()
      .filter(
          model ->
              model.typeName.contentEquals(getClassName(te))
                  && model.metamodelAnnotationName.contentEquals(getAnnotationClassName(am)))
      .findAny()
      .orElseGet(() -> modelType(am, te));
}
/**
 * Builds a map from the elements of {@code xs}, keyed by {@code keyFn} with values produced by
 * {@code valueFn}.
 *
 * @param keyFn extracts the map key from an element
 * @param valueFn extracts the map value from an element
 * @param xs the elements to index
 * @return a map from extracted keys to extracted values
 * @throws NullPointerException if any argument is null
 * @throws IllegalStateException if two elements map to the same key (Collectors.toMap contract)
 */
public static <K, R, V> Map<K, R> indexBy(
    Function<V, K> keyFn, Function<V, R> valueFn, Collection<V> xs) {
  // FIX: replaced the Guava checkNotNull calls with plain JDK checks; the thrown type
  // (NullPointerException) and the messages are unchanged, and the method no longer needs a
  // third-party dependency for trivial precondition checks.
  if (xs == null) {
    throw new NullPointerException("xs cannot be null");
  }
  if (keyFn == null) {
    throw new NullPointerException("keyFn cannot be null");
  }
  if (valueFn == null) {
    throw new NullPointerException("valueFn cannot be null");
  }
  return xs.stream().collect(toMap(keyFn, valueFn));
}
/**
 * Creates a partitioned database over the given partitions. Partitions are kept sorted by name
 * so hashing is stable, and keys are routed with a simple key-hash partitioner.
 *
 * @param name the database name
 * @param partitions the backing partitions
 */
public PartitionedDatabase(String name, Collection<Database> partitions) {
  this.name = name;
  this.partitions =
      partitions.stream()
          .sorted(java.util.Comparator.comparing(Database::name))
          .collect(Collectors.toList());
  this.partitioner = new SimpleKeyHashPartitioner(this.partitions);
}
/**
 * Returns the total amount in pence of what is currently being returned to the customer.
 *
 * @return an integer indicating the totalChange being returned to the customer.
 */
private int getCurrentChange() {
  // FIX: sum on a primitive IntStream instead of boxing every value into an Integer and
  // folding with reduce; the result is identical.
  return changeInProgress.stream().mapToInt(c -> c.getValueAsInt()).sum();
}
/**
 * Converts the command sources that are actual players into {@code MinecraftPlayer} instances;
 * non-player sources (e.g. the console) are skipped.
 *
 * @param collection the command sources to convert
 * @return a mutable {@code HashSet} of the corresponding players
 */
@Nonnull
protected Set<MinecraftPlayer> commandSourceCollectionToMinecraftPlayer(
    @Nonnull Collection<? extends CommandSource> collection) {
  final Set<MinecraftPlayer> players = new HashSet<>();
  for (final CommandSource source : collection) {
    if (source instanceof Player) {
      players.add(new MinecraftPlayer(source.getName(), ((Player) source).getUniqueId()));
    }
  }
  return players;
}