/**
 * Returns the context for a LIPO compile action. This uses the include dirs and defines of the
 * library, but the declared inclusion dirs/srcs from both the library and the owner binary.
 *
 * <p>TODO(bazel-team): this might make every LIPO target have an unnecessarily large set of
 * inclusion dirs/srcs. The correct behavior would be to merge only the contexts of the actually
 * referred targets (as listed in the .imports file).
 *
 * <p>Undeclared inclusion checking ({@link #getDeclaredIncludeDirs()}, {@link
 * #getDeclaredIncludeWarnDirs()}, and {@link #getDeclaredIncludeSrcs()}) needs to use the union
 * of the contexts of the involved source files.
 *
 * <p>For include and define command line flags ({@link #getIncludeDirs()}, {@link
 * #getQuoteIncludeDirs()}, {@link #getSystemIncludeDirs()}, and {@link #getDefines()}), LIPO
 * compilations use the same values as non-LIPO compilations.
 *
 * <p>Include scanning is not handled by this method. See {@code
 * IncludeScannable#getAuxiliaryScannables()} instead.
 *
 * @param ownerContext the compilation context of the owner binary
 * @param libContext the compilation context of the library
 */
public static CppCompilationContext mergeForLipo(
    CppCompilationContext ownerContext, CppCompilationContext libContext) {
  ImmutableSet.Builder<Artifact> prerequisites = ImmutableSet.builder();
  prerequisites.addAll(ownerContext.compilationPrerequisites);
  prerequisites.addAll(libContext.compilationPrerequisites);
  ModuleInfo.Builder moduleInfo = new ModuleInfo.Builder();
  moduleInfo.merge(ownerContext.moduleInfo);
  moduleInfo.merge(libContext.moduleInfo);
  ModuleInfo.Builder picModuleInfo = new ModuleInfo.Builder();
  picModuleInfo.merge(ownerContext.picModuleInfo);
  picModuleInfo.merge(libContext.picModuleInfo);
  return new CppCompilationContext(
      libContext.commandLineContext,
      prerequisites.build(),
      mergeSets(ownerContext.declaredIncludeDirs, libContext.declaredIncludeDirs),
      mergeSets(ownerContext.declaredIncludeWarnDirs, libContext.declaredIncludeWarnDirs),
      mergeSets(ownerContext.declaredIncludeSrcs, libContext.declaredIncludeSrcs),
      mergeSets(ownerContext.pregreppedHdrs, libContext.pregreppedHdrs),
      moduleInfo.build(),
      picModuleInfo.build(),
      mergeSets(ownerContext.transitiveModuleMaps, libContext.transitiveModuleMaps),
      mergeSets(ownerContext.directModuleMaps, libContext.directModuleMaps),
      libContext.cppModuleMap,
      libContext.provideTransitiveModuleMaps,
      libContext.useHeaderModules);
}
@Override
public PlanNode visitIndexJoin(IndexJoinNode node, RewriteContext<Set<Symbol>> context) {
  ImmutableSet.Builder<Symbol> probeInputsBuilder = ImmutableSet.builder();
  probeInputsBuilder
      .addAll(context.get())
      .addAll(Iterables.transform(node.getCriteria(), IndexJoinNode.EquiJoinClause::getProbe));
  if (node.getProbeHashSymbol().isPresent()) {
    probeInputsBuilder.add(node.getProbeHashSymbol().get());
  }
  Set<Symbol> probeInputs = probeInputsBuilder.build();

  ImmutableSet.Builder<Symbol> indexInputBuilder = ImmutableSet.builder();
  indexInputBuilder
      .addAll(context.get())
      .addAll(Iterables.transform(node.getCriteria(), IndexJoinNode.EquiJoinClause::getIndex));
  if (node.getIndexHashSymbol().isPresent()) {
    indexInputBuilder.add(node.getIndexHashSymbol().get());
  }
  Set<Symbol> indexInputs = indexInputBuilder.build();

  PlanNode probeSource = context.rewrite(node.getProbeSource(), probeInputs);
  PlanNode indexSource = context.rewrite(node.getIndexSource(), indexInputs);

  return new IndexJoinNode(
      node.getId(),
      node.getType(),
      probeSource,
      indexSource,
      node.getCriteria(),
      node.getProbeHashSymbol(),
      node.getIndexHashSymbol());
}
@Override
public PlanNode visitJoin(JoinNode node, RewriteContext<Set<Symbol>> context) {
  ImmutableSet.Builder<Symbol> leftInputsBuilder = ImmutableSet.builder();
  leftInputsBuilder
      .addAll(context.get())
      .addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getLeft));
  if (node.getLeftHashSymbol().isPresent()) {
    leftInputsBuilder.add(node.getLeftHashSymbol().get());
  }
  Set<Symbol> leftInputs = leftInputsBuilder.build();

  ImmutableSet.Builder<Symbol> rightInputsBuilder = ImmutableSet.builder();
  rightInputsBuilder
      .addAll(context.get())
      .addAll(Iterables.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight));
  if (node.getRightHashSymbol().isPresent()) {
    rightInputsBuilder.add(node.getRightHashSymbol().get());
  }
  Set<Symbol> rightInputs = rightInputsBuilder.build();

  PlanNode left = context.rewrite(node.getLeft(), leftInputs);
  PlanNode right = context.rewrite(node.getRight(), rightInputs);

  return new JoinNode(
      node.getId(),
      node.getType(),
      left,
      right,
      node.getCriteria(),
      node.getLeftHashSymbol(),
      node.getRightHashSymbol());
}
static {
  ImmutableSet.Builder<Double> integralBuilder = ImmutableSet.builder();
  ImmutableSet.Builder<Double> fractionalBuilder = ImmutableSet.builder();
  integralBuilder.addAll(Doubles.asList(0.0, -0.0, Double.MAX_VALUE, -Double.MAX_VALUE));
  // Add small multiples of MIN_VALUE and MIN_NORMAL
  for (int scale = 1; scale <= 4; scale++) {
    for (double d : Doubles.asList(Double.MIN_VALUE, Double.MIN_NORMAL)) {
      fractionalBuilder.add(d * scale).add(-d * scale);
    }
  }
  for (double d :
      Doubles.asList(
          0, 1, 2, 7, 51, 102, Math.scalb(1.0, 53), Integer.MIN_VALUE, Integer.MAX_VALUE,
          Long.MIN_VALUE, Long.MAX_VALUE)) {
    for (double delta : Doubles.asList(0.0, 1.0, 2.0)) {
      integralBuilder.addAll(Doubles.asList(d + delta, d - delta, -d - delta, -d + delta));
    }
    for (double delta : Doubles.asList(0.01, 0.1, 0.25, 0.499, 0.5, 0.501, 0.7, 0.8)) {
      double x = d + delta;
      if (x != Math.round(x)) {
        fractionalBuilder.add(x);
      }
    }
  }
  INTEGRAL_DOUBLE_CANDIDATES = integralBuilder.build();
  fractionalBuilder.add(1.414).add(1.415).add(Math.sqrt(2));
  fractionalBuilder.add(5.656).add(5.657).add(4 * Math.sqrt(2));
  for (double d : INTEGRAL_DOUBLE_CANDIDATES) {
    double x = 1 / d;
    if (x != Math.rint(x)) {
      fractionalBuilder.add(x);
    }
  }
  FRACTIONAL_DOUBLE_CANDIDATES = fractionalBuilder.build();
  FINITE_DOUBLE_CANDIDATES =
      Iterables.concat(FRACTIONAL_DOUBLE_CANDIDATES, INTEGRAL_DOUBLE_CANDIDATES);
  POSITIVE_FINITE_DOUBLE_CANDIDATES =
      Iterables.filter(
          FINITE_DOUBLE_CANDIDATES,
          new Predicate<Double>() {
            @Override
            public boolean apply(Double input) {
              return input.doubleValue() > 0.0;
            }
          });
  DOUBLE_CANDIDATES_EXCEPT_NAN = Iterables.concat(FINITE_DOUBLE_CANDIDATES, INFINITIES);
  ALL_DOUBLE_CANDIDATES = Iterables.concat(DOUBLE_CANDIDATES_EXCEPT_NAN, asList(Double.NaN));
}
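// Hedged sketch (not copied from the original source): plausible declarations for the fields the
// static initializer above assigns, with types inferred from how each value is built. The
// INFINITIES constant referenced above is assumed to be declared elsewhere in the same class.
static final ImmutableSet<Double> INTEGRAL_DOUBLE_CANDIDATES;   // from integralBuilder.build()
static final ImmutableSet<Double> FRACTIONAL_DOUBLE_CANDIDATES; // from fractionalBuilder.build()
static final Iterable<Double> FINITE_DOUBLE_CANDIDATES;         // Iterables.concat(...)
static final Iterable<Double> POSITIVE_FINITE_DOUBLE_CANDIDATES;
static final Iterable<Double> DOUBLE_CANDIDATES_EXCEPT_NAN;
static final Iterable<Double> ALL_DOUBLE_CANDIDATES;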
@Override
public Set<ClassName> referencedClasses() {
  ImmutableSet.Builder<ClassName> builder = new ImmutableSet.Builder<ClassName>();
  if (extendsBound.isPresent()) {
    builder.addAll(extendsBound.get().referencedClasses());
  }
  if (superBound.isPresent()) {
    builder.addAll(superBound.get().referencedClasses());
  }
  return builder.build();
}
protected Builder<P, D> addTransaction(
    final DataModification<P, D> data, final Predicate<P> keyFilter) {
  updatedOperational.putAll(Maps.filterKeys(data.getUpdatedOperationalData(), keyFilter));
  updatedConfiguration.putAll(Maps.filterKeys(data.getUpdatedConfigurationData(), keyFilter));
  originalConfiguration.putAll(Maps.filterKeys(data.getOriginalConfigurationData(), keyFilter));
  originalOperational.putAll(Maps.filterKeys(data.getOriginalOperationalData(), keyFilter));
  createdOperational.putAll(Maps.filterKeys(data.getCreatedOperationalData(), keyFilter));
  createdConfiguration.putAll(Maps.filterKeys(data.getCreatedConfigurationData(), keyFilter));
  removedOperational.addAll(Sets.filter(data.getRemovedOperationalData(), keyFilter));
  removedConfiguration.addAll(Sets.filter(data.getRemovedConfigurationData(), keyFilter));
  return this;
}
/**
 * Includes superclass and super interface hierarchy.
 *
 * @return set of class types
 */
public Set<JavaType.ClassJavaType> superTypes() {
  ImmutableSet.Builder<JavaType.ClassJavaType> types = ImmutableSet.builder();
  JavaType.ClassJavaType superClassType = (JavaType.ClassJavaType) this.superClass();
  types.addAll(this.interfacesOfType());
  while (superClassType != null) {
    types.add(superClassType);
    TypeJavaSymbol superClassSymbol = superClassType.getSymbol();
    types.addAll(superClassSymbol.interfacesOfType());
    superClassType = (JavaType.ClassJavaType) superClassSymbol.superClass();
  }
  return types.build();
}
@Override
public Iterable<BuildTarget> findDepsForTargetFromConstructorArgs(
    BuildTarget buildTarget, CellPathResolver cellRoots, Arg constructorArg) {
  ImmutableSet.Builder<BuildTarget> deps = ImmutableSet.builder();

  // Get any parse time deps from the C/C++ platforms.
  deps.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatforms.getValues()));

  for (PythonPlatform pythonPlatform : pythonPlatforms.getValues()) {
    deps.addAll(pythonPlatform.getCxxLibrary().asSet());
  }

  return deps.build();
}
@Override
public void importIdentifiers(ValueTable sourceValueTable) throws IOException {
  ValueTable identifiersTable =
      identifiersTableService.ensureIdentifiersTable(sourceValueTable.getEntityType());
  ImmutableSet.Builder<String> builder = ImmutableSet.builder();
  builder.addAll(
      Iterables.transform(
          sourceValueTable.getVariableEntities(),
          new Function<VariableEntity, String>() {
            @Override
            public String apply(VariableEntity input) {
              return input.getIdentifier();
            }
          }));
  ValueTable sourceIdentifiersTable =
      new StaticValueTable(
          sourceValueTable.getDatasource(),
          identifiersTable.getName(),
          builder.build(),
          identifiersTable.getEntityType());
  // Don't copy null values; otherwise we'll delete existing mappings.
  DatasourceCopier.Builder.newCopier()
      .dontCopyNullValues()
      .withLoggingListener()
      .build()
      .copy(sourceIdentifiersTable, identifiersTableService.getDatasource());
}
@Override
public PlanNode visitSemiJoin(SemiJoinNode node, RewriteContext<Set<Symbol>> context) {
  ImmutableSet.Builder<Symbol> sourceInputsBuilder = ImmutableSet.builder();
  sourceInputsBuilder.addAll(context.get()).add(node.getSourceJoinSymbol());
  if (node.getSourceHashSymbol().isPresent()) {
    sourceInputsBuilder.add(node.getSourceHashSymbol().get());
  }
  Set<Symbol> sourceInputs = sourceInputsBuilder.build();

  ImmutableSet.Builder<Symbol> filteringSourceInputBuilder = ImmutableSet.builder();
  filteringSourceInputBuilder.add(node.getFilteringSourceJoinSymbol());
  if (node.getFilteringSourceHashSymbol().isPresent()) {
    filteringSourceInputBuilder.add(node.getFilteringSourceHashSymbol().get());
  }
  Set<Symbol> filteringSourceInputs = filteringSourceInputBuilder.build();

  PlanNode source = context.rewrite(node.getSource(), sourceInputs);
  PlanNode filteringSource = context.rewrite(node.getFilteringSource(), filteringSourceInputs);

  return new SemiJoinNode(
      node.getId(),
      source,
      filteringSource,
      node.getSourceJoinSymbol(),
      node.getFilteringSourceJoinSymbol(),
      node.getSemiJoinOutput(),
      node.getSourceHashSymbol(),
      node.getFilteringSourceHashSymbol());
}
private void updateNodes(MemoryPoolAssignmentsRequest assignments) {
  ImmutableSet.Builder<Node> builder = ImmutableSet.builder();
  Set<Node> aliveNodes =
      builder
          .addAll(nodeManager.getNodes(ACTIVE))
          .addAll(nodeManager.getNodes(SHUTTING_DOWN))
          .build();

  ImmutableSet<String> aliveNodeIds =
      aliveNodes.stream().map(Node::getNodeIdentifier).collect(toImmutableSet());

  // Remove nodes that don't exist anymore
  // Make a copy to materialize the set difference
  Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
  nodes.keySet().removeAll(deadNodes);

  // Add new nodes
  for (Node node : aliveNodes) {
    if (!nodes.containsKey(node.getNodeIdentifier())) {
      nodes.put(
          node.getNodeIdentifier(),
          new RemoteNodeMemory(
              httpClient,
              memoryInfoCodec,
              assignmentsRequestJsonCodec,
              locationFactory.createMemoryInfoLocation(node)));
    }
  }

  // Schedule refresh
  for (RemoteNodeMemory node : nodes.values()) {
    node.asyncRefresh(assignments);
  }
}
@Override
public AnswerKey apply(AnswerKey input) {
  final Set<Response> existingResponses = Sets.newHashSet(input.allResponses());
  final ImmutableSet.Builder<AssessedResponse> newAssessedResponses = ImmutableSet.builder();
  newAssessedResponses.addAll(input.annotatedResponses());

  for (final AssessedResponse assessedResponse : input.annotatedResponses()) {
    if (assessedResponse.assessment().realis().isPresent()) {
      final Response responseWithAssessedRealis =
          assessedResponse
              .response()
              .copyWithSwappedRealis(assessedResponse.assessment().realis().get());
      if (!existingResponses.contains(responseWithAssessedRealis)) {
        newAssessedResponses.add(
            AssessedResponse.from(responseWithAssessedRealis, assessedResponse.assessment()));
        existingResponses.add(responseWithAssessedRealis);
      }
    }
  }

  return AnswerKey.from(
      input.docId(),
      newAssessedResponses.build(),
      input.unannotatedResponses(),
      input.corefAnnotation());
}
public Collection<ITrait> getAllTraits() {
  ImmutableSet.Builder<ITrait> builder = ImmutableSet.builder();
  for (List<ITrait> traitlist : traits.values()) {
    builder.addAll(traitlist);
  }
  return builder.build();
}
private ImmutableSet<ASTAnnotation> flatten(ImmutableSet<ASTAnnotation>... sets) {
  ImmutableSet.Builder<ASTAnnotation> combineBuilder = ImmutableSet.builder();
  for (ImmutableSet<ASTAnnotation> set : sets) {
    combineBuilder.addAll(set);
  }
  return combineBuilder.build();
}
/**
 * Returns the {@link Step} that generates a code coverage report from data obtained during the
 * test run. This method also collects the source directories and the paths to the class files of
 * the libraries tested during the test run.
 */
private static Step getReportCommand(
    ImmutableSet<JavaLibrary> rulesUnderTest,
    Optional<DefaultJavaPackageFinder> defaultJavaPackageFinderOptional,
    ProjectFilesystem filesystem,
    Path outputDirectory,
    CoverageReportFormat format) {
  ImmutableSet.Builder<String> srcDirectories = ImmutableSet.builder();
  ImmutableSet.Builder<Path> pathsToClasses = ImmutableSet.builder();

  // Add all source directories of java libraries that we are testing to -sourcepath.
  for (JavaLibrary rule : rulesUnderTest) {
    ImmutableSet<String> sourceFolderPath =
        getPathToSourceFolders(rule, defaultJavaPackageFinderOptional, filesystem);
    if (!sourceFolderPath.isEmpty()) {
      srcDirectories.addAll(sourceFolderPath);
    }
    Path pathToOutput = rule.getPathToOutput();
    if (pathToOutput == null) {
      continue;
    }
    pathsToClasses.add(pathToOutput);
  }

  return new GenerateCodeCoverageReportStep(
      srcDirectories.build(), pathsToClasses.build(), outputDirectory, format);
}
@VisibleForTesting
List<Module> createModulesForProjectConfigs() throws IOException {
  DependencyGraph dependencyGraph = partialGraph.getDependencyGraph();
  List<Module> modules = Lists.newArrayList();

  // Convert the project_config() targets into modules and find the union of all jars passed to
  // no_dx.
  ImmutableSet.Builder<Path> noDxJarsBuilder = ImmutableSet.builder();
  for (BuildTarget target : partialGraph.getTargets()) {
    BuildRule buildRule = dependencyGraph.findBuildRuleByTarget(target);
    ProjectConfig projectConfig = (ProjectConfig) buildRule.getBuildable();

    BuildRule srcRule = projectConfig.getSrcRule();
    if (srcRule != null) {
      Buildable buildable = srcRule.getBuildable();
      if (buildable instanceof AndroidBinary) {
        AndroidBinary androidBinary = (AndroidBinary) buildable;
        AndroidDexTransitiveDependencies binaryDexTransitiveDependencies =
            androidBinary.findDexTransitiveDependencies();
        noDxJarsBuilder.addAll(binaryDexTransitiveDependencies.noDxClasspathEntries);
      }
    }

    Module module = createModuleForProjectConfig(projectConfig);
    modules.add(module);
  }
  ImmutableSet<Path> noDxJars = noDxJarsBuilder.build();

  // Update module dependencies to apply scope="PROVIDED", where appropriate.
  markNoDxJarsAsProvided(modules, noDxJars);

  return modules;
}
private static ImmutableSet.Builder<String> validateLabels(Set<String> labelSet) {
  ImmutableSet.Builder<String> result = ImmutableSet.builder();
  for (String label : labelSet) {
    BuckConfig.validateLabelName(label);
  }
  result.addAll(labelSet);
  return result;
}
@Override
public ImmutableSet<String> get() {
  ImmutableSet.Builder<String> result = validateLabels(excludedSet.get());
  result.addAll(getBuckConfig().getDefaultExcludedLabels());
  ImmutableSet<String> allExcluded = result.build();

  // If someone has included a test, then we should really run it.
  return Sets.difference(allExcluded, getIncludedLabels()).immutableCopy();
}
private Set<JavaType.ClassJavaType> interfacesOfType() {
  ImmutableSet.Builder<JavaType.ClassJavaType> builder = ImmutableSet.builder();
  for (JavaType interfaceType : getInterfaces()) {
    JavaType.ClassJavaType classType = (JavaType.ClassJavaType) interfaceType;
    builder.add(classType);
    builder.addAll(classType.getSymbol().interfacesOfType());
  }
  return builder.build();
}
@Override
public List<String> listSchemaNames(String catalogName) {
  checkCatalogName(catalogName);
  ImmutableSet.Builder<String> schemaNames = ImmutableSet.builder();
  for (ConnectorMetadataEntry entry : allConnectorsFor(catalogName)) {
    schemaNames.addAll(entry.getMetadata().listSchemaNames());
  }
  return ImmutableList.copyOf(schemaNames.build());
}
@Override
public Iterable<BuildTarget> findDepsForTargetFromConstructorArgs(
    BuildTarget buildTarget, CellPathResolver cellRoots, Arg constructorArg) {
  ImmutableSet.Builder<BuildTarget> deps = ImmutableSet.builder();

  if (constructorArg.useCxxLibraries.or(false)) {
    deps.addAll(CxxPlatforms.getParseTimeDeps(cxxPlatform));
  }

  return deps.build();
}
protected Builder<P, D> addOperationalChangeSet(final RootedChangeSet<P, D> changeSet) {
  if (changeSet == null) {
    return this;
  }
  originalOperational.putAll(changeSet.getOriginal());
  createdOperational.putAll(changeSet.getCreated());
  updatedOperational.putAll(changeSet.getUpdated());
  removedOperational.addAll(changeSet.getRemoved());
  return this;
}
public static Set<HivePrivilegeInfo> toGrants(List<PrivilegeGrantInfo> userGrants) {
  if (userGrants == null) {
    return ImmutableSet.of();
  }
  ImmutableSet.Builder<HivePrivilegeInfo> privileges = ImmutableSet.builder();
  for (PrivilegeGrantInfo userGrant : userGrants) {
    privileges.addAll(parsePrivilege(userGrant));
  }
  return privileges.build();
}
private void installNativeLibraryFiles() throws Exception {
  ImmutableMultimap<String, Path> allLibraries = getAllLibraries();
  ImmutableSet.Builder<String> providedLibraries = ImmutableSet.builder();
  for (String abi : getDeviceAbis()) {
    // ImmutableSet.Builder may be built repeatedly; each build() call here returns a snapshot of
    // the libraries already provided by the ABIs processed so far.
    ImmutableMap<String, Path> libraries =
        getRequiredLibrariesForAbi(allLibraries, abi, providedLibraries.build());
    installNativeLibrariesForAbi(abi, libraries);
    providedLibraries.addAll(libraries.keySet());
  }
}
private FlagGroup(CToolchain.FlagGroup flagGroup) throws InvalidConfigurationException {
  ImmutableList.Builder<Flag> flags = ImmutableList.builder();
  ImmutableSet.Builder<String> usedVariables = ImmutableSet.builder();
  for (String flag : flagGroup.getFlagList()) {
    FlagParser parser = new FlagParser(flag);
    flags.add(new Flag(parser.getChunks()));
    usedVariables.addAll(parser.getUsedVariables());
  }
  this.flags = flags.build();
  this.usedVariables = usedVariables.build();
}
@Override
public Collection<? extends Object> getPlanningValues() {
  if (planningValues == null) {
    final ImmutableSet.Builder<Visit> valuesBuilder = ImmutableSet.builder();
    for (final Changeset c : changesets) {
      valuesBuilder.addAll(isUndo ? c.getOriginalValues() : c.getTargetValues());
    }
    planningValues = valuesBuilder.build();
  }
  return planningValues;
}
private Set<HivePrivilege> getPrivileges(String user, HiveObjectRef objectReference) {
  ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();
  try (HiveMetastoreClient client = clientProvider.createMetastoreClient()) {
    PrincipalPrivilegeSet privilegeSet = client.getPrivilegeSet(objectReference, user, null);
    if (privilegeSet != null) {
      Map<String, List<PrivilegeGrantInfo>> userPrivileges = privilegeSet.getUserPrivileges();
      if (userPrivileges != null) {
        privileges.addAll(toGrants(userPrivileges.get(user)));
      }
      for (List<PrivilegeGrantInfo> rolePrivileges : privilegeSet.getRolePrivileges().values()) {
        privileges.addAll(toGrants(rolePrivileges));
      }
      // We do not add the group permissions as Hive does not seem to process these
    }
  } catch (TException e) {
    throw new PrestoException(HIVE_METASTORE_ERROR, e);
  }
  return privileges.build();
}
@SuppressWarnings("unchecked") public Set<T> toSet() { ImmutableSet.Builder<T> builder = ImmutableSet.builder(); for (Object obj : elements) { if (obj instanceof List) { builder.addAll((List<T>) obj); } else { builder.add((T) obj); } } return builder.build(); }
@Override
public Set<HivePrivilege> getDatabasePrivileges(String user, String databaseName) {
  ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();
  if (isDatabaseOwner(user, databaseName)) {
    privileges.add(OWNERSHIP);
  }
  privileges.addAll(
      getPrivileges(
          user, new HiveObjectRef(HiveObjectType.DATABASE, databaseName, null, null, null)));
  return privileges.build();
}
private Set<HivePrivilege> loadTablePrivileges(
    String user, String databaseName, String tableName) {
  ImmutableSet.Builder<HivePrivilege> privileges = ImmutableSet.builder();
  if (isTableOwner(user, databaseName, tableName)) {
    privileges.add(OWNERSHIP);
  }
  privileges.addAll(
      getPrivileges(
          user, new HiveObjectRef(HiveObjectType.TABLE, databaseName, tableName, null, null)));
  return privileges.build();
}