Example #1
  /**
   * Performs the initial phases 0-2 of the build: Setup, Loading and Analysis.
   *
   * <p>Postcondition: On success, populates the BuildRequest's set of targets to build.
   *
   * @return the result of the analysis phase; if loading or analysis errors were encountered and
   *     request.keepGoing is set, the error message is carried in the result (see {@link
   *     AnalysisResult#getError}) instead of being thrown.
   * @throws InterruptedException if the current thread was interrupted.
   * @throws ViewCreationFailedException if analysis failed for any reason.
   */
  private AnalysisResult runAnalysisPhase(
      BuildRequest request,
      LoadingResult loadingResult,
      BuildConfigurationCollection configurations)
      throws InterruptedException, ViewCreationFailedException {
    Stopwatch timer = Stopwatch.createStarted();
    if (!request.getBuildOptions().performAnalysisPhase) {
      getReporter().handle(Event.progress("Loading complete."));
      LOG.info("No analysis requested, so finished");
      return AnalysisResult.EMPTY;
    }

    getReporter().handle(Event.progress("Loading complete.  Analyzing..."));
    Profiler.instance().markPhase(ProfilePhase.ANALYZE);

    AnalysisResult analysisResult =
        getView()
            .update(
                loadingResult,
                configurations,
                request.getAspects(),
                request.getViewOptions(),
                request.getTopLevelArtifactContext(),
                getReporter(),
                getEventBus());

    // TODO(bazel-team): Merge these into one event.
    getEventBus()
        .post(
            new AnalysisPhaseCompleteEvent(
                analysisResult.getTargetsToBuild(),
                getView().getTargetsVisited(),
                timer.stop().elapsed(TimeUnit.MILLISECONDS)));
    getEventBus()
        .post(
            new TestFilteringCompleteEvent(
                analysisResult.getTargetsToBuild(), analysisResult.getTargetsToTest()));

    // Check licenses.
    // We check licenses if the first target configuration has license checking enabled. Right now,
    // it is not possible to have multiple target configurations with different settings for this
    // flag, which allows us to take this short cut.
    boolean checkLicenses = configurations.getTargetConfigurations().get(0).checkLicenses();
    if (checkLicenses) {
      Profiler.instance().markPhase(ProfilePhase.LICENSE);
      validateLicensingForTargets(
          analysisResult.getTargetsToBuild(), request.getViewOptions().keepGoing);
    }

    return analysisResult;
  }
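
runAnalysisPhase does not hand its results directly to interested components; it posts them on an EventBus. Below is a minimal sketch of that publish/subscribe pattern using Guava's EventBus. SimpleAnalysisCompleteEvent and AnalysisListener are hypothetical stand-ins for illustration only, not Bazel's actual AnalysisPhaseCompleteEvent or its consumers.

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

// Hypothetical, stripped-down stand-in for an analysis-complete event.
final class SimpleAnalysisCompleteEvent {
  final int targetCount;
  final long elapsedMillis;

  SimpleAnalysisCompleteEvent(int targetCount, long elapsedMillis) {
    this.targetCount = targetCount;
    this.elapsedMillis = elapsedMillis;
  }
}

final class AnalysisListener {
  // Guava invokes this for every SimpleAnalysisCompleteEvent posted to the bus.
  @Subscribe
  public void onAnalysisComplete(SimpleAnalysisCompleteEvent event) {
    System.out.printf("Analyzed %d targets in %d ms%n", event.targetCount, event.elapsedMillis);
  }
}

final class EventBusSketch {
  public static void main(String[] args) {
    EventBus eventBus = new EventBus();
    eventBus.register(new AnalysisListener());
    // The producer side mirrors the getEventBus().post(...) calls in runAnalysisPhase.
    eventBus.post(new SimpleAnalysisCompleteEvent(42, 1500L));
  }
}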
Example #2
 private void reportTargets(AnalysisResult analysisResult) {
   Collection<ConfiguredTarget> targetsToBuild = analysisResult.getTargetsToBuild();
   Collection<ConfiguredTarget> targetsToTest = analysisResult.getTargetsToTest();
   if (targetsToTest != null) {
     int testCount = targetsToTest.size();
     int targetCount = targetsToBuild.size() - testCount;
     if (targetCount == 0) {
       getReporter()
           .handle(
               Event.info(
                   "Found "
                       + testCount
                       + (testCount == 1 ? " test target..." : " test targets...")));
     } else {
       getReporter()
           .handle(
               Event.info(
                   "Found "
                       + targetCount
                       + (targetCount == 1 ? " target and " : " targets and ")
                       + testCount
                       + (testCount == 1 ? " test target..." : " test targets...")));
     }
   } else {
     int targetCount = targetsToBuild.size();
     getReporter()
         .handle(
             Event.info(
                 "Found " + targetCount + (targetCount == 1 ? " target..." : " targets...")));
   }
 }
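
The messages reportTargets produces are easiest to see with concrete counts. The following self-contained sketch reproduces only the string-building logic from the method above; the Reporter/Event plumbing is omitted, and a null test count stands in for "no test filtering happened".

// Reproduces only the message construction from reportTargets above.
final class TargetMessageSketch {
  static String message(int targetsToBuild, Integer testTargets) {
    if (testTargets == null) {
      // No test targets were computed; report the plain target count.
      return "Found " + targetsToBuild + (targetsToBuild == 1 ? " target..." : " targets...");
    }
    int testCount = testTargets;
    int targetCount = targetsToBuild - testCount;
    if (targetCount == 0) {
      return "Found " + testCount + (testCount == 1 ? " test target..." : " test targets...");
    }
    return "Found "
        + targetCount
        + (targetCount == 1 ? " target and " : " targets and ")
        + testCount
        + (testCount == 1 ? " test target..." : " test targets...");
  }

  public static void main(String[] args) {
    System.out.println(message(1, null)); // Found 1 target...
    System.out.println(message(5, 5));    // Found 5 test targets...
    System.out.println(message(7, 2));    // Found 5 targets and 2 test targets...
  }
}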
Example #3
  /**
   * The crux of the build system. Builds the targets specified in the request using the specified
   * Executor.
   *
   * <p>Performs loading, analysis and execution for the specified set of targets, honoring the
   * configuration options in the BuildRequest. Returns normally iff successful, throws an exception
   * otherwise.
   *
   * <p>Callers must ensure that {@link #stopRequest} is called after this method, even if it
   * throws.
   *
   * <p>The caller is responsible for setting up and syncing the package cache.
   *
   * <p>During this function's execution, the actualTargets and successfulTargets fields of the
   * request object are set.
   *
   * @param request the build request that this build tool is servicing, which specifies various
   *     options; during this method's execution, the actualTargets and successfulTargets fields of
   *     the request object are populated
   * @param result the build result that is the mutable result of this build
   * @param validator target validator
   */
  public void buildTargets(BuildRequest request, BuildResult result, TargetValidator validator)
      throws BuildFailedException, LocalEnvironmentException, InterruptedException,
          ViewCreationFailedException, TargetParsingException, LoadingFailedException,
          ExecutorInitException, AbruptExitException, InvalidConfigurationException,
          TestExecException {
    validateOptions(request);
    BuildOptions buildOptions = runtime.createBuildOptions(request);
    // Sync the package manager before sending the BuildStartingEvent in runLoadingPhase()
    runtime.setupPackageCache(
        request.getPackageCacheOptions(), DefaultsPackage.getDefaultsPackageContent(buildOptions));

    ExecutionTool executionTool = null;
    LoadingResult loadingResult = null;
    BuildConfigurationCollection configurations = null;
    try {
      getEventBus().post(new BuildStartingEvent(runtime.getOutputFileSystem(), request));
      LOG.info("Build identifier: " + request.getId());
      executionTool = new ExecutionTool(runtime, request);
      if (needsExecutionPhase(request.getBuildOptions())) {
        // Initialize the execution tool early if we need it. This hides the latency of setting up
        // the execution backends.
        executionTool.init();
      }

      // Loading phase.
      loadingResult = runLoadingPhase(request, validator);

      // Create the build configurations.
      if (!request.getMultiCpus().isEmpty()) {
        getReporter()
            .handle(
                Event.warn(
                    "The --experimental_multi_cpu option is _very_ experimental and only intended for "
                        + "internal testing at this time. If you do not work on the build tool, then you "
                        + "should stop now!"));
        if (!"build".equals(request.getCommandName()) && !"test".equals(request.getCommandName())) {
          throw new InvalidConfigurationException(
              "The experimental setting to select multiple CPUs is only supported for 'build' and "
                  + "'test' right now!");
        }
      }
      configurations =
          getConfigurations(
              buildOptions, request.getMultiCpus(), request.getViewOptions().keepGoing);

      getEventBus().post(new ConfigurationsCreatedEvent(configurations));
      runtime.throwPendingException();
      if (configurations.getTargetConfigurations().size() == 1) {
        // TODO(bazel-team): This is not optimal - we retain backwards compatibility in the case
        // where there's only a single configuration, but we don't send an event in the multi-config
        // case. Can we do better? [multi-config]
        getEventBus()
            .post(
                new MakeEnvironmentEvent(
                    configurations.getTargetConfigurations().get(0).getMakeEnvironment()));
      }
      LOG.info("Configurations created");

      // Analysis phase.
      AnalysisResult analysisResult = runAnalysisPhase(request, loadingResult, configurations);
      result.setActualTargets(analysisResult.getTargetsToBuild());
      result.setTestTargets(analysisResult.getTargetsToTest());

      checkTargetEnvironmentRestrictions(
          analysisResult.getTargetsToBuild(), runtime.getPackageManager());
      reportTargets(analysisResult);

      // Execution phase.
      if (needsExecutionPhase(request.getBuildOptions())) {
        runtime.getSkyframeExecutor().injectTopLevelContext(request.getTopLevelArtifactContext());
        executionTool.executeBuild(
            request.getId(),
            analysisResult,
            result,
            runtime.getSkyframeExecutor(),
            configurations,
            transformPackageRoots(loadingResult.getPackageRoots()));
      }

      String delayedErrorMsg = analysisResult.getError();
      if (delayedErrorMsg != null) {
        throw new BuildFailedException(delayedErrorMsg);
      }
    } catch (RuntimeException e) {
      // Print an error message for unchecked runtime exceptions. This does not concern Error
      // subclasses such as OutOfMemoryError.
      request
          .getOutErr()
          .printErrLn("Unhandled exception thrown during build; message: " + e.getMessage());
      throw e;
    } finally {
      // Delete dirty nodes to ensure that they do not accumulate indefinitely.
      long versionWindow = request.getViewOptions().versionWindowForDirtyNodeGc;
      if (versionWindow != -1) {
        runtime.getSkyframeExecutor().deleteOldNodes(versionWindow);
      }

      if (executionTool != null) {
        executionTool.shutdown();
      }
      // The workspace status actions will not run with certain flags, or if an error
      // occurs early in the build. Tell a lie so that the event is not missing.
      // If multiple build_info events are sent, only the first is kept, so this does not harm
      // successful runs (which use the workspace status action).
      getEventBus()
          .post(
              new BuildInfoEvent(
                  runtime.getWorkspaceStatusActionFactory().createDummyWorkspaceStatus()));
    }

    if (loadingResult != null && loadingResult.hasTargetPatternError()) {
      throw new BuildFailedException(
          "execution phase successful, but there were errors parsing the target pattern");
    }
  }
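
The overall shape of buildTargets is as important as its details: loading, then analysis, then execution (only if requested), with a catch that reports unchecked exceptions before rethrowing and a finally block that always shuts down the execution tool and posts the fallback build-info event. The sketch below compresses just that control flow into hypothetical placeholder types; it is not Bazel API.

// Hypothetical skeleton mirroring only the control flow of buildTargets above.
final class BuildFlowSketch {
  interface Phase {
    void run() throws Exception;
  }

  static void build(Phase loading, Phase analysis, Phase execution, boolean needsExecution,
      Runnable cleanup) throws Exception {
    try {
      loading.run();       // corresponds to runLoadingPhase(...)
      analysis.run();      // corresponds to runAnalysisPhase(...)
      if (needsExecution) {
        execution.run();   // corresponds to executionTool.executeBuild(...)
      }
    } catch (RuntimeException e) {
      // Unchecked exceptions are reported before being rethrown, as in the original.
      System.err.println("Unhandled exception thrown during build; message: " + e.getMessage());
      throw e;
    } finally {
      // Cleanup always runs: the original shuts down the execution tool and posts the
      // fallback build-info event here so downstream consumers never miss it.
      cleanup.run();
    }
  }
}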