Example #1
  private int runTestsExternal(
      final CommandRunnerParams params,
      Build build,
      Iterable<String> command,
      Iterable<TestRule> testRules)
      throws InterruptedException, IOException {
    TestRunningOptions options = getTestRunningOptions(params);

    // Walk the test rules, collecting all the specs.
    List<ExternalTestRunnerTestSpec> specs = Lists.newArrayList();
    for (TestRule testRule : testRules) {
      if (!(testRule instanceof ExternalTestRunnerRule)) {
        params
            .getBuckEventBus()
            .post(
                ConsoleEvent.severe(
                    String.format(
                        "Test %s does not support external test running",
                        testRule.getBuildTarget())));
        return 1;
      }
      ExternalTestRunnerRule rule = (ExternalTestRunnerRule) testRule;
      specs.add(rule.getExternalTestRunnerSpec(build.getExecutionContext(), options));
    }

    // Serialize the specs to a file to pass into the test runner.
    Path infoFile =
        params
            .getCell()
            .getFilesystem()
            .resolve(BuckConstant.SCRATCH_PATH.resolve("external_runner_specs.json"));
    Files.createDirectories(infoFile.getParent());
    Files.deleteIfExists(infoFile);
    params.getObjectMapper().writerWithDefaultPrettyPrinter().writeValue(infoFile.toFile(), specs);

    // Launch and run the external test runner, forwarding its stdout/stderr to the console.
    // We wait for it to complete and then return its exit code.
    ListeningProcessExecutor processExecutor = new ListeningProcessExecutor();
    ProcessExecutorParams processExecutorParams =
        ProcessExecutorParams.builder()
            .addAllCommand(command)
            .addAllCommand(withDashArguments)
            .addCommand("--buck-test-info", infoFile.toString())
            .setDirectory(params.getCell().getFilesystem().getRootPath().toFile())
            .build();
    ForwardingProcessListener processListener =
        new ForwardingProcessListener(
            Channels.newChannel(params.getConsole().getStdOut()),
            Channels.newChannel(params.getConsole().getStdErr()));
    ListeningProcessExecutor.LaunchedProcess process =
        processExecutor.launchProcess(processExecutorParams, processListener);
    try {
      return processExecutor.waitForProcess(process, Long.MAX_VALUE, TimeUnit.DAYS);
    } finally {
      processExecutor.destroyProcess(process, /* force */ false);
      processExecutor.waitForProcess(process, Long.MAX_VALUE, TimeUnit.DAYS);
    }
  }
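For readers unfamiliar with Buck's ListeningProcessExecutor, the launch-and-forward pattern above can be sketched with the JDK's ProcessBuilder alone. This is a simplified illustration, not Buck's API: the class and method names below are made up, and inheritIO() stands in for the ForwardingProcessListener used above.

import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

public class ExternalRunnerLauncherSketch {
  /**
   * Launches an external test runner in the given working directory, forwards its
   * stdout/stderr to this process's console, and returns its exit code.
   */
  static int runExternal(List<String> runnerCommand, Path specsFile, Path workingDir)
      throws IOException, InterruptedException {
    List<String> command = new ArrayList<>(runnerCommand);
    command.add("--buck-test-info");
    command.add(specsFile.toString());
    Process process =
        new ProcessBuilder(command)
            .directory(workingDir.toFile())
            .inheritIO() // forward stdout/stderr directly to the parent console
            .start();
    return process.waitFor(); // wait for completion and return the exit code
  }
}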
Example #2
 public static int parseAndWriteBuckCompatibleDepfile(
     ExecutionContext context,
     ProjectFilesystem filesystem,
     HeaderPathNormalizer headerPathNormalizer,
     HeaderVerification headerVerification,
     Path sourceDepFile,
     Path destDepFile,
     Path inputPath,
     Path outputPath)
     throws IOException {
   // Process the dependency file, fixing up the paths, and write it out to its final location.
   // The paths of the headers written out to the depfile are the paths to the symlinks from the
   // root of the repo if the compilation included them from the header search paths pointing to
   // the symlink trees, or paths to headers relative to the source file if the compilation
   // included them using source relative include paths. To handle both cases we check for the
   // prerequisites both in the values and the keys of the replacement map.
   Logger.get(Depfiles.class).debug("Processing dependency file %s as Makefile", sourceDepFile);
   ImmutableMap<String, Object> params =
       ImmutableMap.<String, Object>of("input", inputPath, "output", outputPath);
   try (InputStream input = filesystem.newFileInputStream(sourceDepFile);
       BufferedReader reader = new BufferedReader(new InputStreamReader(input));
       OutputStream output = filesystem.newFileOutputStream(destDepFile);
       BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output));
       SimplePerfEvent.Scope perfEvent =
           SimplePerfEvent.scope(
               context.getBuckEventBus(), PerfEventId.of("depfile-parse"), params)) {
     ImmutableList<String> prereqs = Depfiles.parseDepfile(reader).getPrereqs();
     // Skip the first prereq, as it's the input source file.
     Preconditions.checkState(inputPath.toString().equals(prereqs.get(0)));
     ImmutableList<String> headers = prereqs.subList(1, prereqs.size());
     for (String rawHeader : headers) {
       Path header = Paths.get(rawHeader).normalize();
       Optional<Path> absolutePath =
           headerPathNormalizer.getAbsolutePathForUnnormalizedPath(header);
       if (absolutePath.isPresent()) {
         Preconditions.checkState(absolutePath.get().isAbsolute());
         writer.write(absolutePath.get().toString());
         writer.newLine();
       } else if (headerVerification.getMode() != HeaderVerification.Mode.IGNORE
           && !headerVerification.isWhitelisted(header.toString())) {
         context
             .getBuckEventBus()
             .post(
                 ConsoleEvent.create(
                     headerVerification.getMode() == HeaderVerification.Mode.ERROR
                         ? Level.SEVERE
                         : Level.WARNING,
                     "%s: included an untracked header \"%s\"",
                     inputPath,
                     header));
         if (headerVerification.getMode() == HeaderVerification.Mode.ERROR) {
           return 1;
         }
       }
     }
   }
   return 0;
 }
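Depfiles.parseDepfile() is Buck-internal and not shown above. As a rough, standalone sketch of the Makefile-style depfile format it handles ("out.o: in.c a.h b.h \" with backslash line continuations), assuming no escaped spaces or other Make quoting:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

final class SimpleDepfileParserSketch {
  /**
   * Parses a Makefile-style depfile and returns the prerequisite list (the entries after
   * the colon). Escaped spaces and other Make quoting rules are not handled here.
   */
  static List<String> parsePrereqs(Path depfile) throws IOException {
    String contents = new String(Files.readAllBytes(depfile), StandardCharsets.UTF_8);
    // Join line continuations ("\" at end of line) into a single logical line.
    String joined = contents.replace("\\\r\n", " ").replace("\\\n", " ");
    int colon = joined.indexOf(':');
    if (colon < 0) {
      throw new IOException("Not a valid depfile: " + depfile);
    }
    List<String> prereqs = new ArrayList<>();
    for (String token : joined.substring(colon + 1).trim().split("\\s+")) {
      if (!token.isEmpty()) {
        prereqs.add(token);
      }
    }
    return prereqs;
  }
}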
Example #3
  /**
   * Creates a zip file of the metadata and recorded artifacts and stores it in the artifact cache.
   */
  public void performUploadToArtifactCache(
      ImmutableSet<RuleKey> ruleKeys, ArtifactCache artifactCache, BuckEventBus eventBus)
      throws InterruptedException {

    // Skip all of this if caching is disabled. Although artifactCache.store() will be a noop,
    // building up the zip is wasted I/O.
    if (!artifactCache.isStoreSupported()) {
      return;
    }

    ArtifactCompressionEvent.Started started =
        ArtifactCompressionEvent.started(ArtifactCompressionEvent.Operation.COMPRESS, ruleKeys);
    eventBus.post(started);

    final Path zip;
    ImmutableSet<Path> pathsToIncludeInZip = ImmutableSet.of();
    ImmutableMap<String, String> buildMetadata;
    try {
      pathsToIncludeInZip = getRecordedDirsAndFiles();
      zip =
          Files.createTempFile(
              "buck_artifact_" + MoreFiles.sanitize(buildTarget.getShortName()), ".zip");
      buildMetadata = getBuildMetadata();
      projectFilesystem.createZip(pathsToIncludeInZip, zip, ImmutableMap.<Path, String>of());
    } catch (IOException e) {
      eventBus.post(
          ConsoleEvent.info(
              "Failed to create zip for %s containing:\n%s",
              buildTarget, Joiner.on('\n').join(ImmutableSortedSet.copyOf(pathsToIncludeInZip))));
      e.printStackTrace();
      return;
    } finally {
      eventBus.post(ArtifactCompressionEvent.finished(started));
    }

    // Store the artifact, including any additional metadata.
    ListenableFuture<Void> storeFuture =
        artifactCache.store(ruleKeys, buildMetadata, BorrowablePath.notBorrowablePath(zip));
    storeFuture.addListener(
        new Runnable() {
          @Override
          public void run() {
            try {
              Files.deleteIfExists(zip);
            } catch (IOException e) {
              throw new RuntimeException(e);
            }
          }
        },
        directExecutor());
  }
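projectFilesystem.createZip() above is Buck-internal; as a rough analogue (an assumption about what building the artifact zip involves), here is a minimal java.util.zip sketch that packs a set of regular files. It ignores directories and whatever entry-name remapping the ImmutableMap argument controls.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

final class SimpleZipSketch {
  /** Writes each of the given regular files into a zip, keyed by its path string. */
  static void zipFiles(Set<Path> files, Path zipFile) throws IOException {
    try (OutputStream out = Files.newOutputStream(zipFile);
        ZipOutputStream zip = new ZipOutputStream(out)) {
      for (Path file : files) {
        zip.putNextEntry(new ZipEntry(file.toString()));
        Files.copy(file, zip);
        zip.closeEntry();
      }
    }
  }
}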
Example #4
  private int runTestsInternal(
      CommandRunnerParams params,
      BuildEngine buildEngine,
      Build build,
      Iterable<TestRule> testRules)
      throws InterruptedException, IOException {

    if (!withDashArguments.isEmpty()) {
      params
          .getBuckEventBus()
          .post(
              ConsoleEvent.severe("Unexpected arguments after \"--\" when using internal runner"));
      return 1;
    }

    ConcurrencyLimit concurrencyLimit =
        new ConcurrencyLimit(
            getNumTestThreads(params.getBuckConfig()), params.getBuckConfig().getLoadLimit());
    try (CommandThreadManager testPool =
        new CommandThreadManager(
            "Test-Run", params.getBuckConfig().getWorkQueueExecutionOrder(), concurrencyLimit)) {
      return TestRunning.runTests(
          params,
          testRules,
          Preconditions.checkNotNull(build.getBuildContext()),
          build.getExecutionContext(),
          getTestRunningOptions(params),
          testPool.getExecutor(),
          buildEngine,
          new DefaultStepRunner(build.getExecutionContext()));
    } catch (ExecutionException e) {
      params
          .getBuckEventBus()
          .post(ConsoleEvent.severe(MoreExceptions.getHumanReadableOrLocalizedMessage(e)));
      return 1;
    }
  }
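ConcurrencyLimit and CommandThreadManager are Buck-internal. The effect of bounding test parallelism can be sketched with a plain fixed-size executor from the JDK; the names below are illustrative only.

import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

final class BoundedTestPoolSketch {
  /** Runs the given test tasks on at most maxThreads threads and waits for all of them. */
  static void runAll(List<Callable<Void>> testTasks, int maxThreads) throws InterruptedException {
    ExecutorService pool = Executors.newFixedThreadPool(maxThreads);
    try {
      pool.invokeAll(testTasks); // returns only once every task has completed
    } finally {
      pool.shutdown();
    }
  }
}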
Example #5
    private void installFiles(
        String filesType,
        ImmutableMap<String, Path> filesToInstallByHash,
        String metadataFileContents,
        String filenameFormat,
        Path destinationDirRelativeToDataRoot)
        throws Exception {
      try (TraceEventLogger ignored1 =
          TraceEventLogger.start(eventBus, "multi_install_" + filesType)) {
        device.createForward(agentPort, agentPort);
        try {
          for (Map.Entry<String, Path> entry : filesToInstallByHash.entrySet()) {
            Path destination =
                destinationDirRelativeToDataRoot.resolve(
                    String.format(filenameFormat, entry.getKey()));
            Path source = entry.getValue();

            try (TraceEventLogger ignored2 =
                TraceEventLogger.start(eventBus, "install_" + filesType)) {
              installFile(device, agentPort, destination, source);
            }
          }
          try (TraceEventLogger ignored3 =
              TraceEventLogger.start(eventBus, "install_" + filesType + "_metadata")) {
            try (NamedTemporaryFile temp = new NamedTemporaryFile("metadata", "tmp")) {
              com.google.common.io.Files.write(
                  metadataFileContents.getBytes(Charsets.UTF_8), temp.get().toFile());
              installFile(
                  device,
                  agentPort,
                  destinationDirRelativeToDataRoot.resolve("metadata.txt"),
                  temp.get());
            }
          }
        } finally {
          try {
            device.removeForward(agentPort, agentPort);
          } catch (AdbCommandRejectedException e) {
            LOG.warn(e, "Failed to remove adb forward on port %d for device %s", agentPort, device);
            eventBus.post(
                ConsoleEvent.warning(
                    "Failed to remove adb forward %d. This is not necessarily a problem\n"
                        + "because it will be recreated during the next exopackage installation.\n"
                        + "See the log for the full exception.",
                    agentPort));
          }
        }
      }
    }
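NamedTemporaryFile above appears to be an AutoCloseable temp-file wrapper internal to Buck. The same write-push-clean-up pattern, sketched with plain JDK APIs (the Uploader interface here is a placeholder, not part of Buck):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

final class MetadataInstallSketch {
  /** Placeholder for whatever pushes a file to the device. */
  interface Uploader {
    void upload(Path file) throws IOException;
  }

  /** Writes metadata to a temp file, hands it to the uploader, and always cleans up. */
  static void installMetadata(String metadataContents, Uploader uploader) throws IOException {
    Path temp = Files.createTempFile("metadata", ".tmp");
    try {
      Files.write(temp, metadataContents.getBytes(StandardCharsets.UTF_8));
      uploader.upload(temp);
    } finally {
      Files.deleteIfExists(temp);
    }
  }
}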
Example #6
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {

    ImmutableList.Builder<Step> commands = ImmutableList.builder();

    commands.add(new MakeCleanDirectoryStep(getProjectFilesystem(), genPath));

    BuildTarget target = getBuildTarget();
    Path outputDirectory = BuildTargets.getScratchPath(target, "__%s.aidl");
    commands.add(new MakeCleanDirectoryStep(getProjectFilesystem(), outputDirectory));

    AidlStep command =
        new AidlStep(
            getProjectFilesystem(),
            target,
            getResolver().getAbsolutePath(aidlFilePath),
            ImmutableSet.of(importPath),
            outputDirectory);
    commands.add(command);

    // Files must ultimately be written to GEN_DIR to be used as source paths.
    Path genDirectory = Paths.get(BuckConstant.GEN_DIR, importPath);

    // Warn the user if the import path is not under the rule's base path.
    if (!importPath.startsWith(target.getBasePath().toString())) {
      // TODO(shs96c): Make this fatal. Give people some time to clean up their rules.
      context
          .getEventBus()
          .post(
              ConsoleEvent.warning(
                  "%s, gen_aidl import path (%s) should be a child of %s",
                  target, importPath, target.getBasePath()));
    }

    commands.add(new MkdirStep(getProjectFilesystem(), genDirectory));

    commands.add(
        new JarDirectoryStep(
            getProjectFilesystem(),
            output,
            ImmutableSortedSet.of(outputDirectory),
            /* main class */ null,
            /* manifest */ null));
    buildableContext.recordArtifact(output);

    return commands.build();
  }
Example #7
 /** Formats a {@link ConsoleEvent} and adds it to {@code lines}. */
 protected void formatConsoleEvent(ConsoleEvent logEvent, ImmutableList.Builder<String> lines) {
   String formattedLine = "";
   if (logEvent.getLevel().equals(Level.INFO)) {
     formattedLine = logEvent.getMessage();
   } else if (logEvent.getLevel().equals(Level.WARNING)) {
     formattedLine = ansi.asWarningText(logEvent.getMessage());
   } else if (logEvent.getLevel().equals(Level.SEVERE)) {
     formattedLine = ansi.asErrorText(logEvent.getMessage());
   }
   if (!formattedLine.isEmpty()) {
     // Split log messages at newlines and add each line individually to keep the line count
     // consistent.
     lines.addAll(Splitter.on("\n").split(formattedLine));
   }
 }
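A small, standalone illustration of the Splitter call relied on above (with Guava on the classpath); splitting on "\n" keeps one list entry per rendered console line:

import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;

final class SplitterDemo {
  public static void main(String[] args) {
    // A two-line message becomes two list entries, so downstream line counting stays consistent.
    ImmutableList<String> lines =
        ImmutableList.copyOf(Splitter.on("\n").split("first line\nsecond line"));
    System.out.println(lines); // [first line, second line]
  }
}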
Example #8
    private boolean shouldAppBeInstalled() throws Exception {
      Optional<PackageInfo> appPackageInfo = getPackageInfo(packageName);
      if (!appPackageInfo.isPresent()) {
        eventBus.post(ConsoleEvent.info("App not installed.  Installing now."));
        return true;
      }

      LOG.debug("App path: %s", appPackageInfo.get().apkPath);
      String installedAppSignature = getInstalledAppSignature(appPackageInfo.get().apkPath);
      String localAppSignature =
          AgentUtil.getJarSignature(
              apkRule.getProjectFilesystem().resolve(apkRule.getApkPath()).toString());
      LOG.debug("Local app signature: %s", localAppSignature);
      LOG.debug("Remote app signature: %s", installedAppSignature);

      if (!installedAppSignature.equals(localAppSignature)) {
        LOG.debug("App signatures do not match.  Must re-install.");
        return true;
      }

      LOG.debug("App signatures match.  No need to install.");
      return false;
    }
Example #9
  @Override
  public int runWithoutHelp(CommandRunnerParams params) throws IOException, InterruptedException {
    LOG.debug("Running with arguments %s", getArguments());

    try (CommandThreadManager pool =
        new CommandThreadManager(
            "Test",
            params.getBuckConfig().getWorkQueueExecutionOrder(),
            getConcurrencyLimit(params.getBuckConfig()))) {
      // Post the build-started event, using the Parser's recorded parse start time as its
      // timestamp when available.
      BuildEvent.Started started = BuildEvent.started(getArguments());
      if (params.getParser().getParseStartTime().isPresent()) {
        params.getBuckEventBus().post(started, params.getParser().getParseStartTime().get());
      } else {
        params.getBuckEventBus().post(started);
      }

      // The first step is to parse all of the build files. This will populate the parser and find
      // all of the test rules.
      TargetGraph targetGraph;
      ImmutableSet<BuildTarget> explicitBuildTargets;

      try {

        // If the user asked to run all of the tests, parse all of the build files looking for any
        // test rules.
        if (isRunAllTests()) {
          targetGraph =
              params
                  .getParser()
                  .buildTargetGraphForTargetNodeSpecs(
                      params.getBuckEventBus(),
                      params.getCell(),
                      getEnableProfiling(),
                      pool.getExecutor(),
                      ImmutableList.of(
                          TargetNodePredicateSpec.of(
                              new Predicate<TargetNode<?>>() {
                                @Override
                                public boolean apply(TargetNode<?> input) {
                                  return input.getType().isTestRule();
                                }
                              },
                              BuildFileSpec.fromRecursivePath(Paths.get("")))))
                  .getSecond();
          explicitBuildTargets = ImmutableSet.of();

          // Otherwise, the user specified specific test targets to build and run, so build a graph
          // around these.
        } else {
          LOG.debug("Parsing graph for arguments %s", getArguments());
          Pair<ImmutableSet<BuildTarget>, TargetGraph> result =
              params
                  .getParser()
                  .buildTargetGraphForTargetNodeSpecs(
                      params.getBuckEventBus(),
                      params.getCell(),
                      getEnableProfiling(),
                      pool.getExecutor(),
                      parseArgumentsAsTargetNodeSpecs(params.getBuckConfig(), getArguments()));
          targetGraph = result.getSecond();
          explicitBuildTargets = result.getFirst();

          LOG.debug("Got explicit build targets %s", explicitBuildTargets);
          ImmutableSet.Builder<BuildTarget> testTargetsBuilder = ImmutableSet.builder();
          for (TargetNode<?> node : targetGraph.getAll(explicitBuildTargets)) {
            ImmutableSortedSet<BuildTarget> nodeTests = TargetNodes.getTestTargetsForNode(node);
            if (!nodeTests.isEmpty()) {
              LOG.debug("Got tests for target %s: %s", node.getBuildTarget(), nodeTests);
              testTargetsBuilder.addAll(nodeTests);
            }
          }
          ImmutableSet<BuildTarget> testTargets = testTargetsBuilder.build();
          if (!testTargets.isEmpty()) {
            LOG.debug("Got related test targets %s, building new target graph...", testTargets);
            targetGraph =
                params
                    .getParser()
                    .buildTargetGraph(
                        params.getBuckEventBus(),
                        params.getCell(),
                        getEnableProfiling(),
                        pool.getExecutor(),
                        Iterables.concat(explicitBuildTargets, testTargets));
            LOG.debug("Finished building new target graph with tests.");
          }
        }

      } catch (BuildTargetException | BuildFileParseException e) {
        params
            .getBuckEventBus()
            .post(ConsoleEvent.severe(MoreExceptions.getHumanReadableOrLocalizedMessage(e)));
        return 1;
      }

      TargetGraphToActionGraph targetGraphToActionGraph =
          new TargetGraphToActionGraph(
              params.getBuckEventBus(), new BuildTargetNodeToBuildRuleTransformer());
      Pair<ActionGraph, BuildRuleResolver> actionGraphAndResolver =
          Preconditions.checkNotNull(targetGraphToActionGraph.apply(targetGraph));

      // Look up all of the test rules in the action graph.
      Iterable<TestRule> testRules =
          Iterables.filter(actionGraphAndResolver.getFirst().getNodes(), TestRule.class);

      // Unless the user requests that we build filtered tests, filter them out here, before
      // the build.
      if (!isBuildFiltered(params.getBuckConfig())) {
        testRules = filterTestRules(params.getBuckConfig(), explicitBuildTargets, testRules);
      }

      if (isDryRun()) {
        printMatchingTestRules(params.getConsole(), testRules);
      }

      CachingBuildEngine cachingBuildEngine =
          new CachingBuildEngine(
              pool.getExecutor(),
              params.getFileHashCache(),
              getBuildEngineMode().or(params.getBuckConfig().getBuildEngineMode()),
              params.getBuckConfig().getDependencySchedulingOrder(),
              params.getBuckConfig().getBuildDepFiles(),
              params.getBuckConfig().getBuildMaxDepFileCacheEntries(),
              actionGraphAndResolver.getSecond());
      try (Build build =
          createBuild(
              params.getBuckConfig(),
              actionGraphAndResolver.getFirst(),
              actionGraphAndResolver.getSecond(),
              params.getAndroidPlatformTargetSupplier(),
              cachingBuildEngine,
              params.getArtifactCache(),
              params.getConsole(),
              params.getBuckEventBus(),
              getTargetDeviceOptional(),
              params.getPlatform(),
              params.getEnvironment(),
              params.getObjectMapper(),
              params.getClock(),
              Optional.of(getAdbOptions(params.getBuckConfig())),
              Optional.of(getTargetDeviceOptions()))) {

        // Build all of the test rules.
        int exitCode =
            build.executeAndPrintFailuresToEventBus(
                testRules,
                isKeepGoing(),
                params.getBuckEventBus(),
                params.getConsole(),
                getPathToBuildReport(params.getBuckConfig()));
        params.getBuckEventBus().post(BuildEvent.finished(started, exitCode));
        if (exitCode != 0) {
          return exitCode;
        }

        // If the user requests that we build tests that we filter out, then we perform
        // the filtering here, after we've done the build but before we run the tests.
        if (isBuildFiltered(params.getBuckConfig())) {
          testRules = filterTestRules(params.getBuckConfig(), explicitBuildTargets, testRules);
        }

        // Once all of the rules are built, then run the tests.
        Optional<ImmutableList<String>> externalTestRunner =
            params.getBuckConfig().getExternalTestRunner();
        if (externalTestRunner.isPresent()) {
          return runTestsExternal(params, build, externalTestRunner.get(), testRules);
        }
        return runTestsInternal(params, cachingBuildEngine, build, testRules);
      }
    }
  }
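The Iterables.filter(nodes, TestRule.class) call above keeps only the elements of the requested type. A self-contained Guava illustration, using placeholder Rule/TestRule types rather than Buck's:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;

final class FilterByTypeDemo {
  interface Rule {}
  static final class TestRule implements Rule {}
  static final class LibraryRule implements Rule {}

  public static void main(String[] args) {
    ImmutableList<Rule> nodes =
        ImmutableList.of(new TestRule(), new LibraryRule(), new TestRule());
    // Keeps only instances of TestRule, as in the action graph scan above.
    Iterable<TestRule> testRules = Iterables.filter(nodes, TestRule.class);
    System.out.println(Iterables.size(testRules)); // prints 2
  }
}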
Example #10
  private CacheResult tryToFetchArtifactFromBuildCacheAndOverlayOnTopOfProjectFilesystem(
      BuildRule rule,
      RuleKey ruleKey,
      BuildInfoRecorder buildInfoRecorder,
      ArtifactCache artifactCache,
      ProjectFilesystem filesystem,
      BuildContext buildContext)
      throws InterruptedException {

    // Create a temp file whose extension must be ".zip" for Filesystems.newFileSystem() to infer
    // that we are creating a zip-based FileSystem.
    Path zipFile;
    try {
      zipFile =
          Files.createTempFile(
              "buck_artifact_" + MoreFiles.sanitize(rule.getBuildTarget().getShortName()), ".zip");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    // TODO(mbolin): Change ArtifactCache.fetch() so that it returns a File instead of taking one.
    // Then we could download directly from the remote cache into the on-disk cache and unzip it
    // from there.
    CacheResult cacheResult =
        buildInfoRecorder.fetchArtifactForBuildable(ruleKey, zipFile, artifactCache);
    if (!cacheResult.getType().isSuccess()) {
      try {
        Files.delete(zipFile);
      } catch (IOException e) {
        LOG.warn(e, "failed to delete %s", zipFile);
      }
      return cacheResult;
    }

    // We unzip the file in the root of the project directory.
    // Ideally, the following would work:
    //
    // Path pathToZip = Paths.get(zipFile.getAbsolutePath());
    // FileSystem fs = FileSystems.newFileSystem(pathToZip, /* loader */ null);
    // Path root = Iterables.getOnlyElement(fs.getRootDirectories());
    // MoreFiles.copyRecursively(root, projectRoot);
    //
    // Unfortunately, this does not appear to work in practice, because MoreFiles fails when trying
    // to resolve a Path for a zip entry against a file Path on disk.
    ArtifactCacheEvent.Started started =
        ArtifactCacheEvent.started(
            ArtifactCacheEvent.Operation.DECOMPRESS, ImmutableSet.of(ruleKey));
    buildContext.getEventBus().post(started);
    try {
      Unzip.extractZipFile(
          zipFile.toAbsolutePath(),
          filesystem,
          Unzip.ExistingFileMode.OVERWRITE_AND_CLEAN_DIRECTORIES);

      // We only delete the ZIP file when it has been unzipped successfully. Otherwise, we leave it
      // around for debugging purposes.
      Files.delete(zipFile);

      if (cacheResult.getType() == CacheResult.Type.HIT) {

        // If we have a hit, also write out the build metadata.
        Path metadataDir = BuildInfo.getPathToMetadataDirectory(rule.getBuildTarget());
        for (Map.Entry<String, String> ent : cacheResult.getMetadata().entrySet()) {
          Path dest = metadataDir.resolve(ent.getKey());
          filesystem.createParentDirs(dest);
          filesystem.writeContentsToPath(ent.getValue(), dest);
        }
      }

    } catch (IOException e) {
      // In the wild, we have seen some inexplicable failures during this step. For now, we try to
      // give the user as much information as we can to debug the issue, but return CacheResult.MISS
      // so that Buck will fall back on doing a local build.
      buildContext
          .getEventBus()
          .post(
              ConsoleEvent.warning(
                  "Failed to unzip the artifact for %s at %s.\n"
                      + "The rule will be built locally, "
                      + "but here is the stacktrace of the failed unzip call:\n"
                      + rule.getBuildTarget(),
                  zipFile.toAbsolutePath(),
                  Throwables.getStackTraceAsString(e)));
      return CacheResult.miss();
    } finally {
      buildContext.getEventBus().post(ArtifactCacheEvent.finished(started));
    }

    return cacheResult;
  }
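Unzip.extractZipFile() is Buck-internal. A rough standard-library analogue of extracting an archive under a destination root, shown only to illustrate the decompress step (it does not implement the OVERWRITE_AND_CLEAN_DIRECTORIES semantics):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

final class SimpleUnzipSketch {
  /** Extracts a zip archive under destinationRoot, overwriting existing files. */
  static void extract(Path zipFile, Path destinationRoot) throws IOException {
    Path root = destinationRoot.toAbsolutePath().normalize();
    try (InputStream in = Files.newInputStream(zipFile);
        ZipInputStream zip = new ZipInputStream(in)) {
      for (ZipEntry entry = zip.getNextEntry(); entry != null; entry = zip.getNextEntry()) {
        Path target = root.resolve(entry.getName()).normalize();
        if (!target.startsWith(root)) {
          throw new IOException("Zip entry escapes destination: " + entry.getName());
        }
        if (entry.isDirectory()) {
          Files.createDirectories(target);
        } else {
          Files.createDirectories(target.getParent());
          Files.copy(zip, target, StandardCopyOption.REPLACE_EXISTING);
        }
      }
    }
  }
}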
Example #11
  /**
   * Queries Watchman for file change events. If too many events are pending or an error occurs, an
   * overflow event is posted to the EventBus, signalling that events may have been lost (and so
   * caches must typically be cleared to avoid inconsistency). Interruptions and IOExceptions are
   * propagated to callers, but if subscribers handle overflow events conservatively, no other
   * remedial action is typically required.
   */
  @Override
  public void postEvents(BuckEventBus buckEventBus) throws IOException, InterruptedException {
    ProcessExecutor.LaunchedProcess watchmanProcess =
        processExecutor.launchProcess(
            ProcessExecutorParams.builder()
                .addCommand("watchman", "--server-encoding=json", "--no-pretty", "-j")
                .build());
    try {
      LOG.debug("Writing query to Watchman: %s", query);
      watchmanProcess.getOutputStream().write(query.getBytes(Charsets.US_ASCII));
      watchmanProcess.getOutputStream().close();
      LOG.debug("Parsing JSON output from Watchman");
      final long parseStartTimeMillis = clock.currentTimeMillis();
      InputStream jsonInput = watchmanProcess.getInputStream();
      if (LOG.isVerboseEnabled()) {
        byte[] fullResponse = ByteStreams.toByteArray(jsonInput);
        jsonInput.close();
        jsonInput = new ByteArrayInputStream(fullResponse);
        LOG.verbose("Full JSON: " + new String(fullResponse, Charsets.UTF_8).trim());
      }
      JsonParser jsonParser = objectMapper.getJsonFactory().createJsonParser(jsonInput);
      PathEventBuilder builder = new PathEventBuilder();
      JsonToken token = jsonParser.nextToken();
      /*
       * Watchman returns changes as an array of JSON objects with potentially unstable key
       * ordering:
       * {
       *     "files": [
       *     {
       *         "new": false,
       *         "exists": true,
       *         "name": "bin/buckd",
       *     },
       *     ]
       * }
       * A simple way to parse these changes is to collect the relevant values from each object
       * in a builder and then build an event when the end of a JSON object is reached. When the end
       * of the enclosing JSON object is processed the builder will not contain a complete event, so
       * the object end token will be ignored.
       */
      int eventCount = 0;
      while (token != null) {
        boolean shouldOverflow = false;
        if (eventCount > overflow) {
          LOG.warn(
              "Received too many events from Watchmen (%d > overflow max %d), posting overflow "
                  + "event and giving up.",
              eventCount, overflow);
          shouldOverflow = true;
        } else {
          long elapsedMillis = clock.currentTimeMillis() - parseStartTimeMillis;
          if (elapsedMillis >= timeoutMillis) {
            LOG.warn(
                "Parsing took too long (timeout %d ms), posting overflow event and giving up.",
                timeoutMillis);
            shouldOverflow = true;
          }
        }

        if (shouldOverflow) {
          postWatchEvent(createOverflowEvent());
          processExecutor.destroyLaunchedProcess(watchmanProcess);
          processExecutor.waitForLaunchedProcess(watchmanProcess);
          return;
        }

        switch (token) {
          case FIELD_NAME:
            String fieldName = jsonParser.getCurrentName();
            switch (fieldName) {
              case "is_fresh_instance":
                // Force caches to be invalidated --- we have no idea what's happening.
                Boolean newInstance = jsonParser.nextBooleanValue();
                if (newInstance) {
                  LOG.info(
                      "Fresh watchman instance detected. "
                          + "Posting overflow event to flush caches.");
                  postWatchEvent(createOverflowEvent());
                }
                break;

              case "name":
                builder.setPath(Paths.get(jsonParser.nextTextValue()));
                break;
              case "new":
                if (jsonParser.nextBooleanValue()) {
                  builder.setCreationEvent();
                }
                break;
              case "exists":
                if (!jsonParser.nextBooleanValue()) {
                  builder.setDeletionEvent();
                }
                break;
              case "error":
                WatchmanWatcherException e =
                    new WatchmanWatcherException(jsonParser.nextTextValue());
                LOG.error(
                    e, "Error in Watchman output. Posting an overflow event to flush the caches");
                postWatchEvent(createOverflowEvent());
                throw e;
              case "warning":
                String message = jsonParser.nextTextValue();
                buckEventBus.post(
                    ConsoleEvent.warning("Watchman has produced a warning: %s", message));
                LOG.warn("Watchman has produced a warning: %s", message);
                break;
            }
            break;
          case END_OBJECT:
            if (builder.canBuild()) {
              postWatchEvent(builder.build());
              ++eventCount;
            }
            builder = new PathEventBuilder();
            break;
            // $CASES-OMITTED$
          default:
            break;
        }
        token = jsonParser.nextToken();
      }
      int watchmanExitCode;
      LOG.debug("Posted %d Watchman events. Waiting for subprocess to exit...", eventCount);
      watchmanExitCode = processExecutor.waitForLaunchedProcess(watchmanProcess);
      if (watchmanExitCode != 0) {
        LOG.error("Watchman exited with error code %d", watchmanExitCode);
        postWatchEvent(createOverflowEvent()); // Events may have been lost, signal overflow.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        ByteStreams.copy(watchmanProcess.getErrorStream(), buffer);
        throw new WatchmanWatcherException(
            "Watchman failed with exit code " + watchmanExitCode + ": " + buffer.toString());
      } else {
        LOG.debug("Watchman exited cleanly.");
      }
    } catch (InterruptedException e) {
      LOG.warn(e, "Killing Watchman process on interrupted exception");
      postWatchEvent(createOverflowEvent()); // Events may have been lost, signal overflow.
      processExecutor.destroyLaunchedProcess(watchmanProcess);
      processExecutor.waitForLaunchedProcess(watchmanProcess);
      Thread.currentThread().interrupt();
      throw e;
    } catch (IOException e) {
      LOG.error(e, "Killing Watchman process on I/O exception");
      postWatchEvent(createOverflowEvent()); // Events may have been lost, signal overflow.
      processExecutor.destroyLaunchedProcess(watchmanProcess);
      processExecutor.waitForLaunchedProcess(watchmanProcess);
      throw e;
    }
  }
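The token loop above can be reproduced in miniature with the Jackson 2.x streaming API (the code above reaches the same JsonParser through the ObjectMapper's factory). This sketch parses a hard-coded Watchman-style response and prints one line per file entry:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import java.io.IOException;

final class WatchmanResponseParseSketch {
  public static void main(String[] args) throws IOException {
    String json = "{\"files\": [{\"new\": false, \"exists\": true, \"name\": \"bin/buckd\"}]}";
    try (JsonParser parser = new JsonFactory().createParser(json)) {
      String name = null;
      boolean exists = true;
      for (JsonToken token = parser.nextToken(); token != null; token = parser.nextToken()) {
        if (token == JsonToken.FIELD_NAME) {
          switch (parser.getCurrentName()) {
            case "name":
              name = parser.nextTextValue();
              break;
            case "exists":
              exists = parser.nextBooleanValue();
              break;
            default:
              break;
          }
        } else if (token == JsonToken.END_OBJECT && name != null) {
          // One file entry is complete; report it and reset for the next object.
          System.out.println(name + (exists ? " changed" : " deleted"));
          name = null;
          exists = true;
        }
      }
    }
  }
}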