static Range<Rational> scaleRange(Range<Rational> range, int num, int den) {
   if (num == den) {
     return range;
   }
   return Range.create(
       scaleRatio(range.getLower(), num, den), scaleRatio(range.getUpper(), num, den));
 }
 static Range<Long> parseLongRange(Object o, Range<Long> fallback) {
   try {
     String s = (String) o;
     int ix = s.indexOf('-');
     if (ix >= 0) {
       return Range.create(
           Long.parseLong(s.substring(0, ix), 10), Long.parseLong(s.substring(ix + 1), 10));
     }
     long value = Long.parseLong(s);
     return Range.create(value, value);
   } catch (ClassCastException e) {
   } catch (NumberFormatException e) {
   } catch (NullPointerException e) {
     return fallback;
   } catch (IllegalArgumentException e) {
   }
   Log.w(TAG, "could not parse long range '" + o + "'");
   return fallback;
 }
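  // Usage sketch (hypothetical inputs): a '-' separator splits the string into a
  // [lower, upper] pair, a single value yields a point range, and unparsable input
  // logs a warning before returning the fallback. A null input returns the
  // fallback silently, since a missing value is not worth logging:
  //   parseLongRange("120-180", fb)  -> [120, 180]
  //   parseLongRange("45", fb)       -> [45, 45]
  //   parseLongRange("oops", fb)     -> fb (after a Log.w)
  //   parseLongRange(null, fb)       -> fb (no log)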
 static Range<Rational> parseRationalRange(Object o, Range<Rational> fallback) {
   try {
     String s = (String) o;
     int ix = s.indexOf('-');
     if (ix >= 0) {
       return Range.create(
           Rational.parseRational(s.substring(0, ix)),
           Rational.parseRational(s.substring(ix + 1)));
     }
     Rational value = Rational.parseRational(s);
     return Range.create(value, value);
   } catch (ClassCastException e) {
   } catch (NumberFormatException e) {
   } catch (NullPointerException e) {
     return fallback;
   } catch (IllegalArgumentException e) {
   }
   Log.w(TAG, "could not parse rational range '" + o + "'");
   return fallback;
 }
  /**
   * Returns the smallest range that includes this range and the inclusive range specified by {@code
   * [lower, upper]}.
   *
   * <p>See {@link #extend(Range)} for more details.
   *
   * @param lower a non-{@code null} {@code T} reference
   * @param upper a non-{@code null} {@code T} reference
   * @return the extension of this range and the other range.
   * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
   */
  public Range<T> extend(T lower, T upper) {
    checkNotNull(lower, "lower must not be null");
    checkNotNull(upper, "upper must not be null");

    int cmpLower = lower.compareTo(mLower);
    int cmpUpper = upper.compareTo(mUpper);

    if (cmpLower >= 0 && cmpUpper <= 0) {
      // this includes [lower, upper]
      return this;
    } else {
      return Range.create(cmpLower >= 0 ? mLower : lower, cmpUpper <= 0 ? mUpper : upper);
    }
  }
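   // Example (hypothetical values): [3, 7].extend(5, 10) yields [3, 10], while
   // [3, 7].extend(4, 6) returns this same [3, 7] instance unchanged.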
  /**
   * Returns the intersection of this range and the inclusive range specified by {@code [lower,
   * upper]}.
   *
   * <p>See {@link #intersect(Range)} for more details.
   *
   * @param lower a non-{@code null} {@code T} reference
   * @param upper a non-{@code null} {@code T} reference
   * @return the intersection of this range and the other range
   * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
   * @throws IllegalArgumentException if the ranges are disjoint.
   */
  public Range<T> intersect(T lower, T upper) {
    checkNotNull(lower, "lower must not be null");
    checkNotNull(upper, "upper must not be null");

    int cmpLower = lower.compareTo(mLower);
    int cmpUpper = upper.compareTo(mUpper);

    if (cmpLower <= 0 && cmpUpper >= 0) {
      // [lower, upper] includes this
      return this;
    } else {
      return Range.create(cmpLower <= 0 ? mLower : lower, cmpUpper >= 0 ? mUpper : upper);
    }
  }
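   // Example (hypothetical values): [3, 7].intersect(5, 10) yields [5, 7]. Disjoint
   // inputs such as [3, 7].intersect(8, 10) reach Range.create(8, 7), which throws
   // the IllegalArgumentException documented above (lower > upper).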
 /**
  * Returns the index of the range that contains a value in a sorted array of distinct ranges.
  *
  * @param ranges a sorted array of non-intersecting ranges in ascending order
  * @param value the value to search for
   * @return the index of the range that contains {@code value}, if there is one; otherwise
   *     {@code (-1-index)}, where {@code index} is the index of the range immediately following
   *     {@code value}
  */
 public static <T extends Comparable<? super T>> int binarySearchDistinctRanges(
     Range<T>[] ranges, T value) {
   return Arrays.binarySearch(
       ranges,
       Range.create(value, value),
       new Comparator<Range<T>>() {
         @Override
         public int compare(Range<T> lhs, Range<T> rhs) {
           if (lhs.getUpper().compareTo(rhs.getLower()) < 0) {
             return -1;
           } else if (lhs.getLower().compareTo(rhs.getUpper()) > 0) {
             return 1;
           }
           return 0;
         }
       });
 }
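  // Worked example (hypothetical ranges {[1, 3], [5, 7], [9, 10]}): searching for 6
  // returns 1, since 6 lies inside [5, 7]; searching for 4 returns -2, i.e.
  // (-1 - 1), because [5, 7] at index 1 is the range immediately following 4.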
  /**
   * Returns the smallest range that includes this range and another {@code range}.
   *
    * <p>E.g. if a {@code <} b {@code <} c {@code <} d, the extension of the ranges [a, c] and
    * [b, d] is [a, d]. As the endpoints are object references, there is no guarantee which
    * specific endpoint reference is used from the input ranges:
    *
    * <p>E.g. if a {@code ==} a' {@code <} b {@code <} c, the extension of the ranges [a, b] and
    * [a', c] could be either [a, c] or [a', c], where [a', c] could be either the exact input
    * range or a newly created range with the same endpoints.
   *
   * @param range a non-{@code null} {@code Range<T>} reference
   * @return the extension of this range and the other range.
   * @throws NullPointerException if {@code range} was {@code null}
   */
  public Range<T> extend(Range<T> range) {
    checkNotNull(range, "range must not be null");

    int cmpLower = range.mLower.compareTo(mLower);
    int cmpUpper = range.mUpper.compareTo(mUpper);

    if (cmpLower <= 0 && cmpUpper >= 0) {
      // other includes this
      return range;
    } else if (cmpLower >= 0 && cmpUpper <= 0) {
       // this includes other
      return this;
    } else {
      return Range.create(
          cmpLower >= 0 ? mLower : range.mLower, cmpUpper <= 0 ? mUpper : range.mUpper);
    }
  }
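   // Example (hypothetical values): [2, 8].extend([3, 5]) returns this same [2, 8]
   // instance, [2, 8].extend([1, 9]) returns the argument itself, and
   // [2, 8].extend([5, 12]) creates a new [2, 12] range.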
  /**
   * Returns the intersection of two sorted arrays of non-intersecting ranges.
   *
   * @param one a sorted array of non-intersecting ranges in ascending order
   * @param another another sorted array of non-intersecting ranges in ascending order
   * @return the intersection of the two arrays, sorted in ascending order
  */
 public static <T extends Comparable<? super T>> Range<T>[] intersectSortedDistinctRanges(
     Range<T>[] one, Range<T>[] another) {
   int ix = 0;
   Vector<Range<T>> result = new Vector<Range<T>>();
   for (Range<T> range : another) {
     while (ix < one.length && one[ix].getUpper().compareTo(range.getLower()) < 0) {
       ++ix;
     }
     while (ix < one.length && one[ix].getUpper().compareTo(range.getUpper()) < 0) {
       result.add(range.intersect(one[ix]));
       ++ix;
     }
     if (ix == one.length) {
       break;
     }
     if (one[ix].getLower().compareTo(range.getUpper()) <= 0) {
       result.add(range.intersect(one[ix]));
     }
   }
   return result.toArray(new Range[result.size()]);
 }
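  // Worked example (hypothetical inputs): with one = {[1, 4], [6, 9]} and
  // another = {[2, 7]}, the inner loop adds [2, 7].intersect([1, 4]) = [2, 4],
  // and the trailing check adds [2, 7].intersect([6, 9]) = [6, 7], giving
  // {[2, 4], [6, 7]}.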
  private void yuvBurstTestByCamera(String cameraId) throws Exception {
    // Parameters
    final int MAX_CONVERGENCE_FRAMES = 150; // 5 sec at 30fps
    final long MAX_PREVIEW_RESULT_TIMEOUT_MS = 1000;
    final int BURST_SIZE = 100;
    final float FRAME_DURATION_MARGIN_FRACTION = 0.1f;

    // Find a good preview size (bound to 1080p)
    final Size previewSize = mOrderedPreviewSizes.get(0);

    // Get maximum YUV_420_888 size
    final Size stillSize =
        getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, /*bound*/ null)
            .get(0);

    // Find max pipeline depth and sync latency
    final int maxPipelineDepth =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
    final int maxSyncLatency =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SYNC_MAX_LATENCY);

    // Find minimum frame duration for full-res YUV_420_888
    StreamConfigurationMap config =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final long minStillFrameDuration =
        config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);

    // Find suitable target FPS range - as high as possible
    List<Range<Integer>> fpsRanges =
        Arrays.asList(mStaticInfo.getAeAvailableTargetFpsRangesChecked());
    Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
    // Add 0.05 so that an FPS value like 29.99 evaluates to 30
    int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration + 0.05f);
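    // (e.g. a hypothetical 33,366,667 ns frame duration gives 1e9 / d = 29.97 fps;
    // adding 0.05 and flooring yields 30 rather than 29)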
    boolean foundConstantMaxYUVRange = false;
    boolean foundYUVStreamingRange = false;

    for (Range<Integer> fpsRange : fpsRanges) {
      if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
        foundConstantMaxYUVRange = true;
      }
      if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
        foundYUVStreamingRange = true;
      }
    }

    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (%d, %d) must be supported",
            cameraId, minBurstFps, minBurstFps),
        foundConstantMaxYUVRange);
    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
            cameraId, minBurstFps),
        foundYUVStreamingRange);
    assertTrue(
        String.format(
            "Cam %s: No target FPS range found with minimum FPS above "
                + " 1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
            cameraId, minBurstFps, minStillFrameDuration),
        targetRange.getLower() >= minBurstFps);

    Log.i(
        TAG,
        String.format(
            "Selected frame rate range %d - %d for YUV burst",
            targetRange.getLower(), targetRange.getUpper()));

    // Check if READ_SENSOR_SETTINGS is supported
    final boolean checkSensorSettings =
        mStaticInfo.isCapabilitySupported(
            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);

    // Configure basic preview and burst settings

    CaptureRequest.Builder previewBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder burstBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    burstBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    // Create session and start up preview

    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    ImageDropperListener imageDropper = new ImageDropperListener();

    prepareCaptureAndStartPreview(
        previewBuilder,
        burstBuilder,
        previewSize,
        stillSize,
        ImageFormat.YUV_420_888,
        resultListener,
        /*maxNumImages*/ 3,
        imageDropper);

    // Create burst

    List<CaptureRequest> burst = new ArrayList<>();
    for (int i = 0; i < BURST_SIZE; i++) {
      burst.add(burstBuilder.build());
    }

    // Converge AE/AWB

    int frameCount = 0;
    while (true) {
      CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
      int aeState = result.get(CaptureResult.CONTROL_AE_STATE);
      int awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

      if (DEBUG) {
        Log.d(TAG, "aeState: " + aeState + ". awbState: " + awbState);
      }

      if ((aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
              || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)
          && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
        break;
      }
      frameCount++;
      assertTrue(
          String.format(
              "Cam %s: Can not converge AE and AWB within %d frames",
              cameraId, MAX_CONVERGENCE_FRAMES),
          frameCount < MAX_CONVERGENCE_FRAMES);
    }

    // Lock AF if there's a focuser

    if (mStaticInfo.hasFocuser()) {
      previewBuilder.set(
          CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
      mSession.capture(previewBuilder.build(), resultListener, mHandler);
      previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

      frameCount = 0;
      while (true) {
        CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        int afState = result.get(CaptureResult.CONTROL_AF_STATE);

        if (DEBUG) {
          Log.d(TAG, "afState: " + afState);
        }

        if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
          break;
        }
        frameCount++;
        assertTrue(
            String.format(
                "Cam %s: Cannot lock AF within %d frames", cameraId, MAX_CONVERGENCE_FRAMES),
            frameCount < MAX_CONVERGENCE_FRAMES);
      }
    }

    // Lock AE/AWB

    previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    CaptureRequest lockedRequest = previewBuilder.build();
    mSession.setRepeatingRequest(lockedRequest, resultListener, mHandler);

    // Wait for first result with locking
    resultListener.drain();
    CaptureResult lockedResult =
        resultListener.getCaptureResultForRequest(lockedRequest, maxPipelineDepth);

    int pipelineDepth = lockedResult.get(CaptureResult.REQUEST_PIPELINE_DEPTH);

    // Then start waiting on results to get the first result that should be synced
    // up, and also fire the burst as soon as possible

    if (maxSyncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
      // The locked result we have is already synchronized so start the burst
      mSession.captureBurst(burst, resultListener, mHandler);
    } else {
      // Need to get a synchronized result, and may need to start burst later to
      // be synchronized correctly

      boolean burstSent = false;

      // Calculate how many requests we need to still send down to camera before we
      // know the settings have settled for the burst

      int numFramesWaited = maxSyncLatency;
      if (numFramesWaited == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
        numFramesWaited = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
      }

      int requestsNeededToSync = numFramesWaited - pipelineDepth;
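      // One reading of this arithmetic: the pipeline already holds pipelineDepth
      // results for the locked request, so roughly (numFramesWaited - pipelineDepth)
      // further results must drain before the settings are known to have settled.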
      for (int i = 0; i < numFramesWaited; i++) {
        if (!burstSent && requestsNeededToSync <= 0) {
          mSession.captureBurst(burst, resultListener, mHandler);
          burstSent = true;
        }
        lockedResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        requestsNeededToSync--;
      }

      assertTrue("Cam " + cameraId + ": Burst failed to fire!", burstSent);
    }

    // Read in locked settings if supported

    long burstExposure = 0;
    long burstFrameDuration = 0;
    int burstSensitivity = 0;
    if (checkSensorSettings) {
      burstExposure = lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
      burstFrameDuration = lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION);
      burstSensitivity = lockedResult.get(CaptureResult.SENSOR_SENSITIVITY);

      assertTrue(
          String.format(
              "Cam %s: Frame duration %d ns too short compared to " + "exposure time %d ns",
              cameraId, burstFrameDuration, burstExposure),
          burstFrameDuration >= burstExposure);

      assertTrue(
          String.format("Cam %s: Exposure time is not valid: %d", cameraId, burstExposure),
          burstExposure > 0);
      assertTrue(
          String.format("Cam %s: Frame duration is not valid: %d", cameraId, burstFrameDuration),
          burstFrameDuration > 0);
      assertTrue(
          String.format("Cam %s: Sensitivity is not valid: %d", cameraId, burstSensitivity),
          burstSensitivity > 0);
    }

    // Process burst results
    int burstIndex = 0;
    CaptureResult burstResult =
        resultListener.getCaptureResultForRequest(burst.get(burstIndex), maxPipelineDepth + 1);
    long prevTimestamp = -1;
    final long frameDurationBound =
        (long) (minStillFrameDuration * (1 + FRAME_DURATION_MARGIN_FRACTION));

    List<Long> frameDurations = new ArrayList<>();

    while (true) {
      // Verify the result
      assertTrue(
          "Cam " + cameraId + ": Result doesn't match expected request",
          burstResult.getRequest() == burst.get(burstIndex));

      // Verify locked settings
      if (checkSensorSettings) {
        long exposure = burstResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        int sensitivity = burstResult.get(CaptureResult.SENSOR_SENSITIVITY);
        assertTrue("Cam " + cameraId + ": Exposure not locked!", exposure == burstExposure);
        assertTrue(
            "Cam " + cameraId + ": Sensitivity not locked!", sensitivity == burstSensitivity);
      }

      // Collect inter-frame durations
      long timestamp = burstResult.get(CaptureResult.SENSOR_TIMESTAMP);
      if (prevTimestamp != -1) {
        long frameDuration = timestamp - prevTimestamp;
        frameDurations.add(frameDuration);
        if (DEBUG) {
          Log.i(
              TAG,
              String.format("Frame %03d    Duration %.2f ms", burstIndex, frameDuration / 1e6));
        }
      }
      prevTimestamp = timestamp;

      // Get next result
      burstIndex++;
      if (burstIndex == BURST_SIZE) break;
      burstResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
    }

    // Verify inter-frame durations

    long meanFrameSum = 0;
    for (Long duration : frameDurations) {
      meanFrameSum += duration;
    }
    float meanFrameDuration = (float) meanFrameSum / frameDurations.size();

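    // Sample standard deviation: note the (n - 1) denominator (Bessel's correction).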
    float stddevSum = 0;
    for (Long duration : frameDurations) {
      stddevSum += (duration - meanFrameDuration) * (duration - meanFrameDuration);
    }
    float stddevFrameDuration = (float) Math.sqrt(1.f / (frameDurations.size() - 1) * stddevSum);

    Log.i(
        TAG,
        String.format(
            "Cam %s: Burst frame duration mean: %.1f, stddev: %.1f",
            cameraId, meanFrameDuration, stddevFrameDuration));

    assertTrue(
        String.format(
            "Cam %s: Burst frame duration mean %.1f ns is larger than acceptable, "
                + "expecting below %d ns, allowing below %d",
            cameraId, meanFrameDuration, minStillFrameDuration, frameDurationBound),
        meanFrameDuration <= frameDurationBound);

    // Calculate an upper bound at roughly the 97.5th percentile: mean + 2 * stddev,
    // where 2 approximates the one-sided z-value of 1.96 (assuming durations are
    // normally distributed...)
    float limit95FrameDuration = meanFrameDuration + 2 * stddevFrameDuration;

    // Don't enforce this yet, but warn
    if (limit95FrameDuration > frameDurationBound) {
      Log.w(
          TAG,
          String.format(
              "Cam %s: Standard deviation is too large compared to limit: "
                  + "mean: %.1f ms, stddev: %.1f ms: 95%% bound: %f ms",
              cameraId,
              meanFrameDuration / 1e6,
              stddevFrameDuration / 1e6,
              limit95FrameDuration / 1e6));
    }
  }
 static Range<Long> longRangeFor(double v) {
   return Range.create((long) v, (long) Math.ceil(v));
 }
 static Range<Integer> intRangeFor(double v) {
   return Range.create((int) v, (int) Math.ceil(v));
 }
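  // Example (hypothetical values): longRangeFor(3.2) -> [3, 4]; intRangeFor(5.0)
  // -> [5, 5], since truncation and ceiling coincide on whole numbers.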
 static Range<Integer> alignRange(Range<Integer> range, int align) {
   return range.intersect(
       divUp(range.getLower(), align) * align, (range.getUpper() / align) * align);
 }
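  // Example (hypothetical values): alignRange([7, 30], 8) rounds the lower bound up
  // and the upper bound down to multiples of 8, intersecting with [8, 24].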
 /**
  * Returns the equivalent factored range {@code newrange}, where for every {@code e}: {@code
  * newrange.contains(e)} implies that {@code range.contains(e * factor)}, and {@code
  * !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
  */
 static Range<Long> factorRange(Range<Long> range, long factor) {
   if (factor == 1) {
     return range;
   }
   return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
 }
 /**
  * Returns the equivalent factored range {@code newrange}, where for every {@code e}: {@code
  * newrange.contains(e)} implies that {@code range.contains(e * factor)}, and {@code
  * !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
  */
 static Range<Integer> factorRange(Range<Integer> range, int factor) {
   if (factor == 1) {
     return range;
   }
   return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
 }
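  // Worked example (hypothetical values): factorRange([10, 25], 4) yields
  // [divUp(10, 4), 25 / 4] = [3, 6]; every e in [3, 6] satisfies 12 <= 4 * e <= 24,
  // inside the original range, while 2 and 7 map outside it.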
 private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
   int[] legacyFps = new int[2];
   legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
   legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
   return legacyFps;
 }
  /**
   * Set the legacy parameters using the {@link LegacyRequest legacy request}.
   *
   * <p>The legacy request's parameters are changed as a side effect of calling this method.
   *
   * @param legacyRequest a non-{@code null} legacy request
   */
  public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;

    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    /*
     * scaler.cropRegion
     */
    ParameterUtils.ZoomData zoomData;
    {
      zoomData =
          ParameterUtils.convertScalerCropRegion(
              activeArray, request.get(SCALER_CROP_REGION), previewSize, params);

      if (params.isZoomSupported()) {
        params.setZoom(zoomData.zoomIndex);
      } else if (VERBOSE) {
        Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
      }
    }

    /*
     * colorCorrection.*
     */
    // colorCorrection.aberrationMode
    {
      int aberrationMode =
          ParamsUtils.getOrDefault(
              request,
              COLOR_CORRECTION_ABERRATION_MODE,
              /*defaultValue*/ COLOR_CORRECTION_ABERRATION_MODE_FAST);

      if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring unsupported "
                + "colorCorrection.aberrationMode = "
                + aberrationMode);
      }
    }

    /*
     * control.ae*
     */
    // control.aeAntibandingMode
    {
      String legacyMode;
      Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
      if (antiBandingMode != null) {
        legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
      } else {
        legacyMode =
            ListUtils.listSelectFirstFrom(
                params.getSupportedAntibanding(),
                new String[] {
                  Parameters.ANTIBANDING_AUTO,
                  Parameters.ANTIBANDING_OFF,
                  Parameters.ANTIBANDING_50HZ,
                  Parameters.ANTIBANDING_60HZ,
                });
      }

      if (legacyMode != null) {
        params.setAntibanding(legacyMode);
      }
    }

    /*
     * control.aeRegions, afRegions
     */
    {
      // aeRegions
      {
        // Use aeRegions for the legacy metering areas. awbRegions are not
        // supported; if set, they are ignored with a warning below.
        MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
        if (request.get(CONTROL_AWB_REGIONS) != null) {
          Log.w(
              TAG,
              "convertRequestMetadata - control.awbRegions setting is not "
                  + "supported, ignoring value");
        }
        int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
        List<Camera.Area> meteringAreaList =
            convertMeteringRegionsToLegacy(
                activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/ "AE");

        // WAR: for b/17252693, some devices can't handle params.setMeteringAreas(null).
        if (maxNumMeteringAreas > 0) {
          params.setMeteringAreas(meteringAreaList);
        }
      }

      // afRegions
      {
        MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
        int maxNumFocusAreas = params.getMaxNumFocusAreas();
        List<Camera.Area> focusAreaList =
            convertMeteringRegionsToLegacy(
                activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/ "AF");

        // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
        if (maxNumFocusAreas > 0) {
          params.setFocusAreas(focusAreaList);
        }
      }
    }

    // control.aeTargetFpsRange
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
      int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);

      // TODO - Should we enforce that all HAL1 devices must include (30, 30) FPS range?
      boolean supported = false;
      for (int[] range : params.getSupportedPreviewFpsRange()) {
        if (legacyFps[0] == range[0] && legacyFps[1] == range[1]) {
          supported = true;
          break;
        }
      }
      if (supported) {
        params.setPreviewFpsRange(
            legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
      } else {
        Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
      }
    }

    /*
     * control
     */

    // control.aeExposureCompensation
    {
      Range<Integer> compensationRange =
          characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
      int compensation =
          ParamsUtils.getOrDefault(request, CONTROL_AE_EXPOSURE_COMPENSATION, /*defaultValue*/ 0);

      if (!compensationRange.contains(compensation)) {
        Log.w(
            TAG,
            "convertRequestMetadata - control.aeExposureCompensation "
                + "is out of range, ignoring value");
        compensation = 0;
      }

      params.setExposureCompensation(compensation);
    }

    // control.aeLock
    {
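      // Assumption about the getIfSupported helper (defined elsewhere): it returns
      // null when the feature is unsupported, warning if the requested value differs
      // from the single allowed value, so the set call below is skipped.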
      Boolean aeLock =
          getIfSupported(
              request,
              CONTROL_AE_LOCK, /*defaultValue*/
              false,
              params.isAutoExposureLockSupported(),
              /*allowedValue*/ false);

      if (aeLock != null) {
        params.setAutoExposureLock(aeLock);
      }

      if (VERBOSE) {
        Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
      }

      // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }

    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/ params);

    // control.afMode
    {
      int afMode =
          ParamsUtils.getOrDefault(request, CONTROL_AF_MODE, /*defaultValue*/ CONTROL_AF_MODE_OFF);
      String focusMode =
          LegacyMetadataMapper.convertAfModeToLegacy(afMode, params.getSupportedFocusModes());

      if (focusMode != null) {
        params.setFocusMode(focusMode);
      }

      if (VERBOSE) {
        Log.v(
            TAG, "convertRequestToMetadata - control.afMode " + afMode + " mapped to " + focusMode);
      }
    }

    // control.awbMode
    {
      Integer awbMode =
          getIfSupported(
              request,
              CONTROL_AWB_MODE,
              /*defaultValue*/ CONTROL_AWB_MODE_AUTO,
              params.getSupportedWhiteBalance() != null,
              /*allowedValue*/ CONTROL_AWB_MODE_AUTO);

      String whiteBalanceMode = null;
      if (awbMode != null) { // null iff AWB is not supported by camera1 api
        whiteBalanceMode = convertAwbModeToLegacy(awbMode);
        params.setWhiteBalance(whiteBalanceMode);
      }

      if (VERBOSE) {
        Log.v(
            TAG,
            "convertRequestToMetadata - control.awbMode "
                + awbMode
                + " mapped to "
                + whiteBalanceMode);
      }
    }

    // control.awbLock
    {
      Boolean awbLock =
          getIfSupported(
              request,
              CONTROL_AWB_LOCK, /*defaultValue*/
              false,
              params.isAutoWhiteBalanceLockSupported(),
              /*allowedValue*/ false);

      if (awbLock != null) {
        params.setAutoWhiteBalanceLock(awbLock);
      }

      // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }

    // control.captureIntent
    {
      int captureIntent =
          ParamsUtils.getOrDefault(
              request, CONTROL_CAPTURE_INTENT, /*defaultValue*/ CONTROL_CAPTURE_INTENT_PREVIEW);

      captureIntent = filterSupportedCaptureIntent(captureIntent);

      params.setRecordingHint(
          captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD
              || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }

    // control.videoStabilizationMode
    {
      Integer stabMode =
          getIfSupported(
              request,
              CONTROL_VIDEO_STABILIZATION_MODE,
              /*defaultValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF,
              params.isVideoStabilizationSupported(),
              /*allowedValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF);

      if (stabMode != null) {
        params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
      }
    }

    // lens.focusDistance
    {
      boolean infinityFocusSupported =
          ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
      Float focusDistance =
          getIfSupported(
              request,
              LENS_FOCUS_DISTANCE,
              /*defaultValue*/ 0f,
              infinityFocusSupported, /*allowedValue*/
              0f);

      if (focusDistance == null || focusDistance != 0f) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring android.lens.focusDistance "
                + focusDistance
                + ", only 0.0f is supported");
      }
    }

    // control.sceneMode, control.mode
    {
      // TODO: Map FACE_PRIORITY scene mode to face detection.

      if (params.getSupportedSceneModes() != null) {
        int controlMode =
            ParamsUtils.getOrDefault(request, CONTROL_MODE, /*defaultValue*/ CONTROL_MODE_AUTO);
        String modeToSet;
        switch (controlMode) {
          case CONTROL_MODE_USE_SCENE_MODE:
            {
              int sceneMode =
                  ParamsUtils.getOrDefault(
                      request, CONTROL_SCENE_MODE, /*defaultValue*/ CONTROL_SCENE_MODE_DISABLED);
              String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
              if (legacySceneMode != null) {
                modeToSet = legacySceneMode;
              } else {
                modeToSet = Parameters.SCENE_MODE_AUTO;
                Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
              }
              break;
            }
          case CONTROL_MODE_AUTO:
            {
              modeToSet = Parameters.SCENE_MODE_AUTO;
              break;
            }
          default:
            {
              Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
              modeToSet = Parameters.SCENE_MODE_AUTO;
            }
        }
        params.setSceneMode(modeToSet);
      }
    }

    // control.effectMode
    {
      if (params.getSupportedColorEffects() != null) {
        int effectMode =
            ParamsUtils.getOrDefault(
                request, CONTROL_EFFECT_MODE, /*defaultValue*/ CONTROL_EFFECT_MODE_OFF);
        String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
        if (legacyEffectMode != null) {
          params.setColorEffect(legacyEffectMode);
        } else {
          params.setColorEffect(Parameters.EFFECT_NONE);
          Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
        }
      }
    }

    /*
     * sensor
     */

    // sensor.testPattern
    {
      int testPatternMode =
          ParamsUtils.getOrDefault(
              request, SENSOR_TEST_PATTERN_MODE, /*defaultValue*/ SENSOR_TEST_PATTERN_MODE_OFF);
      if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
        Log.w(
            TAG,
            "convertRequestToMetadata - ignoring sensor.testPatternMode "
                + testPatternMode
                + "; only OFF is supported");
      }
    }

    /*
     * jpeg.*
     */

    // jpeg.gpsLocation
    {
      Location location = request.get(JPEG_GPS_LOCATION);
      if (location != null) {
        if (checkForCompleteGpsData(location)) {
          params.setGpsAltitude(location.getAltitude());
          params.setGpsLatitude(location.getLatitude());
          params.setGpsLongitude(location.getLongitude());
          params.setGpsProcessingMethod(location.getProvider().toUpperCase());
          params.setGpsTimestamp(location.getTime());
        } else {
          Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
        }
      } else {
        params.removeGpsData();
      }
    }

    // jpeg.orientation
    {
      // Use the requested orientation when present, otherwise default to 0.
      Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
      params.setRotation((orientation == null) ? 0 : orientation);
    }

    // jpeg.quality
    {
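      // JPEG_QUALITY is carried as a signed byte; masking with 0xFF reinterprets it
      // as an unsigned 0-255 quality value.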
      params.setJpegQuality(
          0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailQuality
    {
      params.setJpegThumbnailQuality(
          0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailSize
    {
      List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();

      if (sizes != null && sizes.size() > 0) {
        Size s = request.get(JPEG_THUMBNAIL_SIZE);
        boolean invalidSize =
            (s != null) && !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
        if (invalidSize) {
          Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
        }
        if (s == null || invalidSize) {
          // (0,0) = "no thumbnail" in Camera API 1
          params.setJpegThumbnailSize(/*width*/ 0, /*height*/ 0);
        } else {
          params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
        }
      }
    }

    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    {
      int mode =
          ParamsUtils.getOrDefault(
              request, NOISE_REDUCTION_MODE, /*defaultValue*/ NOISE_REDUCTION_MODE_FAST);

      if (mode != NOISE_REDUCTION_MODE_FAST) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring unsupported " + "noiseReduction.mode = " + mode);
      }
    }
  }