public static void setBestPreviewFPS(Camera.Parameters parameters, int minFPS, int maxFPS) {
   List<int[]> supportedPreviewFpsRanges = parameters.getSupportedPreviewFpsRange();
   Log.i(TAG, "Supported FPS ranges: " + toString(supportedPreviewFpsRanges));
   if (supportedPreviewFpsRanges != null && !supportedPreviewFpsRanges.isEmpty()) {
     int[] suitableFPSRange = null;
     for (int[] fpsRange : supportedPreviewFpsRanges) {
       int thisMin = fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
       int thisMax = fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
       if (thisMin >= minFPS * 1000 && thisMax <= maxFPS * 1000) {
         suitableFPSRange = fpsRange;
         break;
       }
     }
     if (suitableFPSRange == null) {
       Log.i(TAG, "No suitable FPS range?");
     } else {
       int[] currentFpsRange = new int[2];
       parameters.getPreviewFpsRange(currentFpsRange);
       if (Arrays.equals(currentFpsRange, suitableFPSRange)) {
         Log.i(TAG, "FPS range already set to " + Arrays.toString(suitableFPSRange));
       } else {
         Log.i(TAG, "Setting FPS range to " + Arrays.toString(suitableFPSRange));
         parameters.setPreviewFpsRange(
             suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
             suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
       }
     }
   }
 }
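  // A minimal usage sketch (an assumption, not part of the original example): apply the
  // helper to an already-open camera and commit the parameters. The 10/20 fps bounds and
  // the applyBestFps name are placeholders.
  static void applyBestFps(Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    // The helper takes whole fps values and compares them against the
    // milli-fps ranges reported by getSupportedPreviewFpsRange().
    setBestPreviewFPS(parameters, 10, 20);
    camera.setParameters(parameters);
  }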
Example #2
  @CalledByNative
  public static CaptureFormat[] getDeviceSupportedFormats(int id) {
    Camera camera;
    try {
      camera = Camera.open(id);
    } catch (RuntimeException ex) {
      Log.e(TAG, "Camera.open: " + ex);
      return null;
    }
    Camera.Parameters parameters = camera.getParameters();

    ArrayList<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
    // getSupportedPreviewFormats(), getSupportedPreviewFpsRange() and
    // getSupportedPreviewSizes() return lists with at least one element,
    // but when the camera is in a bad state they can return null; in that
    // case we add a single 0 entry so that we can still retrieve as much
    // information as possible.
    List<Integer> pixelFormats = parameters.getSupportedPreviewFormats();
    if (pixelFormats == null) {
      pixelFormats = new ArrayList<Integer>();
    }
    if (pixelFormats.size() == 0) {
      pixelFormats.add(ImageFormat.UNKNOWN);
    }
    for (Integer previewFormat : pixelFormats) {
      int pixelFormat = AndroidImageFormatList.ANDROID_IMAGEFORMAT_UNKNOWN;
      if (previewFormat == ImageFormat.YV12) {
        pixelFormat = AndroidImageFormatList.ANDROID_IMAGEFORMAT_YV12;
      } else if (previewFormat == ImageFormat.NV21) {
        continue;
      }

      List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
      if (listFpsRange == null) {
        listFpsRange = new ArrayList<int[]>();
      }
      if (listFpsRange.size() == 0) {
        listFpsRange.add(new int[] {0, 0});
      }
      for (int[] fpsRange : listFpsRange) {
        List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes();
        if (supportedSizes == null) {
          supportedSizes = new ArrayList<Camera.Size>();
        }
        if (supportedSizes.size() == 0) {
          supportedSizes.add(camera.new Size(0, 0));
        }
        for (Camera.Size size : supportedSizes) {
          formatList.add(
              new CaptureFormat(size.width, size.height, (fpsRange[0] + 999) / 1000, pixelFormat));
        }
      }
    }
    camera.release();
    return formatList.toArray(new CaptureFormat[formatList.size()]);
  }
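  // CaptureFormat itself is not shown in this snippet; a minimal hypothetical holder that
  // matches the constructor used above (and the mWidth/mHeight/mPixelFormat fields
  // referenced in Example #6 below) could look like this.
  public static class CaptureFormat {
    final int mWidth;
    final int mHeight;
    final int mFramerate;
    final int mPixelFormat;

    CaptureFormat(int width, int height, int framerate, int pixelFormat) {
      mWidth = width;
      mHeight = height;
      mFramerate = framerate;
      mPixelFormat = pixelFormat;
    }
  }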
Example #3
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);

    tv_text1 = (TextView) findViewById(id.textView1);
    tv_values1 = (TextView) findViewById(id.textView2);
    tv_text2 = (TextView) findViewById(id.textView3);
    tv_values2 = (TextView) findViewById(id.textView4);
    tv_text3 = (TextView) findViewById(id.textView5);
    tv_values3 = (TextView) findViewById(id.textView6);
    tv_values1.setText("");
    tv_values2.setText("");
    preview = (FrameLayout) findViewById(id.camera_preview);

    mes = false;

    // Create an instance of Camera
    mCamera = getCameraInstance();
    CamTestActivity.setCameraDisplayOrientation(this, 1, mCamera);

    // -->GETTING THE POSSIBLE RANGE
    Camera.Parameters cp = mCamera.getParameters();
    List<int[]> l = cp.getSupportedPreviewFpsRange();
    if (l.size() == 1) {
      tv_text1.setText("FPSMin: ");
      tv_values1.setText(String.valueOf(l.get(0)[Parameters.PREVIEW_FPS_MIN_INDEX] / 1000));
      tv_text2.setText("FPSMax: ");
      tv_values2.setText(String.valueOf(l.get(0)[Parameters.PREVIEW_FPS_MAX_INDEX] / 1000));
    }
    mCamera.setParameters(cp);
    // <--GETTING THE POSSIBLE RANGE

    // Add a listener to the Capture button
    captureButton = (Button) findViewById(id.button_capture);
    captureButton.setText("Start");
    captureButton.setOnClickListener(
        new View.OnClickListener() {
          @Override
          public void onClick(View v) {
            changeMes();
          }
        });
  }
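  // getCameraInstance() is not shown here; a safe-open helper in the style of the
  // Android Camera guide (a sketch, not the original code) would look like this.
  public static Camera getCameraInstance() {
    Camera c = null;
    try {
      c = Camera.open(); // attempt to get a Camera instance
    } catch (Exception e) {
      // Camera is not available (in use or does not exist).
    }
    return c; // null if the camera is unavailable
  }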
  /**
   * Gets the list of supported {@link CameraFormat}s for the given camera id.
   *
   * <p>id is one of:
   *
   * <ul>
   *   <li>{@link android.hardware.Camera.CameraInfo#CAMERA_FACING_BACK}
   *   <li>{@link android.hardware.Camera.CameraInfo#CAMERA_FACING_FRONT}
   * </ul>
   *
   * @param id camera id
   * @return list of CameraFormat
   */
  public static List<CameraFormat> getSupportedFormats(final int id) {
    final ArrayList<CameraFormat> formatList = new ArrayList<>();

    Camera camera;
    try {
      camera = Camera.open(id);
    } catch (Exception e) {
      return formatList;
    }

    try {
      Camera.Parameters parameters = camera.getParameters();
      List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
      int[] range = {0, 0};
      if (listFpsRange != null && !listFpsRange.isEmpty()) {
        range = listFpsRange.get(listFpsRange.size() - 1);
      }
      List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes();
      for (Camera.Size size : supportedSizes) {
        formatList.add(
            new CameraFormat(
                size.width,
                size.height,
                id,
                range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] / 1000,
                range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000));
      }
    } catch (Exception e) {
      if (BuildConfig.DEBUG) {
        Log.w(TAG, "", e);
      }
    } finally {
      camera.release();
    }
    return formatList;
  }
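  // Hypothetical usage: log every supported format of the back-facing camera.
  // Relies on CameraFormat having a readable toString(), which is an assumption.
  public static void logBackCameraFormats() {
    List<CameraFormat> formats = getSupportedFormats(Camera.CameraInfo.CAMERA_FACING_BACK);
    for (CameraFormat format : formats) {
      Log.d(TAG, "Supported format: " + format);
    }
  }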
  public void openCamera() {
    if (!isCreated || camera != null) {
      return;
    }

    // Open camera
    //
    try {
      camera = Camera.open(currentCameraId);
    } catch (NoSuchMethodError noSuchMethodError) {
      camera = Camera.open();
    }

    try {
      camera.setPreviewDisplay(getHolder());
      camera.setDisplayOrientation(90);
      camera.setPreviewCallback(cameraPreviewCallback);
    } catch (IOException ignore) {
      ignore.printStackTrace();
    } catch (NullPointerException ignore) {
      ignore.printStackTrace();
    }

    // get camera parameters
    //
    final Camera.Parameters parameters = camera.getParameters();

    // Get Preview Size and FPS
    //
    final List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    final List<int[]> supportedPreviewFpsRange = parameters.getSupportedPreviewFpsRange();

    // set FPS
    //
    int[] chosenFPSRange = supportedPreviewFpsRange.get(0);
    for (int[] FPSRange : supportedPreviewFpsRange) {
      if (FPS > FPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
          && FPS < FPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]) {
        chosenFPSRange = FPSRange;
        break;
      }
    }
    parameters.setPreviewFpsRange(
        chosenFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
        chosenFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

    // set Preview Size
    //
    if (frameSize != null) {
      parameters.setPreviewSize(frameSize.width, frameSize.height);
    } else {
      int firstElementWidth = supportedPreviewSizes.get(0).width;
      int lastElementWidth = supportedPreviewSizes.get(supportedPreviewSizes.size() - 1).width;
      Camera.Size minPreviewSize =
          (lastElementWidth > firstElementWidth)
              ? supportedPreviewSizes.get(0)
              : supportedPreviewSizes.get(supportedPreviewSizes.size() - 1);
      parameters.setPreviewSize(minPreviewSize.width, minPreviewSize.height);
    }

    // Set parameters and start preview
    //
    try {
      camera.setParameters(parameters);
      camera.startPreview();
    } catch (RuntimeException ignore) {
      ignore.printStackTrace();
    }
  }
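  // A hypothetical counterpart to openCamera() (not part of the original snippet):
  // stop frame delivery and release the camera so other apps can use it.
  public void closeCamera() {
    if (camera == null) {
      return;
    }
    try {
      camera.setPreviewCallback(null); // stop delivering frames to cameraPreviewCallback
      camera.stopPreview();
    } catch (RuntimeException ignore) {
      ignore.printStackTrace();
    }
    camera.release();
    camera = null;
  }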
Example #6
  // Returns true on success, false otherwise.
  @CalledByNative
  public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" + frameRate + "fps");
    try {
      mCamera = Camera.open(mId);
    } catch (RuntimeException ex) {
      Log.e(TAG, "allocate: Camera.open: " + ex);
      return false;
    }

    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(mId, cameraInfo);
    mCameraOrientation = cameraInfo.orientation;
    mCameraFacing = cameraInfo.facing;
    mDeviceOrientation = getDeviceOrientation();
    Log.d(
        TAG,
        "allocate: orientation dev="
            + mDeviceOrientation
            + ", cam="
            + mCameraOrientation
            + ", facing="
            + mCameraFacing);

    Camera.Parameters parameters = mCamera.getParameters();

    // getSupportedPreviewFpsRange() returns a List with at least one
    // element, but when camera is in bad state, it can return null pointer.
    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
    if (listFpsRange == null || listFpsRange.size() == 0) {
      Log.e(TAG, "allocate: no fps range found");
      return false;
    }
    int frameRateInMs = frameRate * 1000;
    // Use the first range as default.
    int[] fpsMinMax = listFpsRange.get(0);
    int newFrameRate = (fpsMinMax[0] + 999) / 1000;
    for (int[] fpsRange : listFpsRange) {
      if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
        fpsMinMax = fpsRange;
        newFrameRate = frameRate;
        break;
      }
    }
    frameRate = newFrameRate;
    Log.d(TAG, "allocate: fps set to " + frameRate);

    // Calculate size.
    List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
    int minDiff = Integer.MAX_VALUE;
    int matchedWidth = width;
    int matchedHeight = height;
    for (Camera.Size size : listCameraSize) {
      int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
      Log.d(TAG, "allocate: supported (" + size.width + ", " + size.height + "), diff=" + diff);
      // TODO(wjia): Remove this hack (forcing width to be multiple
      // of 32) by supporting stride in video frame buffer.
      // Right now, VideoCaptureController requires compact YV12
      // (i.e., with no padding).
      if (diff < minDiff && (size.width % 32 == 0)) {
        minDiff = diff;
        matchedWidth = size.width;
        matchedHeight = size.height;
      }
    }
    if (minDiff == Integer.MAX_VALUE) {
      Log.e(TAG, "allocate: can not find a multiple-of-32 resolution");
      return false;
    }

    mCaptureFormat =
        new CaptureFormat(matchedWidth, matchedHeight, frameRate, BuggyDeviceHack.getImageFormat());
    // Hack to avoid certain capture resolutions under a minimum one,
    // see http://crbug.com/305294
    BuggyDeviceHack.applyMinDimensions(mCaptureFormat);
    Log.d(TAG, "allocate: matched (" + mCaptureFormat.mWidth + "x" + mCaptureFormat.mHeight + ")");

    if (parameters.isVideoStabilizationSupported()) {
      Log.d(
          TAG,
          "Image stabilization supported, currently: "
              + parameters.getVideoStabilization()
              + ", setting it.");
      parameters.setVideoStabilization(true);
    } else {
      Log.d(TAG, "Image stabilization not supported.");
    }
    parameters.setPreviewSize(mCaptureFormat.mWidth, mCaptureFormat.mHeight);
    parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
    parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
    mCamera.setParameters(parameters);

    // Set SurfaceTexture. Android Capture needs a SurfaceTexture even if
    // it is not going to be used.
    mGlTextures = new int[1];
    // Generate one texture pointer and bind it as an external texture.
    GLES20.glGenTextures(1, mGlTextures, 0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
    // No mip-mapping with camera source.
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // Clamp to edge is only option.
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
    mSurfaceTexture.setOnFrameAvailableListener(null);

    try {
      mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException ex) {
      Log.e(TAG, "allocate: " + ex);
      return false;
    }

    int bufSize =
        mCaptureFormat.mWidth
            * mCaptureFormat.mHeight
            * ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat)
            / 8;
    for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
      byte[] buffer = new byte[bufSize];
      mCamera.addCallbackBuffer(buffer);
    }
    mExpectedFrameSize = bufSize;

    return true;
  }
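  // Hypothetical cleanup for allocate() (not part of the original snippet): release the
  // camera, the GL texture and the SurfaceTexture acquired above.
  private void deallocate() {
    if (mCamera == null) {
      return;
    }
    mCamera.stopPreview();
    mCamera.release();
    mCamera = null;
    if (mGlTextures != null) {
      GLES20.glDeleteTextures(1, mGlTextures, 0);
      mGlTextures = null;
    }
    if (mSurfaceTexture != null) {
      mSurfaceTexture.release();
      mSurfaceTexture = null;
    }
  }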
  /**
   * Set the legacy parameters using the {@link LegacyRequest legacy request}.
   *
   * <p>The legacy request's parameters are changed as a side effect of calling this method.
   *
   * @param legacyRequest a non-{@code null} legacy request
   */
  public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;

    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    /*
     * scaler.cropRegion
     */
    ParameterUtils.ZoomData zoomData;
    {
      zoomData =
          ParameterUtils.convertScalerCropRegion(
              activeArray, request.get(SCALER_CROP_REGION), previewSize, params);

      if (params.isZoomSupported()) {
        params.setZoom(zoomData.zoomIndex);
      } else if (VERBOSE) {
        Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
      }
    }

    /*
     * colorCorrection.*
     */
    // colorCorrection.aberrationMode
    {
      int aberrationMode =
          ParamsUtils.getOrDefault(
              request,
              COLOR_CORRECTION_ABERRATION_MODE,
              /*defaultValue*/ COLOR_CORRECTION_ABERRATION_MODE_FAST);

      if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring unsupported "
                + "colorCorrection.aberrationMode = "
                + aberrationMode);
      }
    }

    /*
     * control.ae*
     */
    // control.aeAntibandingMode
    {
      String legacyMode;
      Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
      if (antiBandingMode != null) {
        legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
      } else {
        legacyMode =
            ListUtils.listSelectFirstFrom(
                params.getSupportedAntibanding(),
                new String[] {
                  Parameters.ANTIBANDING_AUTO,
                  Parameters.ANTIBANDING_OFF,
                  Parameters.ANTIBANDING_50HZ,
                  Parameters.ANTIBANDING_60HZ,
                });
      }

      if (legacyMode != null) {
        params.setAntibanding(legacyMode);
      }
    }

    /*
     * control.aeRegions, afRegions
     */
    {
      // aeRegions
      {
        // Use aeRegions if available, fall back to using awbRegions if present
        MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
        if (request.get(CONTROL_AWB_REGIONS) != null) {
          Log.w(
              TAG,
              "convertRequestMetadata - control.awbRegions setting is not "
                  + "supported, ignoring value");
        }
        int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
        List<Camera.Area> meteringAreaList =
            convertMeteringRegionsToLegacy(
                activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/ "AE");

        // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
        if (maxNumMeteringAreas > 0) {
          params.setMeteringAreas(meteringAreaList);
        }
      }

      // afRegions
      {
        MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
        int maxNumFocusAreas = params.getMaxNumFocusAreas();
        List<Camera.Area> focusAreaList =
            convertMeteringRegionsToLegacy(
                activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/ "AF");

        // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
        if (maxNumFocusAreas > 0) {
          params.setFocusAreas(focusAreaList);
        }
      }
    }

    // control.aeTargetFpsRange
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
      int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);

      // TODO - Should we enforce that all HAL1 devices must include (30, 30) FPS range?
      boolean supported = false;
      for (int[] range : params.getSupportedPreviewFpsRange()) {
        if (legacyFps[0] == range[0] && legacyFps[1] == range[1]) {
          supported = true;
          break;
        }
      }
      if (supported) {
        params.setPreviewFpsRange(
            legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
      } else {
        Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
      }
    }

    /*
     * control
     */

    // control.aeExposureCompensation
    {
      Range<Integer> compensationRange =
          characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
      int compensation =
          ParamsUtils.getOrDefault(request, CONTROL_AE_EXPOSURE_COMPENSATION, /*defaultValue*/ 0);

      if (!compensationRange.contains(compensation)) {
        Log.w(
            TAG,
            "convertRequestMetadata - control.aeExposureCompensation "
                + "is out of range, ignoring value");
        compensation = 0;
      }

      params.setExposureCompensation(compensation);
    }

    // control.aeLock
    {
      Boolean aeLock =
          getIfSupported(
              request,
              CONTROL_AE_LOCK, /*defaultValue*/
              false,
              params.isAutoExposureLockSupported(),
              /*allowedValue*/ false);

      if (aeLock != null) {
        params.setAutoExposureLock(aeLock);
      }

      if (VERBOSE) {
        Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
      }

      // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }

    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/ params);

    // control.afMode
    {
      int afMode =
          ParamsUtils.getOrDefault(request, CONTROL_AF_MODE, /*defaultValue*/ CONTROL_AF_MODE_OFF);
      String focusMode =
          LegacyMetadataMapper.convertAfModeToLegacy(afMode, params.getSupportedFocusModes());

      if (focusMode != null) {
        params.setFocusMode(focusMode);
      }

      if (VERBOSE) {
        Log.v(
            TAG, "convertRequestToMetadata - control.afMode " + afMode + " mapped to " + focusMode);
      }
    }

    // control.awbMode
    {
      Integer awbMode =
          getIfSupported(
              request,
              CONTROL_AWB_MODE,
              /*defaultValue*/ CONTROL_AWB_MODE_AUTO,
              params.getSupportedWhiteBalance() != null,
              /*allowedValue*/ CONTROL_AWB_MODE_AUTO);

      String whiteBalanceMode = null;
      if (awbMode != null) { // null iff AWB is not supported by camera1 api
        whiteBalanceMode = convertAwbModeToLegacy(awbMode);
        params.setWhiteBalance(whiteBalanceMode);
      }

      if (VERBOSE) {
        Log.v(
            TAG,
            "convertRequestToMetadata - control.awbMode "
                + awbMode
                + " mapped to "
                + whiteBalanceMode);
      }
    }

    // control.awbLock
    {
      Boolean awbLock =
          getIfSupported(
              request,
              CONTROL_AWB_LOCK, /*defaultValue*/
              false,
              params.isAutoWhiteBalanceLockSupported(),
              /*allowedValue*/ false);

      if (awbLock != null) {
        params.setAutoWhiteBalanceLock(awbLock);
      }

      // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }

    // control.captureIntent
    {
      int captureIntent =
          ParamsUtils.getOrDefault(
              request, CONTROL_CAPTURE_INTENT, /*defaultValue*/ CONTROL_CAPTURE_INTENT_PREVIEW);

      captureIntent = filterSupportedCaptureIntent(captureIntent);

      params.setRecordingHint(
          captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD
              || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }

    // control.videoStabilizationMode
    {
      Integer stabMode =
          getIfSupported(
              request,
              CONTROL_VIDEO_STABILIZATION_MODE,
              /*defaultValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF,
              params.isVideoStabilizationSupported(),
              /*allowedValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF);

      if (stabMode != null) {
        params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
      }
    }

    // lens.focusDistance
    {
      boolean infinityFocusSupported =
          ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
      Float focusDistance =
          getIfSupported(
              request,
              LENS_FOCUS_DISTANCE,
              /*defaultValue*/ 0f,
              infinityFocusSupported, /*allowedValue*/
              0f);

      if (focusDistance == null || focusDistance != 0f) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring android.lens.focusDistance "
                + infinityFocusSupported
                + ", only 0.0f is supported");
      }
    }

    // control.sceneMode, control.mode
    {
      // TODO: Map FACE_PRIORITY scene mode to face detection.

      if (params.getSupportedSceneModes() != null) {
        int controlMode =
            ParamsUtils.getOrDefault(request, CONTROL_MODE, /*defaultValue*/ CONTROL_MODE_AUTO);
        String modeToSet;
        switch (controlMode) {
          case CONTROL_MODE_USE_SCENE_MODE:
            {
              int sceneMode =
                  ParamsUtils.getOrDefault(
                      request, CONTROL_SCENE_MODE, /*defaultValue*/ CONTROL_SCENE_MODE_DISABLED);
              String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
              if (legacySceneMode != null) {
                modeToSet = legacySceneMode;
              } else {
                modeToSet = Parameters.SCENE_MODE_AUTO;
                Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
              }
              break;
            }
          case CONTROL_MODE_AUTO:
            {
              modeToSet = Parameters.SCENE_MODE_AUTO;
              break;
            }
          default:
            {
              Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
              modeToSet = Parameters.SCENE_MODE_AUTO;
            }
        }
        params.setSceneMode(modeToSet);
      }
    }

    // control.effectMode
    {
      if (params.getSupportedColorEffects() != null) {
        int effectMode =
            ParamsUtils.getOrDefault(
                request, CONTROL_EFFECT_MODE, /*defaultValue*/ CONTROL_EFFECT_MODE_OFF);
        String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
        if (legacyEffectMode != null) {
          params.setColorEffect(legacyEffectMode);
        } else {
          params.setColorEffect(Parameters.EFFECT_NONE);
          Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
        }
      }
    }

    /*
     * sensor
     */

    // sensor.testPattern
    {
      int testPatternMode =
          ParamsUtils.getOrDefault(
              request, SENSOR_TEST_PATTERN_MODE, /*defaultValue*/ SENSOR_TEST_PATTERN_MODE_OFF);
      if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
        Log.w(
            TAG,
            "convertRequestToMetadata - ignoring sensor.testPatternMode "
                + testPatternMode
                + "; only OFF is supported");
      }
    }

    /*
     * jpeg.*
     */

    // jpeg.gpsLocation
    {
      Location location = request.get(JPEG_GPS_LOCATION);
      if (location != null) {
        if (checkForCompleteGpsData(location)) {
          params.setGpsAltitude(location.getAltitude());
          params.setGpsLatitude(location.getLatitude());
          params.setGpsLongitude(location.getLongitude());
          params.setGpsProcessingMethod(location.getProvider().toUpperCase());
          params.setGpsTimestamp(location.getTime());
        } else {
          Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
        }
      } else {
        params.removeGpsData();
      }
    }

    // jpeg.orientation
    {
      Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
      params.setRotation(
          ParamsUtils.getOrDefault(
              request, JPEG_ORIENTATION, (orientation == null) ? 0 : orientation));
    }

    // jpeg.quality
    {
      params.setJpegQuality(
          0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailQuality
    {
      params.setJpegThumbnailQuality(
          0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailSize
    {
      List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();

      if (sizes != null && sizes.size() > 0) {
        Size s = request.get(JPEG_THUMBNAIL_SIZE);
        boolean invalidSize =
            (s == null) ? false : !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
        if (invalidSize) {
          Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
        }
        if (s == null || invalidSize) {
          // (0,0) = "no thumbnail" in Camera API 1
          params.setJpegThumbnailSize(/*width*/ 0, /*height*/ 0);
        } else {
          params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
        }
      }
    }

    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    {
      int mode =
          ParamsUtils.getOrDefault(
              request, NOISE_REDUCTION_MODE, /*defaultValue*/ NOISE_REDUCTION_MODE_FAST);

      if (mode != NOISE_REDUCTION_MODE_FAST) {
        Log.w(
            TAG,
            "convertRequestToMetadata - Ignoring unsupported " + "noiseReduction.mode = " + mode);
      }
    }
  }
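  // convertAeFpsRangeToLegacy() is referenced above but not shown; a sketch that is
  // consistent with the milli-fps convention of Camera.Parameters (an assumption about
  // the real helper, which may also handle ranges that are already scaled by 1000):
  private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    int[] legacyFps = new int[2];
    legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower() * 1000;
    legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper() * 1000;
    return legacyFps;
  }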