    @Override
    public void marshal(ReprocessFormatsMap value, ByteBuffer buffer) {
      /*
       * // writing (static example, DNG+ZSL)
       * int32_t[] contents = {
       *   RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
       *   RAW16, 2, YUV_420_888, BLOB,
       *   ...,
       *   INPUT_FORMAT, OUTPUT_FORMAT_COUNT, [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
       * };
       */
      int[] inputs = StreamConfigurationMap.imageFormatToInternal(value.getInputs());
      for (int input : inputs) {
        // INPUT_FORMAT
        buffer.putInt(input);

        int[] outputs = StreamConfigurationMap.imageFormatToInternal(value.getOutputs(input));
        // OUTPUT_FORMAT_COUNT
        buffer.putInt(outputs.length);

        // [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
        for (int output : outputs) {
          buffer.putInt(output);
        }
      }
    }
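    // For contrast, a minimal sketch of reading the same layout back. This is a
    // hypothetical reader written for illustration, not the actual unmarshal
    // implementation; it assumes only the buffer layout documented above.
    private static void readReprocessFormatsMap(ByteBuffer buffer) {
      while (buffer.hasRemaining()) {
        int inputFormat = buffer.getInt(); // INPUT_FORMAT
        int outputCount = buffer.getInt(); // OUTPUT_FORMAT_COUNT
        int[] outputs = new int[outputCount];
        for (int i = 0; i < outputCount; i++) { // [OUTPUT_0, ..., OUTPUT_FORMAT_COUNT-1]
          outputs[i] = buffer.getInt();
        }
        // Each inputFormat now maps to its set of reprocessing output formats.
      }
    }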
Example #2
  private void caracteristicasCamera(
      int width, int height) { // method responsible for setting up the camera characteristics

    try {
      for (String cameraId : manager.getCameraIdList()) {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }

        // Used to set the captured image size
        Size largest =
            Collections.max(
                Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
        imageReader =
            ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/ 2);
        imageReader.setOnImageAvailableListener(mOnImageAvailableListener, backgroundHandler);

        // textureView.setAspectRatio(
        //       previewSize.getHeight(), previewSize.getWidth());

        return;
      }
    } catch (CameraAccessException e) {
      e.printStackTrace();
    } catch (NullPointerException e) {
      informaErro
          .newInstance(getString(R.string.camera_error))
          .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
  }
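  // CompareSizesByArea is used throughout these examples but never shown. A
  // sketch matching the comparator from the stock Camera2 samples (assumes
  // android.util.Size and java.util.Comparator are imported):
  static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
      // Compare by total pixel area, in long math to guard against int overflow.
      return Long.signum(
          (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
    }
  }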
  /**
   * Sets up member variables related to camera.
   *
   * @param width The width of available size for camera preview
   * @param height The height of available size for camera preview
   */
  private void setUpCameraOutputs(final int width, final int height) {
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      for (final String cameraId : manager.getCameraIdList()) {
        final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // We don't use a front facing camera in this sample.
        final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }

        final StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        if (map == null) {
          continue;
        }

        // For still image captures, we use the largest available size.
        final Size largest =
            Collections.max(
                Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                new CompareSizesByArea());

        sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        previewSize =
            chooseOptimalSize(
                map.getOutputSizes(SurfaceTexture.class), inputSize, inputSize, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        final int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
          textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
        } else {
          textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        }

        CameraConnectionFragment.this.cameraId = cameraId;

        cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
        return;
      }
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    } catch (final NullPointerException e) {
      // Currently an NPE is thrown when the Camera2API is used but not supported on the
      // device this code runs.
      ErrorDialog.newInstance(getString(R.string.camera_error))
          .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
  }
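  // chooseOptimalSize is also left undefined in these snippets. A sketch of a
  // compatible four-argument helper, loosely following the Camera2 sample
  // logic; the exact signature and selection policy here are assumptions:
  protected static Size chooseOptimalSize(
      final Size[] choices, final int width, final int height, final Size aspectRatio) {
    final List<Size> bigEnough = new ArrayList<>();
    final int w = aspectRatio.getWidth();
    final int h = aspectRatio.getHeight();
    for (final Size option : choices) {
      // Keep sizes that match the target aspect ratio and cover the request.
      if (option.getHeight() == option.getWidth() * h / w
          && option.getWidth() >= width
          && option.getHeight() >= height) {
        bigEnough.add(option);
      }
    }
    // Smallest sufficient size wins; otherwise fall back to the largest choice.
    if (!bigEnough.isEmpty()) {
      return Collections.min(bigEnough, new CompareSizesByArea());
    }
    return Collections.max(Arrays.asList(choices), new CompareSizesByArea());
  }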
  /**
   * Sets up member variables related to camera.
   *
   * @param strCameraId The id of the camera to set up
   * @param width The width of available size for camera preview
   * @param height The height of available size for camera preview
   */
  private void setUpCameraOutputs(String strCameraId, int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      for (String cameraId : manager.getCameraIdList()) {
        if (!cameraId.equalsIgnoreCase(strCameraId)) continue;

        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // Unlike the other variants, this one does not skip front facing cameras:
        // the caller already picked the camera id, so the facing check is disabled.
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          //                    continue;
        }

        StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }

        // For still image captures, we use the largest available size.
        Size largest =
            Collections.max(
                Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
        mImageReader =
            ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/ 2);
        mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        mPreviewSize =
            chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
          mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
          mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }

        mCameraId = cameraId;
        return;
      }
    } catch (CameraAccessException e) {
      e.printStackTrace();
    } catch (NullPointerException e) {
      // Currently an NPE is thrown when the Camera2API is used but not supported on the
      // device this code runs.
      ErrorDialog.newInstance(getString(R.string.camera_error))
          .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
  }
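  // A hypothetical caller for the id-based variant above: resolve the first
  // back-facing camera id and hand it to setUpCameraOutputs. Only standard
  // CameraManager/CameraCharacteristics calls are used.
  private String findBackFacingCameraId(CameraManager manager) throws CameraAccessException {
    for (String id : manager.getCameraIdList()) {
      Integer facing =
          manager.getCameraCharacteristics(id).get(CameraCharacteristics.LENS_FACING);
      if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
        return id;
      }
    }
    return null; // no back-facing camera on this device
  }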
  private void setupCamera(int width, int height) {

    try {
      if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
        throw new RuntimeException("Time out waiting to lock camera opening.");
      }
      for (String cameraId : cameraManager.getCameraIdList()) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
          StreamConfigurationMap configMap =
              characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
          Size largestImageSize =
              Collections.max(
                  Arrays.asList(configMap.getOutputSizes(ImageFormat.YUV_420_888)),
                  new Comparator<Size>() {
                    @Override
                    public int compare(Size lhs, Size rhs) {
                      // Compare by total pixel area, in long math to avoid int overflow.
                      return Long.signum(
                          (long) lhs.getWidth() * lhs.getHeight()
                              - (long) rhs.getWidth() * rhs.getHeight());
                    }
                  });

          imgReader =
              ImageReader.newInstance(
                  largestImageSize.getWidth(),
                  largestImageSize.getHeight(),
                  ImageFormat.YUV_420_888,
                  /*maxImages*/ 2); // 60 * 600 buffers would exhaust memory; two suffice here
          imgReader.setOnImageAvailableListener(barcodeChecker, mHandler);
          prefferedBufferSize =
              getBestSize(configMap.getOutputSizes(SurfaceTexture.class), width, height);
          mCameraId = cameraId;
          return;
        }
      }

    } catch (Exception e) {
      e.printStackTrace();
    }
  }
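  // getBestSize is referenced above but not defined. A plausible sketch that
  // picks the smallest SurfaceTexture output size still covering the view;
  // the name and the selection policy are assumptions for illustration:
  private static Size getBestSize(Size[] choices, int viewWidth, int viewHeight) {
    Size best = choices[0];
    long bestArea = Long.MAX_VALUE;
    for (Size option : choices) {
      long area = (long) option.getWidth() * option.getHeight();
      if (option.getWidth() >= viewWidth && option.getHeight() >= viewHeight && area < bestArea) {
        best = option;
        bestArea = area;
      }
    }
    return best;
  }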
  private void yuvBurstTestByCamera(String cameraId) throws Exception {
    // Parameters
    final int MAX_CONVERGENCE_FRAMES = 150; // 5 sec at 30fps
    final long MAX_PREVIEW_RESULT_TIMEOUT_MS = 1000;
    final int BURST_SIZE = 100;
    final float FRAME_DURATION_MARGIN_FRACTION = 0.1f;

    // Find a good preview size (bound to 1080p)
    final Size previewSize = mOrderedPreviewSizes.get(0);

    // Get maximum YUV_420_888 size
    final Size stillSize =
        getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, /*bound*/ null)
            .get(0);

    // Find max pipeline depth and sync latency
    final int maxPipelineDepth =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
    final int maxSyncLatency =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SYNC_MAX_LATENCY);

    // Find minimum frame duration for full-res YUV_420_888
    StreamConfigurationMap config =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final long minStillFrameDuration =
        config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);

    // Find suitable target FPS range - as high as possible
    List<Range<Integer>> fpsRanges =
        Arrays.asList(mStaticInfo.getAeAvailableTargetFpsRangesChecked());
    Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
    // Add 0.05 here so an FPS like 29.99 evaluates to 30
    int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration + 0.05f);
    boolean foundConstantMaxYUVRange = false;
    boolean foundYUVStreamingRange = false;

    for (Range<Integer> fpsRange : fpsRanges) {
      if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
        foundConstantMaxYUVRange = true;
      }
      if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
        foundYUVStreamingRange = true;
      }
    }

    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (%d, %d) must be supported",
            cameraId, minBurstFps, minBurstFps),
        foundConstantMaxYUVRange);
    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
            cameraId, minBurstFps),
        foundYUVStreamingRange);
    assertTrue(
        String.format(
            "Cam %s: No target FPS range found with minimum FPS above "
                + "1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
            cameraId, minBurstFps, minStillFrameDuration),
        targetRange.getLower() >= minBurstFps);

    Log.i(
        TAG,
        String.format(
            "Selected frame rate range %d - %d for YUV burst",
            targetRange.getLower(), targetRange.getUpper()));

    // Check if READ_SENSOR_SETTINGS is supported
    final boolean checkSensorSettings =
        mStaticInfo.isCapabilitySupported(
            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);

    // Configure basic preview and burst settings

    CaptureRequest.Builder previewBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder burstBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    burstBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    // Create session and start up preview

    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    ImageDropperListener imageDropper = new ImageDropperListener();

    prepareCaptureAndStartPreview(
        previewBuilder,
        burstBuilder,
        previewSize,
        stillSize,
        ImageFormat.YUV_420_888,
        resultListener,
        /*maxNumImages*/ 3,
        imageDropper);

    // Create burst

    List<CaptureRequest> burst = new ArrayList<>();
    for (int i = 0; i < BURST_SIZE; i++) {
      burst.add(burstBuilder.build());
    }

    // Converge AE/AWB

    int frameCount = 0;
    while (true) {
      CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
      int aeState = result.get(CaptureResult.CONTROL_AE_STATE);
      int awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

      if (DEBUG) {
        Log.d(TAG, "aeState: " + aeState + ". awbState: " + awbState);
      }

      if ((aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
              || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)
          && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
        break;
      }
      frameCount++;
      assertTrue(
          String.format(
              "Cam %s: Can not converge AE and AWB within %d frames",
              cameraId, MAX_CONVERGENCE_FRAMES),
          frameCount < MAX_CONVERGENCE_FRAMES);
    }

    // Lock AF if there's a focuser

    if (mStaticInfo.hasFocuser()) {
      previewBuilder.set(
          CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
      mSession.capture(previewBuilder.build(), resultListener, mHandler);
      previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

      frameCount = 0;
      while (true) {
        CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        int afState = result.get(CaptureResult.CONTROL_AF_STATE);

        if (DEBUG) {
          Log.d(TAG, "afState: " + afState);
        }

        if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
          break;
        }
        frameCount++;
        assertTrue(
            String.format(
                "Cam %s: Cannot lock AF within %d frames", cameraId, MAX_CONVERGENCE_FRAMES),
            frameCount < MAX_CONVERGENCE_FRAMES);
      }
    }

    // Lock AE/AWB

    previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    CaptureRequest lockedRequest = previewBuilder.build();
    mSession.setRepeatingRequest(lockedRequest, resultListener, mHandler);

    // Wait for first result with locking
    resultListener.drain();
    CaptureResult lockedResult =
        resultListener.getCaptureResultForRequest(lockedRequest, maxPipelineDepth);

    int pipelineDepth = lockedResult.get(CaptureResult.REQUEST_PIPELINE_DEPTH);

    // Then start waiting on results to get the first result that should be synced
    // up, and also fire the burst as soon as possible

    if (maxSyncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
      // The locked result we have is already synchronized so start the burst
      mSession.captureBurst(burst, resultListener, mHandler);
    } else {
      // Need to get a synchronized result, and may need to start burst later to
      // be synchronized correctly

      boolean burstSent = false;

      // Calculate how many requests we need to still send down to camera before we
      // know the settings have settled for the burst

      int numFramesWaited = maxSyncLatency;
      if (numFramesWaited == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
        numFramesWaited = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
      }

      int requestsNeededToSync = numFramesWaited - pipelineDepth;
      for (int i = 0; i < numFramesWaited; i++) {
        if (!burstSent && requestsNeededToSync <= 0) {
          mSession.captureBurst(burst, resultListener, mHandler);
          burstSent = true;
        }
        lockedResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        requestsNeededToSync--;
      }

      assertTrue("Cam " + cameraId + ": Burst failed to fire!", burstSent);
    }

    // Read in locked settings if supported

    long burstExposure = 0;
    long burstFrameDuration = 0;
    int burstSensitivity = 0;
    if (checkSensorSettings) {
      burstExposure = lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
      burstFrameDuration = lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION);
      burstSensitivity = lockedResult.get(CaptureResult.SENSOR_SENSITIVITY);

      assertTrue(
          String.format(
              "Cam %s: Frame duration %d ns too short compared to " + "exposure time %d ns",
              cameraId, burstFrameDuration, burstExposure),
          burstFrameDuration >= burstExposure);

      assertTrue(
          String.format("Cam %s: Exposure time is not valid: %d", cameraId, burstExposure),
          burstExposure > 0);
      assertTrue(
          String.format("Cam %s: Frame duration is not valid: %d", cameraId, burstFrameDuration),
          burstFrameDuration > 0);
      assertTrue(
          String.format("Cam %s: Sensitivity is not valid: %d", cameraId, burstSensitivity),
          burstSensitivity > 0);
    }

    // Process burst results
    int burstIndex = 0;
    CaptureResult burstResult =
        resultListener.getCaptureResultForRequest(burst.get(burstIndex), maxPipelineDepth + 1);
    long prevTimestamp = -1;
    final long frameDurationBound =
        (long) (minStillFrameDuration * (1 + FRAME_DURATION_MARGIN_FRACTION));

    List<Long> frameDurations = new ArrayList<>();

    while (true) {
      // Verify the result
      assertTrue(
          "Cam " + cameraId + ": Result doesn't match expected request",
          burstResult.getRequest() == burst.get(burstIndex));

      // Verify locked settings
      if (checkSensorSettings) {
        long exposure = burstResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        int sensitivity = burstResult.get(CaptureResult.SENSOR_SENSITIVITY);
        assertTrue("Cam " + cameraId + ": Exposure not locked!", exposure == burstExposure);
        assertTrue(
            "Cam " + cameraId + ": Sensitivity not locked!", sensitivity == burstSensitivity);
      }

      // Collect inter-frame durations
      long timestamp = burstResult.get(CaptureResult.SENSOR_TIMESTAMP);
      if (prevTimestamp != -1) {
        long frameDuration = timestamp - prevTimestamp;
        frameDurations.add(frameDuration);
        if (DEBUG) {
          Log.i(
              TAG,
              String.format("Frame %03d    Duration %.2f ms", burstIndex, frameDuration / 1e6));
        }
      }
      prevTimestamp = timestamp;

      // Get next result
      burstIndex++;
      if (burstIndex == BURST_SIZE) break;
      burstResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
    }

    // Verify inter-frame durations

    long meanFrameSum = 0;
    for (Long duration : frameDurations) {
      meanFrameSum += duration;
    }
    float meanFrameDuration = (float) meanFrameSum / frameDurations.size();

    float stddevSum = 0;
    for (Long duration : frameDurations) {
      stddevSum += (duration - meanFrameDuration) * (duration - meanFrameDuration);
    }
    float stddevFrameDuration = (float) Math.sqrt(1.f / (frameDurations.size() - 1) * stddevSum);

    Log.i(
        TAG,
        String.format(
            "Cam %s: Burst frame duration mean: %.1f, stddev: %.1f",
            cameraId, meanFrameDuration, stddevFrameDuration));

    assertTrue(
        String.format(
            "Cam %s: Burst frame duration mean %.1f ns is larger than acceptable, "
                + "expecting below %d ns, allowing below %d",
            cameraId, meanFrameDuration, minStillFrameDuration, frameDurationBound),
        meanFrameDuration <= frameDurationBound);

    // Calculate upper 97.5% bound (assuming durations are normally distributed...)
    float limit95FrameDuration = meanFrameDuration + 2 * stddevFrameDuration;

    // Don't enforce this yet, but warn
    if (limit95FrameDuration > frameDurationBound) {
      Log.w(
          TAG,
          String.format(
              "Cam %s: Standard deviation is too large compared to limit: "
                  + "mean: %.1f ms, stddev: %.1f ms: 95%% bound: %f ms",
              cameraId,
              meanFrameDuration / 1e6,
              stddevFrameDuration / 1e6,
              limit95FrameDuration / 1e6));
    }
  }
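  // Worked example of the burst FPS derivation used in the test above, with an
  // assumed minStillFrameDuration chosen for illustration: the +0.05 nudge, as
  // in the test, keeps a nominal 29.99 fps from truncating down to 29.
  private static int minBurstFpsExample() {
    long minStillFrameDuration = 33_344_000L; // ns, roughly 29.99 fps; illustrative value
    return (int) Math.floor(1e9 / minStillFrameDuration + 0.05f); // floor(30.04...) == 30
  }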
  /**
   * Sets up member variables related to camera.
   *
   * @param width The width of available size for camera preview
   * @param height The height of available size for camera preview
   */
  private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
      for (String cameraId : manager.getCameraIdList()) {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

        // We don't use a front facing camera in this sample.
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
          continue;
        }

        StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
          continue;
        }

        // For still image captures, we use the largest available size.
        Size largest =
            Collections.max(
                Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
        mImageReader =
            ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/ 2);
        mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        //noinspection ConstantConditions
        mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        boolean swappedDimensions = false;
        switch (displayRotation) {
          case Surface.ROTATION_0:
          case Surface.ROTATION_180:
            if (mSensorOrientation == 90 || mSensorOrientation == 270) {
              swappedDimensions = true;
            }
            break;
          case Surface.ROTATION_90:
          case Surface.ROTATION_270:
            if (mSensorOrientation == 0 || mSensorOrientation == 180) {
              swappedDimensions = true;
            }
            break;
          default:
            Log.e(TAG, "Display rotation is invalid: " + displayRotation);
        }

        Point displaySize = new Point();
        activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        if (swappedDimensions) {
          rotatedPreviewWidth = height;
          rotatedPreviewHeight = width;
          maxPreviewWidth = displaySize.y;
          maxPreviewHeight = displaySize.x;
        }

        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
          maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }

        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
          maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        mPreviewSize =
            chooseOptimalSize(
                map.getOutputSizes(SurfaceTexture.class),
                rotatedPreviewWidth,
                rotatedPreviewHeight,
                maxPreviewWidth,
                maxPreviewHeight,
                largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
          mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
          mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }

        // Check if the flash is supported.
        Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        mFlashSupported = available == null ? false : available;

        mCameraId = cameraId;
        return;
      }
    } catch (CameraAccessException e) {
      e.printStackTrace();
    } catch (NullPointerException e) {
      // Currently an NPE is thrown when the Camera2API is used but not supported on the
      // device this code runs.
      ErrorDialog.newInstance(getString(R.string.camera_error))
          .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
  }
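  // This final variant calls a six-argument chooseOptimalSize. A sketch along
  // the lines of the googlesamples Camera2Basic helper it appears to mirror;
  // treat this as an illustrative reconstruction, not this project's own code:
  private static Size chooseOptimalSize(
      Size[] choices,
      int textureViewWidth,
      int textureViewHeight,
      int maxWidth,
      int maxHeight,
      Size aspectRatio) {
    List<Size> bigEnough = new ArrayList<>(); // at least as large as the view
    List<Size> notBigEnough = new ArrayList<>(); // smaller than the view
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
      if (option.getWidth() <= maxWidth
          && option.getHeight() <= maxHeight
          && option.getHeight() == option.getWidth() * h / w) {
        if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
          bigEnough.add(option);
        } else {
          notBigEnough.add(option);
        }
      }
    }
    // Prefer the smallest size that is big enough; otherwise the largest one
    // that is not, so the preview never exceeds the max bounds chosen above.
    if (!bigEnough.isEmpty()) {
      return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (!notBigEnough.isEmpty()) {
      return Collections.max(notBigEnough, new CompareSizesByArea());
    }
    return choices[0]; // nothing matched the aspect ratio; fall back
  }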