/** Lock the focus as the first step for a still image capture. */
  private void lockFocus() {
    try {
      // Fire a one-shot auto-focus trigger on the preview request.
      mPreviewRequestBuilder.set(
          CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);

      // Pick the exposure mode from the flash setting: always fire the flash when
      // enabled, otherwise disable auto-exposure entirely.
      int aeMode =
          isFlash
              ? CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH
              : CaptureRequest.CONTROL_AE_MODE_OFF;
      mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode);

      // Tell #mCaptureCallback to wait for the lock before proceeding.
      mState = STATE_WAITING_LOCK;
      mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
    /**
     * Takes a still picture on the given session's camera.
     *
     * <p>Builds a STILL_CAPTURE request aimed at the session's ImageReader surface,
     * lets the session add device-specific settings, pauses the preview, and fires a
     * single capture. Any failure is posted to the event bus instead of thrown.
     */
    private void capture(Session s) {
      try {
        // Still-capture request targeting the session's image reader.
        CaptureRequest.Builder stillBuilder =
            s.cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        stillBuilder.addTarget(s.reader.getSurface());

        // Continuous-picture AF plus auto-flash AE.
        stillBuilder.set(
            CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        stillBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

        // Let the session contribute camera-specific request settings.
        Descriptor descriptor = (Descriptor) s.getDescriptor();
        CameraCharacteristics characteristics = mgr.getCameraCharacteristics(descriptor.cameraId);
        s.addToCaptureRequest(characteristics, descriptor.isFacingFront, stillBuilder);

        // Stop the repeating preview, then fire the one-shot still capture.
        s.captureSession.stopRepeating();
        s.captureSession.capture(stillBuilder.build(), new CapturePictureTransaction(s), null);
      } catch (Exception e) {
        // Report the failure to subscribers rather than crashing the capture flow.
        getBus().post(new PictureTakenEvent(e));

        if (isDebug()) {
          Log.e(getClass().getSimpleName(), "Exception running capture", e);
        }
      }
    }
 /**
  * Pushes the current torch state to the hardware.
  *
  * <p>On camera2-based devices this drives a TORCH capture request through the camera
  * session (lazily starting the device/session first); on devices exposing only a sysfs
  * toggle it writes the on/off value to {@code mFlashDevice}.
  *
  * @param forceDisable when true, turn the torch off regardless of {@code mTorchEnabled}
  */
 private void updateFlashlight(boolean forceDisable) {
   try {
     boolean enabled;
     synchronized (this) {
       enabled = mTorchEnabled && !forceDisable;
     }
     if (mUseCameraInterface) {
       if (enabled) {
         // Lazily bring up the camera device and session; the start* methods are
         // expected to re-enter this path once ready.
         if (mCameraDevice == null) {
           startDevice();
           return;
         }
         if (mSession == null) {
           startSession();
           return;
         }
         if (mFlashlightRequest == null) {
           CaptureRequest.Builder builder =
               mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
           builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
           builder.addTarget(mSurface);
           CaptureRequest request = builder.build();
           mSession.capture(request, null, mHandler);
           mFlashlightRequest = request;
         }
       } else {
         if (mCameraDevice != null) {
           mCameraDevice.close();
           teardownTorch();
         }
       }
     } else {
       // Devices with just a sysfs toggle
       if (mFlashDeviceWriter == null) {
         try {
           mFlashDeviceWriter = new FileWriter(mFlashDevice);
         } catch (IOException e) {
           // BUG FIX: log the cause and bail out. Previously execution fell through
           // and dereferenced the still-null mFlashDeviceWriter below, causing an NPE
           // whenever the sysfs node could not be opened.
           Log.e(TAG, "Error creating new mFlashDeviceWriter", e);
           handleError();
           return;
         }
       }
       try {
         mFlashDeviceWriter.write(String.valueOf(enabled ? mValueOn : mValueOff));
         mFlashDeviceWriter.flush();
         if (!enabled) {
           mFlashDeviceWriter.close();
           mFlashDeviceWriter = null;
         }
       } catch (IOException e) {
         Log.e(TAG, "Error writing to flashlight sysfs", e);
         handleError();
       }
     }
   } catch (CameraAccessException | IllegalStateException | UnsupportedOperationException e) {
     Log.e(TAG, "Error in updateFlashlight", e);
     handleError();
   }
 }
Example #4
0
 /** Cancels the pending auto-focus trigger and resumes the normal repeating preview. */
 private void desabilitaFoco() {
   previewRequestBuilder.set(
       CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
   CaptureRequest cancelRequest = previewRequestBuilder.build();
   try {
     // One-shot cancel, then restore the steady-state preview stream.
     captureSession.capture(cancelRequest, null, backgroundHandler);
     captureSession.setRepeatingRequest(previewRequest, null, backgroundHandler);
   } catch (CameraAccessException e) {
     e.printStackTrace();
   }
 }
Example #5
0
  /**
   * Runs a brief auto-focus pass against a dummy preview surface, then hands off to
   * {@link #take_photo_new} to take the actual picture.
   *
   * @param cameraDevice the opened camera to focus
   * @param imageviewer_new_picture view that will display the resulting picture
   * @param file_name destination file name for the picture
   * @throws CameraAccessException if the camera cannot create the capture session
   */
  public static void auto_focus(
      final CameraDevice cameraDevice,
      final ImageView imageviewer_new_picture,
      final String file_name)
      throws CameraAccessException {

    Log.e("camera", "start");

    // Dummy preview target: we only need a valid surface to drive the AF loop.
    // NOTE(review): this SurfaceTexture/Surface pair is never released — leak candidate.
    SurfaceTexture dummyTexture = new SurfaceTexture(1);
    Surface dummySurface = new Surface(dummyTexture);

    List<Surface> outputSurfaces = new ArrayList<>(2);
    outputSurfaces.add(dummySurface);

    final CaptureRequest.Builder captureRequestBuilder =
        cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    captureRequestBuilder.addTarget(dummySurface);

    cameraDevice.createCaptureSession(
        outputSurfaces,
        new CameraCaptureSession.StateCallback() {
          @Override
          public void onConfigured(@NonNull CameraCaptureSession session) {
            try {
              // One initial frame to spin up the pipeline.
              session.capture(captureRequestBuilder.build(), null, null);
            } catch (CameraAccessException e) {
              e.printStackTrace();
            }
            // Switch to full auto 3A and keep the preview repeating so AF can converge.
            captureRequestBuilder.set(
                CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            try {
              session.setRepeatingRequest(captureRequestBuilder.build(), null, null);
            } catch (CameraAccessException e) {
              e.printStackTrace();
            }

            // Crude fixed wait for focus to settle. Blocking the session callback
            // thread is fragile; watching CONTROL_AF_STATE in a CaptureCallback
            // would be the proper approach.
            try {
              Thread.sleep(500);
            } catch (InterruptedException e) {
              // BUG FIX: restore the interrupt flag instead of silently swallowing it.
              Thread.currentThread().interrupt();
            }
            take_photo_new(cameraDevice, imageviewer_new_picture, file_name);
          }

          @Override
          public void onConfigureFailed(CameraCaptureSession session) {
            // Session setup failed; the photo is simply never taken.
          }
        },
        null);
  }
 /** Starts the auto-focus lock that precedes a still image capture. */
 private void lockFocus() {
   // One-shot AF trigger; the session callback advances the state machine once locked.
   mCaptureReqBuilder.set(
       CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
   mState = STATE_WAITING_LOCK;
   try {
     mCameraCaptureSession.capture(mCaptureReqBuilder.build(), mCameraSessionCallback, mHandler);
   } catch (CameraAccessException e) {
     e.printStackTrace();
   }
 }
 /**
  * Unlock the focus. This method should be called when still image capture sequence is finished.
  */
 private void unlockFocus() {
   try {
     // Cancel the pending auto-focus trigger with a one-shot request...
     mPreviewRequestBuilder.set(
         CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
     CaptureRequest cancelTrigger = mPreviewRequestBuilder.build();
     mCaptureSession.capture(cancelTrigger, mCaptureCallback, mBackgroundHandler);
     // ...then return to the normal preview state and resume the repeating stream.
     mState = STATE_PREVIEW;
     mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
   } catch (CameraAccessException e) {
     e.printStackTrace();
   }
 }
 /**
  * Run the precapture sequence for capturing a still image. This method should be called when we
  * get a response in {@link #mCaptureCallback} from {@link #lockFocus()}.
  */
 private void runPrecaptureSequence() {
   // Kick off the auto-exposure precapture metering and let the callback wait for it.
   mPreviewRequestBuilder.set(
       CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
       CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
   mState = STATE_WAITING_PRECAPTURE;
   try {
     mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
   } catch (CameraAccessException e) {
     e.printStackTrace();
   }
 }
    /**
     * Configures a capture session over {@code outputSurfaces} and starts a repeating
     * preview request that targets every one of them.
     *
     * @param outputSurfaces surfaces to receive preview frames
     * @param listener capture callback notified for each preview result
     * @throws Exception if session configuration or request submission fails
     */
    public void startPreview(List<Surface> outputSurfaces, CaptureCallback listener)
        throws Exception {
      mSessionListener = new BlockingSessionCallback();
      mSession = configureCameraSession(mCamera, outputSurfaces, mSessionListener, mHandler);

      // TODO: vary the different settings like crop region to cover more cases.
      CaptureRequest.Builder previewBuilder =
          mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      outputSurfaces.forEach(previewBuilder::addTarget);

      mSession.setRepeatingRequest(previewBuilder.build(), listener, mHandler);
    }
Example #10
0
  /** (Re)starts the Camera2 repeating preview request with fully automatic 3A control. */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void updateCamera2Preview() {
    mCamera2CaptureRequestBuilder.set(
        CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);

    try {
      // BUG FIX: the previous implementation spun up a fresh HandlerThread on every
      // call and never quit it, leaking one thread per invocation. Since no
      // CaptureCallback is registered (null), no callback handler is needed at all.
      mCamera2CaptureSession.setRepeatingRequest(
          mCamera2CaptureRequestBuilder.build(), null, null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
Example #11
0
 /** Enables auto-flash exposure on {@code requestBuilder} when the device has a flash unit. */
 @TargetApi(CAMERA_2_API_LIMIT)
 private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
   if (!mFlashSupported) {
     return; // no flash hardware — leave the AE mode untouched
   }
   requestBuilder.set(
       CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
 }
Example #12
0
  /** Creates a new {@link CameraCaptureSession} for camera preview. */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void createCameraPreviewSession() {
    try {
      SurfaceTexture previewTexture = mTextureView.getSurfaceTexture();
      assert previewTexture != null;

      // Match the texture's default buffer to the preview size chosen earlier.
      previewTexture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

      // Output surface that receives the live preview frames.
      Surface previewSurface = new Surface(previewTexture);

      // Preview request draws into that surface.
      mCamera2CaptureRequestBuilder =
          mCamera2Device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      mCamera2CaptureRequestBuilder.addTarget(previewSurface);

      // The session also includes the ImageReader surface for still captures.
      mCamera2Device.createCaptureSession(
          Arrays.asList(previewSurface, mImageReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
              if (null == mCamera2Device) {
                // The camera was closed while the session was being configured.
                return;
              }

              mCamera2CaptureSession = cameraCaptureSession;
              try {
                // Continuous auto-focus plus auto-flash, then begin the repeating preview.
                mCamera2CaptureRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                setAutoFlash(mCamera2CaptureRequestBuilder);

                mCamera2CaptureRequest = mCamera2CaptureRequestBuilder.build();
                mCamera2CaptureSession.setRepeatingRequest(
                    mCamera2CaptureRequest, mCapture2Callback, mBackgroundHandler);
              } catch (CameraAccessException e) {
                e.printStackTrace();
              }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
              Etils.showToast(CameraActivity.this, "Failed");
            }
          },
          null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
 /**
  * Update capture request with configuration required for recording stream.
  *
  * @param requestBuilder Capture request builder that needs to be updated for recording specific
  *     camera settings.
  * @param detach Detach the recording surface from the capture request.
  */
 public synchronized void onConfiguringRequest(
     CaptureRequest.Builder requestBuilder, boolean detach) {
   if (detach) {
     // Detaching is valid in any non-IDLE state (CONFIGURED or RECORDING).
     if (getStreamState() == STREAM_STATE_IDLE) {
       Log.w(TAG, "Can not detach surface when recording stream is in IDLE state");
       return;
     }
     requestBuilder.removeTarget(mRecordingSurface);
   } else {
     // Attaching is only valid while CONFIGURED.
     if (getStreamState() != STREAM_STATE_CONFIGURED) {
       Log.w(TAG, "Can only add surface when recording stream is in CONFIGURED state");
       return;
     }
     requestBuilder.addTarget(mRecordingSurface);
   }
 }
  /**
   * Capture a still picture. This method should be called when we get a response in {@link
   * #mCaptureCallback} from both {@link #lockFocus()}.
   */
  private void captureStillPicture() {
    try {
      final Activity activity = getActivity();
      if (null == activity || null == mCameraDevice) {
        return;
      }
      // This is the CaptureRequest.Builder that we use to take a picture.
      final CaptureRequest.Builder captureBuilder =
          mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
      captureBuilder.addTarget(mImageReader.getSurface());

      // Use the same AF mode as the preview.
      captureBuilder.set(
          CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);

      // BUG FIX: the AE mode must be set on the still-capture request itself. The
      // previous code configured mPreviewRequestBuilder in the flash branch, so the
      // flash setting never reached the actual capture request.
      if (isFlash) {
        captureBuilder.set(
            CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
      } else {
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
      }

      // Rotate the JPEG to match the current display orientation.
      int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
      captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

      CameraCaptureSession.CaptureCallback captureCallback =
          new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(
                @NonNull CameraCaptureSession session,
                @NonNull CaptureRequest request,
                @NonNull TotalCaptureResult result) {
              showToast("Saved: " + mFile);
              Log.d(TAG, mFile.toString());
              // Return the camera to preview, then notify the listener of the new file.
              unlockFocus();

              if (listener != null) {
                listener.onTakenPicture(mFile);
              }
            }
          };

      mCaptureSession.stopRepeating();
      mCaptureSession.capture(captureBuilder.build(), captureCallback, null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
Example #15
0
 /** Cancels the AF lock and restores the repeating preview, if the camera is still set up. */
 private void unlockFocus() {
   boolean ready =
       mCaptureReqBuilder != null
           && mCameraCaptureSession != null
           && mCameraSessionCallback != null;
   if (!ready) {
     return; // camera already torn down; nothing to reset
   }
   try {
     // Drop the auto-focus trigger and return exposure to auto-flash.
     mCaptureReqBuilder.set(
         CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
     mCaptureReqBuilder.set(
         CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
     mCameraCaptureSession.capture(mCaptureReqBuilder.build(), mCameraSessionCallback, mHandler);
     // Back to the normal preview state with the repeating request.
     mState = STATE_PREVIEW;
     mCameraCaptureSession.setRepeatingRequest(
         mCaptureRequest, mCameraSessionCallback, mHandler);
   } catch (CameraAccessException e) {
     e.printStackTrace();
   }
 }
Example #16
0
  /**
   * Captures a still photo: prepares the output files, builds a still-capture request
   * (torch on if requested, JPEG rotated for the chosen camera), fires a one-shot
   * capture, and optionally switches cameras afterwards.
   */
  private void capturaFoto() {
    try {
      criaArquivosFotos();
      final Activity activity = getActivity();
      if (null == activity || null == cameraDevice) {
        return;
      }

      final CaptureRequest.Builder captureBuilder =
          cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
      captureBuilder.addTarget(imageReader.getSurface());
      captureBuilder.set(
          CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);

      if (flashOn) {
        captureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
      }

      // BUG FIX: the camera id was compared with == (reference identity), which only
      // works by accident for interned literals; use equals() for String comparison.
      // The non-"0" (front) camera needs an extra 180-degree JPEG rotation.
      int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
      if ("0".equals(cameraEscolhida)) {
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, orientacoes.get(rotation));
      } else {
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, orientacoes.get(rotation) + 180);
      }

      CameraCaptureSession.CaptureCallback captureCallback =
          new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(
                @NonNull CameraCaptureSession session,
                @NonNull CaptureRequest request,
                @NonNull TotalCaptureResult result) {
              // Re-enable continuous focus once the shot is done.
              desabilitaFoco();
            }
          };
      captureSession.stopRepeating();
      captureSession.capture(captureBuilder.build(), captureCallback, null);

      if (flagMudancaAutoCamera) {
        alteraCamera();
      }
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
Example #17
0
  /** Builds the preview capture session and starts the repeating preview request. */
  private void criaPreviewCamera() {
    try {
      SurfaceTexture texture = textureView.getSurfaceTexture();
      assert texture != null;
      Surface previewSurface = new Surface(texture);

      previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      previewRequestBuilder.addTarget(previewSurface);

      // The session serves both the preview surface and the still-capture ImageReader.
      cameraDevice.createCaptureSession(
          Arrays.asList(previewSurface, imageReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
              if (null == cameraDevice) {
                // The camera was closed before configuration finished.
                return;
              }
              captureSession = cameraCaptureSession;
              try {
                // Continuous auto-focus for the live preview.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                previewRequest = previewRequestBuilder.build();
                captureSession.setRepeatingRequest(previewRequest, null, backgroundHandler);
              } catch (CameraAccessException e) {
                e.printStackTrace();
              }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
              // No recovery strategy; the preview simply does not start.
            }
          },
          null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
Example #18
0
  /**
   * Starts the live camera preview: sizes the TextureView's SurfaceTexture, creates a
   * capture session over the preview and ImageReader surfaces, and begins a repeating
   * preview request.
   */
  private void openCameraPreview() {
    try {
      SurfaceTexture surfaceTexture = cameraPreview.getSurfaceTexture();
      surfaceTexture.setDefaultBufferSize(
          prefferedBufferSize.getWidth(), prefferedBufferSize.getHeight());
      Surface previewSurface = new Surface(surfaceTexture);

      mCaptureReqBuilder = theCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      mCaptureReqBuilder.addTarget(previewSurface);

      theCamera.createCaptureSession(
          Arrays.asList(previewSurface, imgReader.getSurface()),
          new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
              if (theCamera == null) {
                return; // camera was released while the session was configuring
              }
              try {
                mCaptureRequest = mCaptureReqBuilder.build();
                mCameraCaptureSession = session;
                mCameraCaptureSession.setRepeatingRequest(
                    mCaptureRequest, mCameraSessionCallback, mHandler);
              } catch (Exception e) {
                e.printStackTrace();
              }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
              showTost("Session creation failed");
            }
          },
          null); // end of create capture session
    } catch (Exception e) {
      // BUG FIX: this catch block was empty, silently hiding any preview-setup
      // failure (null SurfaceTexture, camera access errors, ...). At minimum, log it.
      e.printStackTrace();
    }
  }
  /**
   * Capture a still picture. This method should be called when we get a response in {@link
   * #mCaptureCallback} from both {@link #lockFocus()}.
   */
  private void captureStillPicture() {
    try {
      final Activity activity = getActivity();
      if (null == activity || null == mCameraDevice) {
        return;
      }

      // Still-capture request targeting the JPEG ImageReader.
      final CaptureRequest.Builder stillBuilder =
          mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
      stillBuilder.addTarget(mImageReader.getSurface());

      // Mirror the preview's AF/AE configuration.
      stillBuilder.set(
          CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
      stillBuilder.set(
          CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

      // Rotate the JPEG to match the current display orientation.
      int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
      stillBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

      CameraCaptureSession.CaptureCallback onCaptured =
          new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(
                CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
              unlockFocus();
              // Hand the captured file's path back to the calling activity.
              Intent resultIntent = new Intent();
              resultIntent.putExtra("resultFilePath", mFile.getAbsolutePath());
              activity.setResult(Activity.RESULT_OK, resultIntent);
              activity.finish();
            }
          };

      mCaptureSession.stopRepeating();
      mCaptureSession.capture(stillBuilder.build(), onCaptured, null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
Example #20
0
  private void yuvBurstTestByCamera(String cameraId) throws Exception {
    // Parameters
    final int MAX_CONVERGENCE_FRAMES = 150; // 5 sec at 30fps
    final long MAX_PREVIEW_RESULT_TIMEOUT_MS = 1000;
    final int BURST_SIZE = 100;
    final float FRAME_DURATION_MARGIN_FRACTION = 0.1f;

    // Find a good preview size (bound to 1080p)
    final Size previewSize = mOrderedPreviewSizes.get(0);

    // Get maximum YUV_420_888 size
    final Size stillSize =
        getSortedSizesForFormat(cameraId, mCameraManager, ImageFormat.YUV_420_888, /*bound*/ null)
            .get(0);

    // Find max pipeline depth and sync latency
    final int maxPipelineDepth =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
    final int maxSyncLatency =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SYNC_MAX_LATENCY);

    // Find minimum frame duration for full-res YUV_420_888
    StreamConfigurationMap config =
        mStaticInfo.getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final long minStillFrameDuration =
        config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);

    // Find suitable target FPS range - as high as possible
    List<Range<Integer>> fpsRanges =
        Arrays.asList(mStaticInfo.getAeAvailableTargetFpsRangesChecked());
    Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
    // Add 0.05 here so Fps like 29.99 evaluated to 30
    int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration + 0.05f);
    boolean foundConstantMaxYUVRange = false;
    boolean foundYUVStreamingRange = false;

    for (Range<Integer> fpsRange : fpsRanges) {
      if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
        foundConstantMaxYUVRange = true;
      }
      if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
        foundYUVStreamingRange = true;
      }
    }

    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (%d, %d) must be supported",
            cameraId, minBurstFps, minBurstFps),
        foundConstantMaxYUVRange);
    assertTrue(
        String.format(
            "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
            cameraId, minBurstFps),
        foundYUVStreamingRange);
    assertTrue(
        String.format(
            "Cam %s: No target FPS range found with minimum FPS above "
                + " 1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
            cameraId, minBurstFps, minStillFrameDuration),
        targetRange.getLower() >= minBurstFps);

    Log.i(
        TAG,
        String.format(
            "Selected frame rate range %d - %d for YUV burst",
            targetRange.getLower(), targetRange.getUpper()));

    // Check if READ_SENSOR_SETTINGS is supported
    final boolean checkSensorSettings =
        mStaticInfo.isCapabilitySupported(
            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);

    // Configure basic preview and burst settings

    CaptureRequest.Builder previewBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder burstBuilder =
        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    burstBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    // Create session and start up preview

    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    ImageDropperListener imageDropper = new ImageDropperListener();

    prepareCaptureAndStartPreview(
        previewBuilder,
        burstBuilder,
        previewSize,
        stillSize,
        ImageFormat.YUV_420_888,
        resultListener,
        /*maxNumImages*/ 3,
        imageDropper);

    // Create burst

    List<CaptureRequest> burst = new ArrayList<>();
    for (int i = 0; i < BURST_SIZE; i++) {
      burst.add(burstBuilder.build());
    }

    // Converge AE/AWB

    int frameCount = 0;
    while (true) {
      CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
      int aeState = result.get(CaptureResult.CONTROL_AE_STATE);
      int awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

      if (DEBUG) {
        Log.d(TAG, "aeState: " + aeState + ". awbState: " + awbState);
      }

      if ((aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
              || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)
          && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
        break;
      }
      frameCount++;
      assertTrue(
          String.format(
              "Cam %s: Can not converge AE and AWB within %d frames",
              cameraId, MAX_CONVERGENCE_FRAMES),
          frameCount < MAX_CONVERGENCE_FRAMES);
    }

    // Lock AF if there's a focuser

    if (mStaticInfo.hasFocuser()) {
      previewBuilder.set(
          CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
      mSession.capture(previewBuilder.build(), resultListener, mHandler);
      previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

      frameCount = 0;
      while (true) {
        CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        int afState = result.get(CaptureResult.CONTROL_AF_STATE);

        if (DEBUG) {
          Log.d(TAG, "afState: " + afState);
        }

        if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
            || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
          break;
        }
        frameCount++;
        assertTrue(
            String.format(
                "Cam %s: Cannot lock AF within %d frames", cameraId, MAX_CONVERGENCE_FRAMES),
            frameCount < MAX_CONVERGENCE_FRAMES);
      }
    }

    // Lock AE/AWB

    previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    CaptureRequest lockedRequest = previewBuilder.build();
    mSession.setRepeatingRequest(lockedRequest, resultListener, mHandler);

    // Wait for first result with locking
    resultListener.drain();
    CaptureResult lockedResult =
        resultListener.getCaptureResultForRequest(lockedRequest, maxPipelineDepth);

    int pipelineDepth = lockedResult.get(CaptureResult.REQUEST_PIPELINE_DEPTH);

    // Then start waiting on results to get the first result that should be synced
    // up, and also fire the burst as soon as possible

    if (maxSyncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
      // The locked result we have is already synchronized so start the burst
      mSession.captureBurst(burst, resultListener, mHandler);
    } else {
      // Need to get a synchronized result, and may need to start burst later to
      // be synchronized correctly

      boolean burstSent = false;

      // Calculate how many requests we need to still send down to camera before we
      // know the settings have settled for the burst

      int numFramesWaited = maxSyncLatency;
      if (numFramesWaited == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
        numFramesWaited = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
      }

      int requestsNeededToSync = numFramesWaited - pipelineDepth;
      for (int i = 0; i < numFramesWaited; i++) {
        if (!burstSent && requestsNeededToSync <= 0) {
          mSession.captureBurst(burst, resultListener, mHandler);
          burstSent = true;
        }
        lockedResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        requestsNeededToSync--;
      }

      assertTrue("Cam " + cameraId + ": Burst failed to fire!", burstSent);
    }

    // Read in locked settings if supported

    long burstExposure = 0;
    long burstFrameDuration = 0;
    int burstSensitivity = 0;
    if (checkSensorSettings) {
      burstExposure = lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
      burstFrameDuration = lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION);
      burstSensitivity = lockedResult.get(CaptureResult.SENSOR_SENSITIVITY);

      assertTrue(
          String.format(
              "Cam %s: Frame duration %d ns too short compared to " + "exposure time %d ns",
              cameraId, burstFrameDuration, burstExposure),
          burstFrameDuration >= burstExposure);

      assertTrue(
          String.format("Cam %s: Exposure time is not valid: %d", cameraId, burstExposure),
          burstExposure > 0);
      assertTrue(
          String.format("Cam %s: Frame duration is not valid: %d", cameraId, burstFrameDuration),
          burstFrameDuration > 0);
      assertTrue(
          String.format("Cam %s: Sensitivity is not valid: %d", cameraId, burstSensitivity),
          burstSensitivity > 0);
    }

    // Process burst results
    int burstIndex = 0;
    CaptureResult burstResult =
        resultListener.getCaptureResultForRequest(burst.get(burstIndex), maxPipelineDepth + 1);
    long prevTimestamp = -1;
    final long frameDurationBound =
        (long) (minStillFrameDuration * (1 + FRAME_DURATION_MARGIN_FRACTION));

    List<Long> frameDurations = new ArrayList<>();

    while (true) {
      // Verify the result
      assertTrue(
          "Cam " + cameraId + ": Result doesn't match expected request",
          burstResult.getRequest() == burst.get(burstIndex));

      // Verify locked settings
      if (checkSensorSettings) {
        long exposure = burstResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        int sensitivity = burstResult.get(CaptureResult.SENSOR_SENSITIVITY);
        assertTrue("Cam " + cameraId + ": Exposure not locked!", exposure == burstExposure);
        assertTrue(
            "Cam " + cameraId + ": Sensitivity not locked!", sensitivity == burstSensitivity);
      }

      // Collect inter-frame durations
      long timestamp = burstResult.get(CaptureResult.SENSOR_TIMESTAMP);
      if (prevTimestamp != -1) {
        long frameDuration = timestamp - prevTimestamp;
        frameDurations.add(frameDuration);
        if (DEBUG) {
          Log.i(
              TAG,
              String.format("Frame %03d    Duration %.2f ms", burstIndex, frameDuration / 1e6));
        }
      }
      prevTimestamp = timestamp;

      // Get next result
      burstIndex++;
      if (burstIndex == BURST_SIZE) break;
      burstResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
    }

    // Verify inter-frame durations

    long meanFrameSum = 0;
    for (Long duration : frameDurations) {
      meanFrameSum += duration;
    }
    float meanFrameDuration = (float) meanFrameSum / frameDurations.size();

    float stddevSum = 0;
    for (Long duration : frameDurations) {
      stddevSum += (duration - meanFrameDuration) * (duration - meanFrameDuration);
    }
    float stddevFrameDuration = (float) Math.sqrt(1.f / (frameDurations.size() - 1) * stddevSum);

    Log.i(
        TAG,
        String.format(
            "Cam %s: Burst frame duration mean: %.1f, stddev: %.1f",
            cameraId, meanFrameDuration, stddevFrameDuration));

    assertTrue(
        String.format(
            "Cam %s: Burst frame duration mean %.1f ns is larger than acceptable, "
                + "expecting below %d ns, allowing below %d",
            cameraId, meanFrameDuration, minStillFrameDuration, frameDurationBound),
        meanFrameDuration <= frameDurationBound);

    // Calculate upper 97.5% bound (assuming durations are normally distributed...)
    float limit95FrameDuration = meanFrameDuration + 2 * stddevFrameDuration;

    // Don't enforce this yet, but warn
    if (limit95FrameDuration > frameDurationBound) {
      Log.w(
          TAG,
          String.format(
              "Cam %s: Standard deviation is too large compared to limit: "
                  + "mean: %.1f ms, stddev: %.1f ms: 95%% bound: %f ms",
              cameraId,
              meanFrameDuration / 1e6,
              stddevFrameDuration / 1e6,
              limit95FrameDuration / 1e6));
    }
  }
  /** Creates a new {@link CameraCaptureSession} for camera preview. */
  private void createCameraPreviewSession() {
    try {
      final SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
      assert surfaceTexture != null;

      // Match the default buffer size to the preview size we selected earlier.
      surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());

      // Surface the preview frames are rendered into for display.
      final Surface previewSurface = new Surface(surfaceTexture);

      // Build a preview request targeting the display surface.
      previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      previewRequestBuilder.addTarget(previewSurface);

      LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());

      // Reader that delivers YUV preview frames to the image listener.
      previewReader =
          ImageReader.newInstance(
              previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
      previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
      previewRequestBuilder.addTarget(previewReader.getSurface());

      // Configure a session that streams to both the display and the frame reader.
      cameraDevice.createCaptureSession(
          Arrays.asList(previewSurface, previewReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(final CameraCaptureSession session) {
              if (cameraDevice == null) {
                // The camera was closed while the session was being configured.
                return;
              }

              // Session is ready: begin the repeating preview request.
              captureSession = session;
              try {
                // Continuous auto-focus and automatic flash for the live preview.
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                previewRequestBuilder.set(
                    CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                previewRequest = previewRequestBuilder.build();
                captureSession.setRepeatingRequest(
                    previewRequest, captureCallback, backgroundHandler);
              } catch (final CameraAccessException e) {
                LOGGER.e(e, "Exception!");
              }
            }

            @Override
            public void onConfigureFailed(final CameraCaptureSession session) {
              showToast("Failed");
            }
          },
          null);
    } catch (final CameraAccessException e) {
      LOGGER.e(e, "Exception!");
    }
  }
 /** Requests automatic flash (AE mode ON_AUTO_FLASH) when the device has a flash unit. */
 private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
   if (!mFlashSupported) {
     return; // Device has no flash; leave the AE mode untouched.
   }
   requestBuilder.set(
       CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
 }
// ---------------------------------------------------------------------------
// Example #23
// ---------------------------------------------------------------------------
  /**
   * Captures a single JPEG still from {@code cameraDevice}, saves it to app-private storage as
   * {@code file_name + ".jpg"}, and — when {@code imageviewer_new_picture} is non-null — shows
   * the saved picture in that view.
   *
   * <p>NOTE(review): this implementation blocks the calling thread for ~2 seconds while waiting
   * for the capture to land in the {@link ImageReader}; do not invoke it on the UI thread.
   *
   * @param cameraDevice an already-opened camera to capture from; closed before returning
   * @param imageviewer_new_picture view to display the captured photo in, or {@code null} to skip
   * @param file_name base file name (without extension) for the saved JPEG
   */
  private static void take_photo_new(
      CameraDevice cameraDevice, ImageView imageviewer_new_picture, String file_name) {
    Context contextHolder = ApplicationContextProvider.getContext();
    CameraManager manager = (CameraManager) contextHolder.getSystemService(Context.CAMERA_SERVICE);

    HandlerThread backgroundThread = new HandlerThread("Camera Background");
    backgroundThread.start();
    final Handler backgroundHandler = new Handler(backgroundThread.getLooper());

    ImageReader reader = null;
    try {
      CameraCharacteristics characteristics =
          manager.getCameraCharacteristics(cameraDevice.getId());

      // Pick the first advertised JPEG size, falling back to 640x480.
      int width = 640;
      int height = 480;
      Size[] jpegSizes =
          characteristics
              .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
              .getOutputSizes(ImageFormat.JPEG);
      if (jpegSizes != null && jpegSizes.length > 0) {
        width = jpegSizes[0].getWidth();
        height = jpegSizes[0].getHeight();
      }

      // Bug fix: the reader must use a supported output size exactly. The previous code
      // created a (width + 1) x (height + 1) reader, which matches no advertised JPEG size.
      reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);

      final CaptureRequest.Builder captureBuilder =
          cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
      captureBuilder.addTarget(reader.getSurface());
      captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
      captureBuilder.set(
          CaptureRequest.COLOR_CORRECTION_MODE, CameraMetadata.COLOR_CORRECTION_MODE_FAST);
      captureBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
      // Removed: set(JPEG_THUMBNAIL_QUALITY, null) — setting null clears a key that was
      // never set, and a stray discarded captureBuilder.build() call.

      List<Surface> outputSurfaces = new ArrayList<Surface>(1);
      outputSurfaces.add(reader.getSurface());

      cameraDevice.createCaptureSession(
          outputSurfaces,
          new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
              try {
                session.capture(captureBuilder.build(), null, null);
              } catch (CameraAccessException e) {
                e.printStackTrace();
              }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
              Log.e("camera", "capture session configuration failed");
            }
          },
          backgroundHandler);

      Log.e("camera", "take picture... sleep");

      // Crude synchronization: give the camera time to configure the session and capture.
      try {
        Thread.sleep(2000);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // Preserve the interrupt status for callers.
      }

      // Drain the captured JPEG bytes; the reader may legitimately have no image yet.
      byte[] bytes = null;
      Image image = null;
      try {
        image = reader.acquireLatestImage();
        if (image != null) {
          ByteBuffer buffer = image.getPlanes()[0].getBuffer();
          bytes = new byte[buffer.capacity()];
          buffer.get(bytes);
        }
      } finally {
        if (image != null) {
          image.close();
        }
      }
      if (bytes == null) {
        Log.e("camera", "no image captured for " + file_name);
        return;
      }

      // Persist the JPEG to app-private storage.
      FileOutputStream outputStream = null;
      try {
        outputStream = contextHolder.openFileOutput(file_name + ".jpg", Context.MODE_PRIVATE);
        outputStream.write(bytes);
        Log.e("camera", "saved picture! " + file_name);

        if (imageviewer_new_picture != null) {
          Bitmap bitmapFound = FileIO.loadBMPPrivate(file_name, Bitmap.CompressFormat.JPEG);
          imageviewer_new_picture.setImageBitmap(bitmapFound);
        }
      } catch (IOException e) {
        e.printStackTrace();
      } finally {
        if (outputStream != null) {
          try {
            outputStream.close();
          } catch (IOException e) {
            e.printStackTrace();
          }
        }
      }
    } catch (CameraAccessException e) {
      e.printStackTrace();
    } finally {
      // Leak fixes: always release the reader, stop the background thread, and close the
      // camera (previously skipped when an exception fired before the save step).
      if (reader != null) {
        reader.close();
      }
      backgroundThread.quitSafely();
      closeCamera(cameraDevice);
    }
  }
// ---------------------------------------------------------------------------
// Example #24
// ---------------------------------------------------------------------------
  /**
   * Capture a still picture. This method should be called when we get a response in the preview
   * capture callback indicating the camera is ready: it plays the flash/shrink animation, then
   * fires a single {@code TEMPLATE_STILL_CAPTURE} request targeting {@code mImageReader}.
   */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void captureStillPicture() {
    try {
      final Activity activity = CameraActivity.this;
      // Cleanup: dropped the dead "activity == null" check — CameraActivity.this can never
      // be null. Only the camera handle needs guarding.
      if (mCamera2Device == null) {
        return;
      }

      // Kick off the "flash" overlay animation.
      mFlashCaptureAnimationView.setVisibility(View.VISIBLE);
      mFlashCaptureSpring.setCurrentValue(0);
      mFlashCaptureSpring.setEndValue(1.0f);

      // Snapshot the live preview and animate it shrinking toward the corner.
      mTextureView.setDrawingCacheEnabled(true);
      Bitmap cameraPreview = mTextureView.getBitmap();
      if (cameraPreview == null) {
        Etils.showToast(CameraActivity.this, "Camera preview bitmap null");
      }
      // setImageBitmap(null) simply clears the view, so passing through a null bitmap is safe.
      mPreviewCapture.setImageBitmap(cameraPreview);
      mPreviewCapture
          .animate()
          .alpha(1.0f)
          .setInterpolator(FAST_OUT_SLOW_IN_INTERPOLATOR)
          .setDuration(1500)
          .withEndAction(
              new Runnable() {
                @Override
                public void run() {
                  mPreviewCapture
                      .animate()
                      .alpha(0.0f)
                      .setInterpolator(FAST_OUT_SLOW_IN_INTERPOLATOR)
                      .scaleX(0.3f)
                      .scaleY(0.3f)
                      .translationX(Etils.dpToPx(300))
                      .translationY(Etils.dpToPx(500))
                      .withEndAction(
                          new Runnable() {
                            @Override
                            public void run() {
                              // Reset the transform so the view is reusable next capture.
                              mPreviewCapture.setScaleX(1.0f);
                              mPreviewCapture.setScaleY(1.0f);
                              mPreviewCapture.setTranslationX(1.0f);
                              mPreviewCapture.setTranslationY(1.0f);
                            }
                          });
                }
              });

      // This is the CaptureRequest.Builder that we use to take a picture.
      final CaptureRequest.Builder captureBuilder =
          mCamera2Device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
      captureBuilder.addTarget(mImageReader.getSurface());

      // Use the same AE and AF modes as the preview.
      captureBuilder.set(
          CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
      setAutoFlash(captureBuilder);

      // Rotate the JPEG to match the current display orientation.
      int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
      captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

      // Bug fix: pin the timestamp format to a fixed locale so the filename pattern does not
      // vary (or use non-ASCII digits) under the user's locale settings.
      SimpleDateFormat dateFormat =
          new SimpleDateFormat("yyyy-MM-dd HH.mm.ss", java.util.Locale.US);
      final String timestampFilename = dateFormat.format(new Date());
      mFile = new File(getExternalFilesDir(null), timestampFilename + ".jpg");

      // Renamed from "CaptureCallback" — the old name shadowed the type and violated
      // lowerCamelCase local-variable naming.
      CameraCaptureSession.CaptureCallback captureCallback =
          new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(
                @NonNull CameraCaptureSession session,
                @NonNull CaptureRequest request,
                @NonNull TotalCaptureResult result) {
              lawg.d("Saved file: " + mFile.toString());
              unlockFocus();
            }
          };

      // Stop the repeating preview, then fire the single still-capture request.
      mCamera2CaptureSession.stopRepeating();
      mCamera2CaptureSession.capture(captureBuilder.build(), captureCallback, mBackgroundHandler);

    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }