@Override
public synchronized void start() {
    theCamera = Camera.open();
    Camera.Parameters myParameters = theCamera.getParameters();
    myParameters.setPreviewSize(theCamPreviewWidth, theCamPreviewHeight);
    myParameters.setPreviewFpsRange(MIN_FPS, MAX_FPS);
    myParameters.setSceneMode(SCENE_MODE_NIGHT);
    myParameters.setFocusMode(FOCUS_MODE_AUTO);
    myParameters.setWhiteBalance(WHITE_BALANCE_AUTO);
    theCamera.setParameters(myParameters);
    theCamera.setDisplayOrientation(DISPLAY_ORIENTATION);
}
public static void setBestPreviewFPS(Camera.Parameters parameters, int minFPS, int maxFPS) {
    List<int[]> supportedPreviewFpsRanges = parameters.getSupportedPreviewFpsRange();
    Log.i(TAG, "Supported FPS ranges: " + toString(supportedPreviewFpsRanges));
    if (supportedPreviewFpsRanges != null && !supportedPreviewFpsRanges.isEmpty()) {
        int[] suitableFPSRange = null;
        for (int[] fpsRange : supportedPreviewFpsRanges) {
            int thisMin = fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int thisMax = fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (thisMin >= minFPS * 1000 && thisMax <= maxFPS * 1000) {
                suitableFPSRange = fpsRange;
                break;
            }
        }
        if (suitableFPSRange == null) {
            Log.i(TAG, "No suitable FPS range?");
        } else {
            int[] currentFpsRange = new int[2];
            parameters.getPreviewFpsRange(currentFpsRange);
            if (Arrays.equals(currentFpsRange, suitableFPSRange)) {
                Log.i(TAG, "FPS range already set to " + Arrays.toString(suitableFPSRange));
            } else {
                Log.i(TAG, "Setting FPS range to " + Arrays.toString(suitableFPSRange));
                parameters.setPreviewFpsRange(
                        suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                        suitableFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
            }
        }
    }
}
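// Hedged usage sketch (not part of the example above): how a helper like
// setBestPreviewFPS() would typically be invoked while configuring a camera.
// The 10/30 values are illustrative assumptions, not values from the example.
// setPreviewFpsRange() works in fps * 1000 units, which is why the helper
// scales minFPS/maxFPS by 1000 before comparing against supported ranges.
Camera camera = Camera.open();
Camera.Parameters parameters = camera.getParameters();
setBestPreviewFPS(parameters, 10, 30); // request a 10-30 fps preview range
camera.setParameters(parameters);      // the new range only takes effect after setParameters()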
/** Configure the camera with the relevant parameters and start the preview. */
private void initCamera() {
    azLog.log("isPreView", "" + isPreview);
    if (mCamera != null && !isPreview) {
        try {
            Camera.Parameters parameters = mCamera.getParameters();
            // Set the preview size
            parameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
            // Set the minimum and maximum preview frames per second
            parameters.setPreviewFpsRange(mFrameMin, mFrameMax);
            // Set the picture format
            parameters.setPictureFormat(mImageFormat);
            // Set the JPEG quality
            parameters.set("jpeg-quality", mImageQuality);
            // Set the picture size
            parameters.setPictureSize(mPictureWidth, mPictureHeight);
            // Apply the parameters; without this call the settings above take no effect
            mCamera.setParameters(parameters);
            // Display the viewfinder through the SurfaceView
            mCamera.setPreviewDisplay(mSurfaceHolder);
            // Start the preview
            mCamera.startPreview();
            azLog.log("AzCamera", "Camera opened");
        } catch (IOException e) {
            e.printStackTrace();
        }
        isPreview = true;
    }
}
private void initCamera() {
    if (!isPreview) {
        camera = Camera.open();
    }
    if (camera != null && !isPreview) {
        streamIt = new StreamIt();
        try {
            Camera.Parameters parameters = camera.getParameters();
            parameters.setPreviewSize(screenWidth, screenHeight); // Set the preview size
            // Preview at 20-30 frames per second (the API expects fps * 1000)
            parameters.setPreviewFpsRange(20000, 30000);
            parameters.setPictureFormat(ImageFormat.NV21); // Set the picture format
            parameters.setPictureSize(screenWidth, screenHeight); // Set the picture size
            camera.setParameters(parameters); // Apply the parameters
            camera.setPreviewDisplay(surfaceHolder); // Display the viewfinder through the SurfaceView
            camera.setPreviewCallback(streamIt); // Set the preview callback
            camera.startPreview(); // Start the preview
            camera.autoFocus(null); // Trigger autofocus
        } catch (Exception e) {
            e.printStackTrace();
        }
        isPreview = true;
        Thread postThread2 = new Thread(new postThread());
        postThread2.start();
        System.out.println("Post thread started");
    }
}
// https://code.google.com/p/google-glass-api/issues/detail?id=232
public void googleGlassXE10WorkAround(Camera mCamera) {
    Camera.Parameters params = mCamera.getParameters();
    params.setPreviewFpsRange(30000, 30000);
    params.setPreviewSize(640, 360);
    mCamera.setParameters(params);
    mCamera.setZoomChangeListener(this);
}
private void SetDesiredCameraParameter() {
    if (mCamera == null) return;
    Camera.Parameters parameters = mCamera.getParameters();
    parameters.setPreviewFpsRange(30000, 30000);
    mCamera.setParameters(parameters);
}
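// Hedged alternative sketch (an assumption, not from the example above):
// hard-coding setPreviewFpsRange(30000, 30000) can make setParameters() throw a
// RuntimeException on devices that do not advertise that exact range, so a more
// defensive variant consults getSupportedPreviewFpsRange() first.
private void setDesiredCameraParameterSafely() {
    if (mCamera == null) return;
    Camera.Parameters parameters = mCamera.getParameters();
    for (int[] range : parameters.getSupportedPreviewFpsRange()) {
        // Only force a fixed 30 fps range if the device actually supports it.
        if (range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] == 30000
                && range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] == 30000) {
            parameters.setPreviewFpsRange(30000, 30000);
            break;
        }
    }
    mCamera.setParameters(parameters);
}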
// Called by native code. Returns true if capturer is started.
//
// Note that this actually opens the camera, which can be a slow operation and
// thus might be done on a background thread, but ViE API needs a
// synchronous success return value so we can't do that.
private synchronized boolean startCapture(int width, int height, int min_mfps, int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" + min_mfps + ":" + max_mfps);
    Throwable error = null;
    try {
        camera = Camera.open(id);
        localPreview = ViERenderer.GetLocalRenderer();
        if (localPreview != null) {
            localPreview.addCallback(this);
            if (localPreview.getSurface() != null && localPreview.getSurface().isValid()) {
                camera.setPreviewDisplay(localPreview);
            }
        } else {
            // No local renderer (we only care about onPreviewFrame() buffers, not a
            // directly-displayed UI element). Camera won't capture without
            // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
            // hand it over to Camera, but never listen for frame-ready callbacks,
            // and never call updateTexImage on it.
            try {
                // "42" because http://goo.gl/KaEn8
                dummySurfaceTexture = new SurfaceTexture(42);
                camera.setPreviewTexture(dummySurfaceTexture);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
        Camera.Parameters parameters = camera.getParameters();
        Log.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
        if (parameters.isVideoStabilizationSupported()) {
            parameters.setVideoStabilization(true);
        }
        parameters.setPreviewSize(width, height);
        parameters.setPreviewFpsRange(min_mfps, max_mfps);
        int format = ImageFormat.NV21;
        parameters.setPreviewFormat(format);
        camera.setParameters(parameters);
        int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        for (int i = 0; i < numCaptureBuffers; i++) {
            camera.addCallbackBuffer(new byte[bufSize]);
        }
        camera.setPreviewCallbackWithBuffer(this);
        camera.startPreview();
        return true;
    } catch (IOException e) {
        error = e;
    } catch (RuntimeException e) {
        error = e;
    }
    Log.e(TAG, "startCapture failed", error);
    if (camera != null) {
        stopCapture();
    }
    return false;
}
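// Worked example (illustrative numbers, not from the snippet above): the
// callback-buffer size follows the standard width * height * bitsPerPixel / 8
// arithmetic. NV21 is 12 bits per pixel, so a 640x480 preview frame needs
// 640 * 480 * 12 / 8 = 460800 bytes per callback buffer.
int bufSize = 640 * 480 * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8; // 460800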
/**
 * Opens the camera and applies the user settings.
 *
 * @throws RuntimeException if the method fails
 */
@SuppressLint("InlinedApi")
private Camera createCamera() {
    int requestedCameraId = getIdForRequestedCamera(mFacing);
    if (requestedCameraId == -1) {
        throw new RuntimeException(mContext.getString(R.string.no_requested_camera));
    }
    Camera camera = Camera.open(requestedCameraId);
    SizePair sizePair = selectSizePair(camera, mRequestedPreviewWidth, mRequestedPreviewHeight, mContext);
    if (sizePair == null) {
        throw new RuntimeException(mContext.getString(R.string.no_suitable_preview_size));
    }
    Size pictureSize = sizePair.pictureSize();
    mPreviewSize = sizePair.previewSize();
    int[] previewFpsRange = selectPreviewFpsRange(camera, mRequestedFps);
    if (previewFpsRange == null) {
        throw new RuntimeException(mContext.getString(R.string.no_suitable_frames_per_second));
    }
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
    parameters.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
    parameters.setPreviewFpsRange(
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    parameters.setPreviewFormat(ImageFormat.NV21);
    setRotation(camera, parameters, requestedCameraId);
    if (mFocusMode != null) {
        if (parameters.getSupportedFocusModes().contains(mFocusMode)) {
            parameters.setFocusMode(mFocusMode);
        } else {
            Log.i(TAG, mContext.getString(R.string.camera_focus_mode) + mFocusMode
                    + mContext.getString(R.string.not_supported));
        }
    }
    // setting mFocusMode to the one set in the params
    mFocusMode = parameters.getFocusMode();
    if (mFlashMode != null) {
        if (parameters.getSupportedFlashModes().contains(mFlashMode)) {
            parameters.setFlashMode(mFlashMode);
        } else {
            Log.i(TAG, mContext.getString(R.string.flash_mode) + mFlashMode
                    + mContext.getString(R.string.not_supported));
        }
    }
    // setting mFlashMode to the one set in the params
    mFlashMode = parameters.getFlashMode();
    camera.setParameters(parameters);
    // Four frame buffers are needed for working with the camera:
    //
    // one for the frame that is currently being executed upon in doing detection
    // one for the next pending frame to process immediately upon completing detection
    // two for the frames that the camera uses to populate future preview images
    camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize));
    return camera;
}
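// selectPreviewFpsRange() is not shown above. A common approach (hedged sketch of
// how such a helper typically works, not necessarily this project's implementation)
// is to pick the supported range closest to the requested rate, remembering that
// Camera.Parameters reports rates scaled by 1000 (30 fps -> 30000).
private static int[] selectPreviewFpsRange(Camera camera, float desiredPreviewFps) {
    int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f);
    int[] selectedFpsRange = null;
    int minDiff = Integer.MAX_VALUE;
    for (int[] range : camera.getParameters().getSupportedPreviewFpsRange()) {
        int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
        int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
        int diff = Math.abs(deltaMin) + Math.abs(deltaMax);
        if (diff < minDiff) {
            selectedFpsRange = range;
            minDiff = diff;
        }
    }
    return selectedFpsRange;
}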
// (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
private void startPreviewOnCameraThread(int width, int height, int framerate) {
    checkIsOnCameraThread();
    Logging.d(TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
    if (camera == null) {
        Logging.e(TAG, "Calling startPreviewOnCameraThread on stopped camera.");
        return;
    }
    requestedWidth = width;
    requestedHeight = height;
    requestedFramerate = framerate;
    // Find closest supported format for |width| x |height| @ |framerate|.
    final Camera.Parameters parameters = camera.getParameters();
    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
    final Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPreviewSizes(), width, height);
    final CaptureFormat captureFormat = new CaptureFormat(
            previewSize.width, previewSize.height,
            range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    // Check if we are already using this capture format, then we don't need to do anything.
    if (captureFormat.equals(this.captureFormat)) {
        return;
    }
    // Update camera parameters.
    Logging.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
    if (parameters.isVideoStabilizationSupported()) {
        parameters.setVideoStabilization(true);
    }
    // Note: setRecordingHint(true) actually decreases frame rate on N5.
    // parameters.setRecordingHint(true);
    if (captureFormat.maxFramerate > 0) {
        parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
    }
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    parameters.setPreviewFormat(captureFormat.imageFormat);
    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
    // as a workaround for an aspect ratio problem on Nexus 7.
    final Camera.Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPictureSizes(), width, height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    // Temporarily stop preview if it's already running.
    if (this.captureFormat != null) {
        camera.stopPreview();
        dropNextFrame = true;
        // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
        // queue, but sometimes we receive a frame with the old resolution after this call anyway.
        camera.setPreviewCallbackWithBuffer(null);
    }
    // (Re)start preview.
    Logging.d(TAG, "Start capturing: " + captureFormat);
    this.captureFormat = captureFormat;
    List<String> focusModes = parameters.getSupportedFocusModes();
    if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
    if (!isCapturingToTexture) {
        videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
        camera.setPreviewCallbackWithBuffer(this);
    }
    camera.startPreview();
}
public void openCamera() {
    if (!isCreated || camera != null) {
        return;
    }
    // Open camera //
    try {
        camera = Camera.open(currentCameraId);
    } catch (NoSuchMethodError noSuchMethodError) {
        camera = Camera.open();
    }
    try {
        camera.setPreviewDisplay(getHolder());
        camera.setDisplayOrientation(90);
        camera.setPreviewCallback(cameraPreviewCallback);
    } catch (IOException ignore) {
        ignore.printStackTrace();
    } catch (NullPointerException ignore) {
        ignore.printStackTrace();
    }
    // get camera parameters //
    final Camera.Parameters parameters = camera.getParameters();
    // Get Preview Size and FPS //
    final List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    final List<int[]> supportedPreviewFpsRange = parameters.getSupportedPreviewFpsRange();
    // set FPS //
    int[] chosenFPSRange = supportedPreviewFpsRange.get(0);
    for (int[] FPSRange : supportedPreviewFpsRange) {
        if (FPS > FPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
                && FPS < FPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]) {
            chosenFPSRange = FPSRange;
            break;
        }
    }
    parameters.setPreviewFpsRange(
            chosenFPSRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
            chosenFPSRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
    // set Preview Size //
    if (frameSize != null) {
        parameters.setPreviewSize(frameSize.width, frameSize.height);
    } else {
        int firstElementWidth = supportedPreviewSizes.get(0).width;
        int lastElementWidth = supportedPreviewSizes.get(supportedPreviewSizes.size() - 1).width;
        Camera.Size minPreviewSize = (lastElementWidth > firstElementWidth)
                ? supportedPreviewSizes.get(0)
                : supportedPreviewSizes.get(supportedPreviewSizes.size() - 1);
        parameters.setPreviewSize(minPreviewSize.width, minPreviewSize.height);
    }
    // Set parameters and start preview //
    try {
        camera.setParameters(parameters);
        camera.startPreview();
    } catch (RuntimeException ignore) {
        ignore.printStackTrace();
    }
}
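// Hedged note on the FPS loop above: the strict '>' / '<' comparisons reject ranges
// whose endpoint equals the requested rate (for example a fixed [30000, 30000] range).
// Assuming the FPS field holds a plain frames-per-second value, an inclusive,
// explicitly scaled check would look like this (sketch only):
int desiredFpsScaled = FPS * 1000; // Camera.Parameters uses fps * 1000 units
for (int[] fpsRange : supportedPreviewFpsRange) {
    if (desiredFpsScaled >= fpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
            && desiredFpsScaled <= fpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]) {
        chosenFPSRange = fpsRange;
        break;
    }
}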
// Returns true on success, false otherwise.
@CalledByNative
public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" + frameRate + "fps");
    try {
        mCamera = Camera.open(mId);
    } catch (RuntimeException ex) {
        Log.e(TAG, "allocate: Camera.open: " + ex);
        return false;
    }
    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(mId, cameraInfo);
    mCameraOrientation = cameraInfo.orientation;
    mCameraFacing = cameraInfo.facing;
    mDeviceOrientation = getDeviceOrientation();
    Log.d(TAG, "allocate: orientation dev=" + mDeviceOrientation
            + ", cam=" + mCameraOrientation + ", facing=" + mCameraFacing);
    Camera.Parameters parameters = mCamera.getParameters();
    // getSupportedPreviewFpsRange() returns a List with at least one
    // element, but when camera is in bad state, it can return null pointer.
    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
    if (listFpsRange == null || listFpsRange.size() == 0) {
        Log.e(TAG, "allocate: no fps range found");
        return false;
    }
    int frameRateInMs = frameRate * 1000;
    // Use the first range as default.
    int[] fpsMinMax = listFpsRange.get(0);
    int newFrameRate = (fpsMinMax[0] + 999) / 1000;
    for (int[] fpsRange : listFpsRange) {
        if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
            fpsMinMax = fpsRange;
            newFrameRate = frameRate;
            break;
        }
    }
    frameRate = newFrameRate;
    Log.d(TAG, "allocate: fps set to " + frameRate);
    // Calculate size.
    List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
    int minDiff = Integer.MAX_VALUE;
    int matchedWidth = width;
    int matchedHeight = height;
    for (Camera.Size size : listCameraSize) {
        int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
        Log.d(TAG, "allocate: supported (" + size.width + ", " + size.height + "), diff=" + diff);
        // TODO(wjia): Remove this hack (forcing width to be multiple
        // of 32) by supporting stride in video frame buffer.
        // Right now, VideoCaptureController requires compact YV12
        // (i.e., with no padding).
        if (diff < minDiff && (size.width % 32 == 0)) {
            minDiff = diff;
            matchedWidth = size.width;
            matchedHeight = size.height;
        }
    }
    if (minDiff == Integer.MAX_VALUE) {
        Log.e(TAG, "allocate: can not find a multiple-of-32 resolution");
        return false;
    }
    mCaptureFormat = new CaptureFormat(matchedWidth, matchedHeight, frameRate, BuggyDeviceHack.getImageFormat());
    // Hack to avoid certain capture resolutions under a minimum one,
    // see http://crbug.com/305294
    BuggyDeviceHack.applyMinDimensions(mCaptureFormat);
    Log.d(TAG, "allocate: matched (" + mCaptureFormat.mWidth + "x" + mCaptureFormat.mHeight + ")");
    if (parameters.isVideoStabilizationSupported()) {
        Log.d(TAG, "Image stabilization supported, currently: "
                + parameters.getVideoStabilization() + ", setting it.");
        parameters.setVideoStabilization(true);
    } else {
        Log.d(TAG, "Image stabilization not supported.");
    }
    parameters.setPreviewSize(mCaptureFormat.mWidth, mCaptureFormat.mHeight);
    parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
    parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
    mCamera.setParameters(parameters);
    // Set SurfaceTexture. Android Capture needs a SurfaceTexture even if
    // it is not going to be used.
    mGlTextures = new int[1];
    // Generate one texture pointer and bind it as an external texture.
    GLES20.glGenTextures(1, mGlTextures, 0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
    // No mip-mapping with camera source.
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // Clamp to edge is only option.
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
    mSurfaceTexture.setOnFrameAvailableListener(null);
    try {
        mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException ex) {
        Log.e(TAG, "allocate: " + ex);
        return false;
    }
    int bufSize = mCaptureFormat.mWidth * mCaptureFormat.mHeight
            * ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat) / 8;
    for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
        byte[] buffer = new byte[bufSize];
        mCamera.addCallbackBuffer(buffer);
    }
    mExpectedFrameSize = bufSize;
    return true;
}
/**
 * Set the legacy parameters using the {@link LegacyRequest legacy request}.
 *
 * <p>The legacy request's parameters are changed as a side effect of calling this method.
 *
 * @param legacyRequest a non-{@code null} legacy request
 */
public static void convertRequestMetadata(LegacyRequest legacyRequest) {
    CameraCharacteristics characteristics = legacyRequest.characteristics;
    CaptureRequest request = legacyRequest.captureRequest;
    Size previewSize = legacyRequest.previewSize;
    Camera.Parameters params = legacyRequest.parameters;
    Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    /*
     * scaler.cropRegion
     */
    ParameterUtils.ZoomData zoomData;
    {
        zoomData = ParameterUtils.convertScalerCropRegion(
                activeArray, request.get(SCALER_CROP_REGION), previewSize, params);
        if (params.isZoomSupported()) {
            params.setZoom(zoomData.zoomIndex);
        } else if (VERBOSE) {
            Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
        }
    }

    /*
     * colorCorrection.*
     */
    // colorCorrection.aberrationMode
    {
        int aberrationMode = ParamsUtils.getOrDefault(request, COLOR_CORRECTION_ABERRATION_MODE,
                /*defaultValue*/ COLOR_CORRECTION_ABERRATION_MODE_FAST);
        if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported "
                    + "colorCorrection.aberrationMode = " + aberrationMode);
        }
    }

    /*
     * control.ae*
     */
    // control.aeAntibandingMode
    {
        String legacyMode;
        Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
        if (antiBandingMode != null) {
            legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
        } else {
            legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(),
                    new String[] {
                        Parameters.ANTIBANDING_AUTO,
                        Parameters.ANTIBANDING_OFF,
                        Parameters.ANTIBANDING_50HZ,
                        Parameters.ANTIBANDING_60HZ,
                    });
        }
        if (legacyMode != null) {
            params.setAntibanding(legacyMode);
        }
    }

    /*
     * control.aeRegions, afRegions
     */
    {
        // aeRegions
        {
            // Use aeRegions if available, fall back to using awbRegions if present
            MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
            if (request.get(CONTROL_AWB_REGIONS) != null) {
                Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not "
                        + "supported, ignoring value");
            }
            int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
            List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, aeRegions, maxNumMeteringAreas, /*regionName*/ "AE");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumMeteringAreas > 0) {
                params.setMeteringAreas(meteringAreaList);
            }
        }

        // afRegions
        {
            MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
            int maxNumFocusAreas = params.getMaxNumFocusAreas();
            List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(
                    activeArray, zoomData, afRegions, maxNumFocusAreas, /*regionName*/ "AF");
            // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
            if (maxNumFocusAreas > 0) {
                params.setFocusAreas(focusAreaList);
            }
        }
    }

    // control.aeTargetFpsRange
    Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
    if (aeFpsRange != null) {
        int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);
        // TODO - Should we enforce that all HAL1 devices must include (30, 30) FPS range?
        boolean supported = false;
        for (int[] range : params.getSupportedPreviewFpsRange()) {
            if (legacyFps[0] == range[0] && legacyFps[1] == range[1]) {
                supported = true;
                break;
            }
        }
        if (supported) {
            params.setPreviewFpsRange(legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                    legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
        } else {
            Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
        }
    }

    /*
     * control
     */
    // control.aeExposureCompensation
    {
        Range<Integer> compensationRange =
                characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
        int compensation = ParamsUtils.getOrDefault(request,
                CONTROL_AE_EXPOSURE_COMPENSATION, /*defaultValue*/ 0);
        if (!compensationRange.contains(compensation)) {
            Log.w(TAG, "convertRequestMetadata - control.aeExposureCompensation "
                    + "is out of range, ignoring value");
            compensation = 0;
        }
        params.setExposureCompensation(compensation);
    }

    // control.aeLock
    {
        Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/ false,
                params.isAutoExposureLockSupported(), /*allowedValue*/ false);
        if (aeLock != null) {
            params.setAutoExposureLock(aeLock);
        }
        if (VERBOSE) {
            Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
        }
        // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
    }

    // control.aeMode, flash.mode
    mapAeAndFlashMode(request, /*out*/ params);

    // control.afMode
    {
        int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE,
                /*defaultValue*/ CONTROL_AF_MODE_OFF);
        String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode,
                params.getSupportedFocusModes());
        if (focusMode != null) {
            params.setFocusMode(focusMode);
        }
        if (VERBOSE) {
            Log.v(TAG, "convertRequestToMetadata - control.afMode "
                    + afMode + " mapped to " + focusMode);
        }
    }

    // control.awbMode
    {
        Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE,
                /*defaultValue*/ CONTROL_AWB_MODE_AUTO,
                params.getSupportedWhiteBalance() != null,
                /*allowedValue*/ CONTROL_AWB_MODE_AUTO);
        String whiteBalanceMode = null;
        if (awbMode != null) { // null iff AWB is not supported by camera1 api
            whiteBalanceMode = convertAwbModeToLegacy(awbMode);
            params.setWhiteBalance(whiteBalanceMode);
        }
        if (VERBOSE) {
            Log.v(TAG, "convertRequestToMetadata - control.awbMode "
                    + awbMode + " mapped to " + whiteBalanceMode);
        }
    }

    // control.awbLock
    {
        Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/ false,
                params.isAutoWhiteBalanceLockSupported(), /*allowedValue*/ false);
        if (awbLock != null) {
            params.setAutoWhiteBalanceLock(awbLock);
        }
        // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
    }

    // control.captureIntent
    {
        int captureIntent = ParamsUtils.getOrDefault(request, CONTROL_CAPTURE_INTENT,
                /*defaultValue*/ CONTROL_CAPTURE_INTENT_PREVIEW);
        captureIntent = filterSupportedCaptureIntent(captureIntent);
        params.setRecordingHint(captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD
                || captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
    }

    // control.videoStabilizationMode
    {
        Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
                /*defaultValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF,
                params.isVideoStabilizationSupported(),
                /*allowedValue*/ CONTROL_VIDEO_STABILIZATION_MODE_OFF);
        if (stabMode != null) {
            params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
        }
    }

    // lens.focusDistance
    {
        boolean infinityFocusSupported =
                ListUtils.listContains(params.getSupportedFocusModes(), Parameters.FOCUS_MODE_INFINITY);
        Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE, /*defaultValue*/ 0f,
                infinityFocusSupported, /*allowedValue*/ 0f);
        if (focusDistance == null || focusDistance != 0f) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring android.lens.focusDistance "
                    + infinityFocusSupported + ", only 0.0f is supported");
        }
    }

    // control.sceneMode, control.mode
    {
        // TODO: Map FACE_PRIORITY scene mode to face detection.
        if (params.getSupportedSceneModes() != null) {
            int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE,
                    /*defaultValue*/ CONTROL_MODE_AUTO);
            String modeToSet;
            switch (controlMode) {
                case CONTROL_MODE_USE_SCENE_MODE: {
                    int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE,
                            /*defaultValue*/ CONTROL_SCENE_MODE_DISABLED);
                    String legacySceneMode = LegacyMetadataMapper.convertSceneModeToLegacy(sceneMode);
                    if (legacySceneMode != null) {
                        modeToSet = legacySceneMode;
                    } else {
                        modeToSet = Parameters.SCENE_MODE_AUTO;
                        Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
                    }
                    break;
                }
                case CONTROL_MODE_AUTO: {
                    modeToSet = Parameters.SCENE_MODE_AUTO;
                    break;
                }
                default: {
                    Log.w(TAG, "Control mode " + controlMode + " is unsupported, defaulting to AUTO");
                    modeToSet = Parameters.SCENE_MODE_AUTO;
                }
            }
            params.setSceneMode(modeToSet);
        }
    }

    // control.effectMode
    {
        if (params.getSupportedColorEffects() != null) {
            int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE,
                    /*defaultValue*/ CONTROL_EFFECT_MODE_OFF);
            String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
            if (legacyEffectMode != null) {
                params.setColorEffect(legacyEffectMode);
            } else {
                params.setColorEffect(Parameters.EFFECT_NONE);
                Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
            }
        }
    }

    /*
     * sensor
     */
    // sensor.testPattern
    {
        int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE,
                /*defaultValue*/ SENSOR_TEST_PATTERN_MODE_OFF);
        if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
            Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode "
                    + testPatternMode + "; only OFF is supported");
        }
    }

    /*
     * jpeg.*
     */
    // jpeg.gpsLocation
    {
        Location location = request.get(JPEG_GPS_LOCATION);
        if (location != null) {
            if (checkForCompleteGpsData(location)) {
                params.setGpsAltitude(location.getAltitude());
                params.setGpsLatitude(location.getLatitude());
                params.setGpsLongitude(location.getLongitude());
                params.setGpsProcessingMethod(location.getProvider().toUpperCase());
                params.setGpsTimestamp(location.getTime());
            } else {
                Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
            }
        } else {
            params.removeGpsData();
        }
    }

    // jpeg.orientation
    {
        Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
        params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION,
                (orientation == null) ? 0 : orientation));
    }

    // jpeg.quality
    {
        params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailQuality
    {
        params.setJpegThumbnailQuality(
                0xFF & ParamsUtils.getOrDefault(request, JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
    }

    // jpeg.thumbnailSize
    {
        List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();
        if (sizes != null && sizes.size() > 0) {
            Size s = request.get(JPEG_THUMBNAIL_SIZE);
            boolean invalidSize = (s == null) ? false
                    : !ParameterUtils.containsSize(sizes, s.getWidth(), s.getHeight());
            if (invalidSize) {
                Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
            }
            if (s == null || invalidSize) {
                // (0,0) = "no thumbnail" in Camera API 1
                params.setJpegThumbnailSize(/*width*/ 0, /*height*/ 0);
            } else {
                params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
            }
        }
    }

    /*
     * noiseReduction.*
     */
    // noiseReduction.mode
    {
        int mode = ParamsUtils.getOrDefault(request, NOISE_REDUCTION_MODE,
                /*defaultValue*/ NOISE_REDUCTION_MODE_FAST);
        if (mode != NOISE_REDUCTION_MODE_FAST) {
            Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported "
                    + "noiseReduction.mode = " + mode);
        }
    }
}
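// convertAeFpsRangeToLegacy() is referenced above but not shown. The essential step
// is converting the camera2 Range<Integer> (plain frames per second) into the
// fps * 1000 units that Camera.Parameters.setPreviewFpsRange() expects.
// Hedged sketch of such a conversion (assumes android.util.Range is imported):
private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    int[] legacyFps = new int[2];
    legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower() * 1000;
    legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper() * 1000;
    return legacyFps;
}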