static Range<Rational> scaleRange(Range<Rational> range, int num, int den) {
    if (num == den) {
        return range;
    }
    return Range.create(
            scaleRatio(range.getLower(), num, den),
            scaleRatio(range.getUpper(), num, den));
}
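// Illustrative usage sketch (not part of the original file): scaling a Rational range by 2/3.
// Assumes android.util.Range, android.util.Rational, and the scaleRatio() helper used above are
// available; the resulting bounds noted in the comment are what the helper is expected to
// produce, not verified output.
static void scaleRangeExample() {
    Range<Rational> fps = Range.create(new Rational(15, 1), new Rational(30, 1));
    // Scale both endpoints by 2/3: roughly [10, 20].
    Range<Rational> scaled = scaleRange(fps, 2, 3);
}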
/**
 * Returns the intersection of two sets of non-intersecting ranges.
 *
 * @param one a sorted set of non-intersecting ranges in ascending order
 * @param another another sorted set of non-intersecting ranges in ascending order
 * @return the intersection of the two sets, sorted in ascending order
 */
public static <T extends Comparable<? super T>> Range<T>[] intersectSortedDistinctRanges(
        Range<T>[] one, Range<T>[] another) {
    int ix = 0;
    Vector<Range<T>> result = new Vector<Range<T>>();
    for (Range<T> range : another) {
        // Skip ranges in |one| that end before |range| starts.
        while (ix < one.length
                && one[ix].getUpper().compareTo(range.getLower()) < 0) {
            ++ix;
        }
        // Collect every range in |one| that ends inside |range|.
        while (ix < one.length
                && one[ix].getUpper().compareTo(range.getUpper()) < 0) {
            result.add(range.intersect(one[ix]));
            ++ix;
        }
        if (ix == one.length) {
            break;
        }
        // Handle the range in |one| that extends past the end of |range|.
        if (one[ix].getLower().compareTo(range.getUpper()) <= 0) {
            result.add(range.intersect(one[ix]));
        }
    }
    return result.toArray(new Range[result.size()]);
}
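// Illustrative usage sketch (not part of the original file): intersecting two sorted,
// non-overlapping range sets. With inputs {[1,3], [5,9]} and {[2,6], [8,10]} the expected
// result is {[2,3], [5,6], [8,9]}. The raw Range[] arrays trigger an unchecked-conversion
// warning, matching how the method itself builds its result.
static void intersectSortedDistinctRangesExample() {
    Range<Integer>[] one = new Range[] { Range.create(1, 3), Range.create(5, 9) };
    Range<Integer>[] another = new Range[] { Range.create(2, 6), Range.create(8, 10) };
    Range<Integer>[] common = intersectSortedDistinctRanges(one, another);
}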
private void yuvBurstTestByCamera(String cameraId) throws Exception {
    // Parameters
    final int MAX_CONVERGENCE_FRAMES = 150; // 5 sec at 30fps
    final long MAX_PREVIEW_RESULT_TIMEOUT_MS = 1000;
    final int BURST_SIZE = 100;
    final float FRAME_DURATION_MARGIN_FRACTION = 0.1f;

    // Find a good preview size (bound to 1080p)
    final Size previewSize = mOrderedPreviewSizes.get(0);

    // Get maximum YUV_420_888 size
    final Size stillSize = getSortedSizesForFormat(
            cameraId, mCameraManager, ImageFormat.YUV_420_888, /*bound*/ null).get(0);

    // Find max pipeline depth and sync latency
    final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
            CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
    final int maxSyncLatency = mStaticInfo.getCharacteristics().get(
            CameraCharacteristics.SYNC_MAX_LATENCY);

    // Find minimum frame duration for full-res YUV_420_888
    StreamConfigurationMap config = mStaticInfo.getCharacteristics().get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final long minStillFrameDuration =
            config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, stillSize);

    // Find suitable target FPS range - as high as possible
    List<Range<Integer>> fpsRanges =
            Arrays.asList(mStaticInfo.getAeAvailableTargetFpsRangesChecked());
    Range<Integer> targetRange = mStaticInfo.getAeMaxTargetFpsRange();
    // Add 0.05 here so FPS values like 29.99 evaluate to 30
    int minBurstFps = (int) Math.floor(1e9 / minStillFrameDuration + 0.05f);
    boolean foundConstantMaxYUVRange = false;
    boolean foundYUVStreamingRange = false;
    for (Range<Integer> fpsRange : fpsRanges) {
        if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
            foundConstantMaxYUVRange = true;
        }
        if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
            foundYUVStreamingRange = true;
        }
    }

    assertTrue(
            String.format(
                    "Cam %s: Target FPS range of (%d, %d) must be supported",
                    cameraId, minBurstFps, minBurstFps),
            foundConstantMaxYUVRange);
    assertTrue(
            String.format(
                    "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
                    cameraId, minBurstFps),
            foundYUVStreamingRange);
    assertTrue(
            String.format(
                    "Cam %s: No target FPS range found with minimum FPS above "
                            + "1/minFrameDuration (%d fps, duration %d ns) for full-resolution YUV",
                    cameraId, minBurstFps, minStillFrameDuration),
            targetRange.getLower() >= minBurstFps);
    Log.i(TAG, String.format(
            "Selected frame rate range %d - %d for YUV burst",
            targetRange.getLower(), targetRange.getUpper()));

    // Check if READ_SENSOR_SETTINGS is supported
    final boolean checkSensorSettings = mStaticInfo.isCapabilitySupported(
            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);

    // Configure basic preview and burst settings
    CaptureRequest.Builder previewBuilder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    CaptureRequest.Builder burstBuilder =
            mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    burstBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    burstBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    // Create session and start up preview
    SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
    ImageDropperListener imageDropper = new ImageDropperListener();

    prepareCaptureAndStartPreview(
            previewBuilder, burstBuilder,
            previewSize, stillSize,
            ImageFormat.YUV_420_888, resultListener,
            /*maxNumImages*/ 3, imageDropper);

    // Create burst
    List<CaptureRequest> burst = new ArrayList<>();
    for (int i = 0; i < BURST_SIZE; i++) {
        burst.add(burstBuilder.build());
    }

    // Converge AE/AWB
    int frameCount = 0;
    while (true) {
        CaptureResult result = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
        int aeState = result.get(CaptureResult.CONTROL_AE_STATE);
        int awbState = result.get(CaptureResult.CONTROL_AWB_STATE);

        if (DEBUG) {
            Log.d(TAG, "aeState: " + aeState + ". awbState: " + awbState);
        }

        if ((aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED)
                && awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
            break;
        }

        frameCount++;
        assertTrue(
                String.format(
                        "Cam %s: Cannot converge AE and AWB within %d frames",
                        cameraId, MAX_CONVERGENCE_FRAMES),
                frameCount < MAX_CONVERGENCE_FRAMES);
    }

    // Lock AF if there's a focuser
    if (mStaticInfo.hasFocuser()) {
        previewBuilder.set(
                CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
        mSession.capture(previewBuilder.build(), resultListener, mHandler);
        previewBuilder.set(
                CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        frameCount = 0;
        while (true) {
            CaptureResult result =
                    resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
            int afState = result.get(CaptureResult.CONTROL_AF_STATE);

            if (DEBUG) {
                Log.d(TAG, "afState: " + afState);
            }

            if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                    || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                break;
            }

            frameCount++;
            assertTrue(
                    String.format(
                            "Cam %s: Cannot lock AF within %d frames",
                            cameraId, MAX_CONVERGENCE_FRAMES),
                    frameCount < MAX_CONVERGENCE_FRAMES);
        }
    }

    // Lock AE/AWB
    previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    previewBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);

    CaptureRequest lockedRequest = previewBuilder.build();
    mSession.setRepeatingRequest(lockedRequest, resultListener, mHandler);

    // Wait for first result with locking
    resultListener.drain();
    CaptureResult lockedResult =
            resultListener.getCaptureResultForRequest(lockedRequest, maxPipelineDepth);

    int pipelineDepth = lockedResult.get(CaptureResult.REQUEST_PIPELINE_DEPTH);

    // Then start waiting on results to get the first result that should be synced
    // up, and also fire the burst as soon as possible

    if (maxSyncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
        // The locked result we have is already synchronized, so start the burst
        mSession.captureBurst(burst, resultListener, mHandler);
    } else {
        // Need to get a synchronized result, and may need to start the burst later to
        // be synchronized correctly

        boolean burstSent = false;

        // Calculate how many requests we still need to send down to the camera before we
        // know the settings have settled for the burst

        int numFramesWaited = maxSyncLatency;
        if (numFramesWaited == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
            numFramesWaited = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
        }

        int requestsNeededToSync = numFramesWaited - pipelineDepth;
        for (int i = 0; i < numFramesWaited; i++) {
            if (!burstSent && requestsNeededToSync <= 0) {
                mSession.captureBurst(burst, resultListener, mHandler);
                burstSent = true;
            }
            lockedResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
            requestsNeededToSync--;
        }

        assertTrue("Cam " + cameraId + ": Burst failed to fire!", burstSent);
    }

    // Read in locked settings if supported
    long burstExposure = 0;
    long burstFrameDuration = 0;
    int burstSensitivity = 0;
    if (checkSensorSettings) {
        burstExposure = lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
        burstFrameDuration = lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION);
        burstSensitivity = lockedResult.get(CaptureResult.SENSOR_SENSITIVITY);

        assertTrue(
                String.format(
                        "Cam %s: Frame duration %d ns too short compared to "
                                + "exposure time %d ns",
                        cameraId, burstFrameDuration, burstExposure),
                burstFrameDuration >= burstExposure);

        assertTrue(
                String.format("Cam %s: Exposure time is not valid: %d", cameraId, burstExposure),
                burstExposure > 0);
        assertTrue(
                String.format(
                        "Cam %s: Frame duration is not valid: %d", cameraId, burstFrameDuration),
                burstFrameDuration > 0);
        assertTrue(
                String.format("Cam %s: Sensitivity is not valid: %d", cameraId, burstSensitivity),
                burstSensitivity > 0);
    }

    // Process burst results
    int burstIndex = 0;
    CaptureResult burstResult = resultListener.getCaptureResultForRequest(
            burst.get(burstIndex), maxPipelineDepth + 1);
    long prevTimestamp = -1;
    final long frameDurationBound =
            (long) (minStillFrameDuration * (1 + FRAME_DURATION_MARGIN_FRACTION));

    List<Long> frameDurations = new ArrayList<>();

    while (true) {
        // Verify the result
        assertTrue(
                "Cam " + cameraId + ": Result doesn't match expected request",
                burstResult.getRequest() == burst.get(burstIndex));

        // Verify locked settings
        if (checkSensorSettings) {
            long exposure = burstResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
            int sensitivity = burstResult.get(CaptureResult.SENSOR_SENSITIVITY);
            assertTrue("Cam " + cameraId + ": Exposure not locked!", exposure == burstExposure);
            assertTrue(
                    "Cam " + cameraId + ": Sensitivity not locked!",
                    sensitivity == burstSensitivity);
        }

        // Collect inter-frame durations
        long timestamp = burstResult.get(CaptureResult.SENSOR_TIMESTAMP);
        if (prevTimestamp != -1) {
            long frameDuration = timestamp - prevTimestamp;
            frameDurations.add(frameDuration);
            if (DEBUG) {
                Log.i(TAG, String.format(
                        "Frame %03d Duration %.2f ms", burstIndex, frameDuration / 1e6));
            }
        }
        prevTimestamp = timestamp;

        // Get next result
        burstIndex++;
        if (burstIndex == BURST_SIZE) break;
        burstResult = resultListener.getCaptureResult(MAX_PREVIEW_RESULT_TIMEOUT_MS);
    }

    // Verify inter-frame durations
    long meanFrameSum = 0;
    for (Long duration : frameDurations) {
        meanFrameSum += duration;
    }
    float meanFrameDuration = (float) meanFrameSum / frameDurations.size();

    float stddevSum = 0;
    for (Long duration : frameDurations) {
        stddevSum += (duration - meanFrameDuration) * (duration - meanFrameDuration);
    }
    float stddevFrameDuration =
            (float) Math.sqrt(1.f / (frameDurations.size() - 1) * stddevSum);

    Log.i(TAG, String.format(
            "Cam %s: Burst frame duration mean: %.1f, stddev: %.1f",
            cameraId, meanFrameDuration, stddevFrameDuration));

    assertTrue(
            String.format(
                    "Cam %s: Burst frame duration mean %.1f ns is larger than acceptable, "
                            + "expecting below %d ns, allowing below %d",
                    cameraId, meanFrameDuration, minStillFrameDuration, frameDurationBound),
            meanFrameDuration <= frameDurationBound);

    // Calculate upper 97.5% bound (assuming durations are normally distributed...)
    float limit95FrameDuration = meanFrameDuration + 2 * stddevFrameDuration;

    // Don't enforce this yet, but warn
    if (limit95FrameDuration > frameDurationBound) {
        Log.w(TAG, String.format(
                "Cam %s: Standard deviation is too large compared to limit: "
                        + "mean: %.1f ms, stddev: %.1f ms: 95%% bound: %f ms",
                cameraId,
                meanFrameDuration / 1e6,
                stddevFrameDuration / 1e6,
                limit95FrameDuration / 1e6));
    }
}
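// Hypothetical, self-contained sketch (not part of the CTS test above) of the burst-timing
// decision yuvBurstTestByCamera() makes: given the reported pipeline depth and the device's
// SYNC_MAX_LATENCY, how many further preview results should be drained before the locked
// settings are guaranteed to apply and the burst can be fired. The constant value 8 for the
// unknown-latency fallback is an assumption for illustration; the test uses its own
// NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY field, and it also keeps draining results after the
// burst is sent, which this simplified sketch does not model.
static int framesToWaitBeforeBurst(int maxSyncLatency, int pipelineDepth) {
    final int ASSUMED_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8; // assumed value, see note above
    if (maxSyncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
        return 0; // the locked result is already synchronized; fire the burst immediately
    }
    int numFramesWaited = maxSyncLatency;
    if (numFramesWaited == CameraCharacteristics.SYNC_MAX_LATENCY_UNKNOWN) {
        numFramesWaited = ASSUMED_FRAMES_WAITED_FOR_UNKNOWN_LATENCY;
    }
    // The pipeline already holds pipelineDepth requests carrying the locked settings.
    return Math.max(numFramesWaited - pipelineDepth, 0);
}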
static Range<Integer> alignRange(Range<Integer> range, int align) {
    return range.intersect(
            divUp(range.getLower(), align) * align,
            (range.getUpper() / align) * align);
}
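// Illustrative usage sketch (not part of the original file): aligning a range to multiples of
// 16. The lower bound is rounded up via the class's divUp() helper and the upper bound rounded
// down, so for [100, 1000] and align = 16 the expected result is [112, 992].
static void alignRangeExample() {
    Range<Integer> aligned = alignRange(Range.create(100, 1000), 16);
}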
/**
 * Returns the equivalent factored range {@code newrange}, where for every {@code e}:
 * {@code newrange.contains(e)} implies that {@code range.contains(e * factor)}, and
 * {@code !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
 */
static Range<Long> factorRange(Range<Long> range, long factor) {
    if (factor == 1) {
        return range;
    }
    return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
}
/**
 * Returns the equivalent factored range {@code newrange}, where for every {@code e}:
 * {@code newrange.contains(e)} implies that {@code range.contains(e * factor)}, and
 * {@code !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
 */
static Range<Integer> factorRange(Range<Integer> range, int factor) {
    if (factor == 1) {
        return range;
    }
    return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
}
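// Illustrative usage sketch (not part of the original file) covering both factorRange()
// overloads above: the lower bound is divided rounding up and the upper bound rounding down,
// so every value in the result maps back into the original range when multiplied by the
// factor. For [100, 999] and factor 10 the expected result is [10, 99].
static void factorRangeExample() {
    Range<Integer> coarse = factorRange(Range.create(100, 999), 10);
    Range<Long> coarseLong = factorRange(Range.create(100L, 999L), 10L);
}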
private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
    int[] legacyFps = new int[2];
    legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
    legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
    return legacyFps;
}
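// Illustrative usage sketch (not part of the original file): mapping a camera2 AE target FPS
// range onto the legacy Camera.Parameters two-element {min, max} array. Any scaling the legacy
// API expects (e.g. FPS values multiplied by 1000) is handled by other code and is not assumed
// here; this helper only copies the bounds into the indexed slots.
static void convertAeFpsRangeExample() {
    int[] legacyFps = convertAeFpsRangeToLegacy(Range.create(15, 30));
    // legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] == 15
    // legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] == 30
}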