private int getBufferSize() {
    int cameraId = mICameraDeviceManager.getCurrentCameraId();
    ICameraDevice cameraDevice = mICameraDeviceManager.getCameraDevice(cameraId);
    Size size = cameraDevice.getParameters().getPreviewSize();
    int imageFormat = cameraDevice.getParameters().getPreviewFormat();
    return size.width * size.height * ImageFormat.getBitsPerPixel(imageFormat) / 8;
}
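// Hedged aside (not part of any snippet in this file): every buffer-size
// computation below reduces to width * height * bitsPerPixel / 8. For the
// default preview format NV21, ImageFormat.getBitsPerPixel() returns 12, so
// a 640x480 frame needs 640 * 480 * 12 / 8 = 460800 bytes. A minimal
// standalone helper, assuming only android.graphics.ImageFormat is imported:
private static int previewBufferSize(int width, int height, int imageFormat) {
    return width * height * ImageFormat.getBitsPerPixel(imageFormat) / 8;
}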
/** Start camera capture. */
public void startCapture() {
    try {
        cameraInfo = new CameraInfo();
        if (mCamera == null) {
            camera_count = Camera.getNumberOfCameras();
            Log.e(TAG, "camera count:" + camera_count);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                // Prefer the front-facing camera if one exists.
                for (int i = 0; i < camera_count; i++) {
                    CameraInfo info = new CameraInfo();
                    Camera.getCameraInfo(i, info);
                    if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
                        Log.e(TAG, "to open front camera");
                        mCamera = Camera.open(i);
                        Camera.getCameraInfo(i, cameraInfo);
                        break;
                    }
                }
            }
            if (mCamera == null) {
                // Fall back to the default (usually back-facing) camera.
                Log.e(TAG, "opening default camera");
                mCamera = Camera.open();
                Camera.getCameraInfo(0, cameraInfo);
            }
        }
        mCamera.stopPreview();
        mParameters = mCamera.getParameters();
        // Device-specific rotation fix-ups; see the canonical computation below.
        if (isScreenOriatationPortrait()) {
            if (cameraInfo.orientation == 270 || cameraInfo.orientation == 0)
                mCamera.setDisplayOrientation(90);
            if (cameraInfo.orientation == 90)
                mCamera.setDisplayOrientation(270);
        } else {
            if (cameraInfo.orientation == 90)
                mCamera.setDisplayOrientation(180);
        }
        mParameters.setPreviewSize(mwidth, mheight);
        mParameters.setPreviewFrameRate(15);
        mCamera.setParameters(mParameters);

        int mformat = mParameters.getPreviewFormat();
        int bitsperpixel = ImageFormat.getBitsPerPixel(mformat);
        Log.e(TAG, "bitsperpixel: " + bitsperpixel);
        yuv_frame = new byte[mwidth * mheight * bitsperpixel / 8];
        yuv_Rotate90 = new byte[mwidth * mheight * bitsperpixel / 8];
        mCamera.addCallbackBuffer(yuv_frame);
        mCamera.setPreviewDisplay(localSurfaceHolder);
        mCamera.setPreviewCallbackWithBuffer(this);
        EMVideoCallHelper.getInstance().setResolution(mwidth, mheight);
        mCamera.startPreview();
        Log.d(TAG, "camera start preview");
    } catch (Exception e) {
        e.printStackTrace();
        if (mCamera != null) {
            mCamera.release();
            mCamera = null; // avoid reusing a released camera on the next call
        }
    }
}
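// The orientation branches above are device-specific guesses. For reference,
// the canonical computation documented for Camera.setDisplayOrientation() is
// reproduced below; it is a known-good alternative, not part of the original
// snippet (Activity and android.view.Surface imports assumed):
public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
        case Surface.ROTATION_0: degrees = 0; break;
        case Surface.ROTATION_90: degrees = 90; break;
        case Surface.ROTATION_180: degrees = 180; break;
        case Surface.ROTATION_270: degrees = 270; break;
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        result = (info.orientation + degrees) % 360;
        result = (360 - result) % 360; // compensate for the front-camera mirror
    } else { // back-facing
        result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
}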
// Called by native code. Returns true if capturer is started.
//
// Note that this actually opens the camera, which can be a slow operation and
// thus might be done on a background thread, but ViE API needs a
// synchronous success return value so we can't do that.
private synchronized boolean startCapture(int width, int height, int min_mfps, int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" + min_mfps + ":" + max_mfps);
    Throwable error = null;
    try {
        camera = Camera.open(id);

        localPreview = ViERenderer.GetLocalRenderer();
        if (localPreview != null) {
            localPreview.addCallback(this);
            if (localPreview.getSurface() != null && localPreview.getSurface().isValid()) {
                camera.setPreviewDisplay(localPreview);
            }
        } else {
            // No local renderer (we only care about onPreviewFrame() buffers, not a
            // directly-displayed UI element). Camera won't capture without
            // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
            // hand it over to Camera, but never listen for frame-ready callbacks,
            // and never call updateTexImage on it.
            try {
                // "42" because http://goo.gl/KaEn8
                dummySurfaceTexture = new SurfaceTexture(42);
                camera.setPreviewTexture(dummySurfaceTexture);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }

        Camera.Parameters parameters = camera.getParameters();
        Log.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
        if (parameters.isVideoStabilizationSupported()) {
            parameters.setVideoStabilization(true);
        }
        parameters.setPreviewSize(width, height);
        parameters.setPreviewFpsRange(min_mfps, max_mfps);
        int format = ImageFormat.NV21;
        parameters.setPreviewFormat(format);
        camera.setParameters(parameters);

        int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
        for (int i = 0; i < numCaptureBuffers; i++) {
            camera.addCallbackBuffer(new byte[bufSize]);
        }
        camera.setPreviewCallbackWithBuffer(this);
        camera.startPreview();
        return true;
    } catch (IOException e) {
        error = e;
    } catch (RuntimeException e) {
        error = e;
    }
    Log.e(TAG, "startCapture failed", error);
    if (camera != null) {
        stopCapture();
    }
    return false;
}
public synchronized void setupCamera(int width, int height) {
    if (mCamera != null) {
        Log.i(TAG, "Setup Camera - " + width + "x" + height);
        Camera.Parameters params = mCamera.getParameters();
        List<Camera.Size> sizes = params.getSupportedPreviewSizes();
        mFrameWidth = width;
        mFrameHeight = height;

        // Select the supported preview size whose height is closest to the request.
        {
            int minDiff = Integer.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - height) < minDiff) {
                    mFrameWidth = size.width;
                    mFrameHeight = size.height;
                    minDiff = Math.abs(size.height - height);
                }
            }
        }
        params.setPreviewSize(getFrameWidth(), getFrameHeight());

        List<String> focusModes = params.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        mCamera.setParameters(params);

        /* Now allocate the buffer */
        params = mCamera.getParameters();
        int size = params.getPreviewSize().width * params.getPreviewSize().height;
        size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
        mBuffer = new byte[size];
        /* The buffer where the current frame will be copied */
        mFrame = new byte[size];
        mCamera.addCallbackBuffer(mBuffer);

        /* Notify that the preview is about to be started and deliver preview size */
        onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

        try {
            setPreview();
        } catch (IOException e) {
            Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
        }

        /* Now we can start a preview */
        mCamera.startPreview();
    }
}
private void initializeCamera() {
    Camera.Parameters p = mCamera.getParameters();

    // Workaround for devices whose preview callback never fires without a preview surface.
    // See http://stackoverflow.com/questions/5788993/use-android-camera-without-surface-view
    try {
        mCameraTexture = new SurfaceTexture(10);
        mCamera.setPreviewTexture(mCameraTexture);
    } catch (Throwable ignored) {
    }

    // Choose the smallest supported preview size available.
    List<Camera.Size> sizes = p.getSupportedPreviewSizes();
    int bestIndex = -1;
    int pixelCount = Integer.MAX_VALUE;
    for (int i = 0; i < sizes.size(); i++) {
        int w = sizes.get(i).width, h = sizes.get(i).height;
        int count = w * h;
        if (count < pixelCount) {
            bestIndex = i;
            pixelCount = count; // Note pixelCount is reused below
        }
    }
    Camera.Size size = sizes.get(bestIndex);
    mCameraWidth = size.width;
    mCameraHeight = size.height;
    Log.d(TAG, "W:" + mCameraWidth + ",H:" + mCameraHeight + ", num pixels:" + pixelCount);
    mCameraDecodedRGB = new int[pixelCount];

    p.setPreviewSize(mCameraWidth, mCameraHeight);
    p.setPreviewFormat(ImageFormat.NV21);
    mCamera.setParameters(p);

    int bitsPerPixel = ImageFormat.getBitsPerPixel(p.getPreviewFormat());
    int bytes = (pixelCount * bitsPerPixel) / 8;
    // Double buffer: one buffer can be filled while the other is being processed.
    mCamera.addCallbackBuffer(new byte[bytes]);
    mCamera.addCallbackBuffer(new byte[bytes]);
    mCamera.setPreviewCallbackWithBuffer(this);
    mPreviewRunning = true;
    mCamera.startPreview();
    Log.d(TAG, "startPreview()");
}
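// Companion sketch (illustrative, not the class's actual implementation): with
// setPreviewCallbackWithBuffer(), each buffer must be handed back via
// addCallbackBuffer() after processing, or frame delivery stalls once every
// queued buffer has been consumed. The double-buffering above relies on this
// contract. Assumes a class implementing Camera.PreviewCallback:
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // ... consume the NV21 frame in `data` ...
    camera.addCallbackBuffer(data); // re-queue the buffer for the next frame
}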
/**
 * Creates one buffer for the camera preview callback. The size of the buffer is based on the
 * camera preview size and the format of the camera image.
 *
 * @return a new preview buffer of the appropriate size for the current camera settings
 */
private byte[] createPreviewBuffer(Size previewSize) {
    int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21);
    long sizeInBits = previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
    int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;

    // NOTICE: This code only works when using Play Services v8.1 or higher.

    // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
    // should guarantee that there will be an array to work with.
    byte[] byteArray = new byte[bufferSize];
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    if (!buffer.hasArray() || (buffer.array() != byteArray)) {
        // This should never happen, but if it did, the preview content could not be
        // passed to the underlying detector later.
        throw new IllegalStateException(mContext.getString(R.string.failed_to_create_vaild_buffer));
    }

    mBytesToByteBuffer.put(byteArray, buffer);
    return byteArray;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    // Now that the size is known, set up the camera parameters and begin
    // the preview.
    Camera.Parameters parameters = mCamera.getParameters();
    List<Size> sizes = parameters.getSupportedPreviewSizes();
    Size optimalSize = getOptimalPreviewSize(sizes, w, h);
    parameters.setPreviewSize(optimalSize.width, optimalSize.height);
    mCamera.setParameters(parameters);
    if (previewCallback != null) {
        mCamera.setPreviewCallbackWithBuffer(previewCallback);
        Camera.Size size = parameters.getPreviewSize();
        byte[] data = new byte[size.width * size.height
                * ImageFormat.getBitsPerPixel(parameters.getPreviewFormat()) / 8];
        mCamera.addCallbackBuffer(data);
    }
    mCamera.startPreview();
}
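// getOptimalPreviewSize() is referenced above but not defined in this file.
// A minimal sketch of a common implementation (match the target aspect ratio
// within a tolerance, then pick the closest height; the method and constant
// names here are assumptions, not the original helper):
private static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
    final double ASPECT_TOLERANCE = 0.1;
    double targetRatio = (double) w / h;
    Camera.Size optimalSize = null;
    double minDiff = Double.MAX_VALUE;
    for (Camera.Size size : sizes) {
        double ratio = (double) size.width / size.height;
        if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
        if (Math.abs(size.height - h) < minDiff) {
            optimalSize = size;
            minDiff = Math.abs(size.height - h);
        }
    }
    if (optimalSize == null) { // no aspect-ratio match: fall back to closest height
        minDiff = Double.MAX_VALUE;
        for (Camera.Size size : sizes) {
            if (Math.abs(size.height - h) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - h);
            }
        }
    }
    return optimalSize;
}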
@TargetApi(11)
protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;

        if (mCameraIndex == -1) {
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            } catch (Exception e) {
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }

            if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + camIdx + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + " failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected) break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                Log.d(TAG, "Trying to open camera with new open(" + mCameraIndex + ")");
                try {
                    mCamera = Camera.open(mCameraIndex);
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera #" + mCameraIndex + " failed to open: " + e.getLocalizedMessage());
                }
            }
        }

        if (mCamera == null) return false;

        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits the surface, considering the maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);

                params.setPreviewFormat(ImageFormat.NV21);
                Log.d(TAG, "Set preview size to " + (int) frameSize.width + "x" + (int) frameSize.height);
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);

                List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }

                mCamera.setParameters(params);
                params = mCamera.getParameters();

                mFrameWidth = params.getPreviewSize().width;
                mFrameHeight = params.getPreviewSize().height;

                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }

                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];

                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);

                // NV21 layout: full-height Y plane plus a half-height interleaved VU plane.
                mBaseMat = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);

                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat();
                mFrameChain[1] = new Mat();

                AllocateCache();

                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else {
                    mCamera.setPreviewDisplay(null);
                }

                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else {
                result = false;
            }
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }

    return result;
}
// Returns true on success, false otherwise.
@CalledByNative
public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" + frameRate + "fps");
    try {
        mCamera = Camera.open(mId);
    } catch (RuntimeException ex) {
        Log.e(TAG, "allocate: Camera.open: " + ex);
        return false;
    }

    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(mId, cameraInfo);
    mCameraOrientation = cameraInfo.orientation;
    mCameraFacing = cameraInfo.facing;
    mDeviceOrientation = getDeviceOrientation();
    Log.d(TAG, "allocate: orientation dev=" + mDeviceOrientation
            + ", cam=" + mCameraOrientation + ", facing=" + mCameraFacing);

    Camera.Parameters parameters = mCamera.getParameters();

    // getSupportedPreviewFpsRange() returns a List with at least one
    // element, but when camera is in bad state, it can return null pointer.
    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
    if (listFpsRange == null || listFpsRange.size() == 0) {
        Log.e(TAG, "allocate: no fps range found");
        return false;
    }
    int frameRateInMs = frameRate * 1000;
    // Use the first range as default.
    int[] fpsMinMax = listFpsRange.get(0);
    int newFrameRate = (fpsMinMax[0] + 999) / 1000;
    for (int[] fpsRange : listFpsRange) {
        if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
            fpsMinMax = fpsRange;
            newFrameRate = frameRate;
            break;
        }
    }
    frameRate = newFrameRate;
    Log.d(TAG, "allocate: fps set to " + frameRate);

    // Calculate size.
    List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
    int minDiff = Integer.MAX_VALUE;
    int matchedWidth = width;
    int matchedHeight = height;
    for (Camera.Size size : listCameraSize) {
        int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
        Log.d(TAG, "allocate: supported (" + size.width + ", " + size.height + "), diff=" + diff);
        // TODO(wjia): Remove this hack (forcing width to be multiple
        // of 32) by supporting stride in video frame buffer.
        // Right now, VideoCaptureController requires compact YV12
        // (i.e., with no padding).
        if (diff < minDiff && (size.width % 32 == 0)) {
            minDiff = diff;
            matchedWidth = size.width;
            matchedHeight = size.height;
        }
    }
    if (minDiff == Integer.MAX_VALUE) {
        Log.e(TAG, "allocate: can not find a multiple-of-32 resolution");
        return false;
    }
    mCaptureFormat = new CaptureFormat(matchedWidth, matchedHeight, frameRate, BuggyDeviceHack.getImageFormat());
    // Hack to avoid certain capture resolutions under a minimum one,
    // see http://crbug.com/305294
    BuggyDeviceHack.applyMinDimensions(mCaptureFormat);
    Log.d(TAG, "allocate: matched (" + mCaptureFormat.mWidth + "x" + mCaptureFormat.mHeight + ")");

    if (parameters.isVideoStabilizationSupported()) {
        Log.d(TAG, "Image stabilization supported, currently: "
                + parameters.getVideoStabilization() + ", setting it.");
        parameters.setVideoStabilization(true);
    } else {
        Log.d(TAG, "Image stabilization not supported.");
    }
    parameters.setPreviewSize(mCaptureFormat.mWidth, mCaptureFormat.mHeight);
    parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
    parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
    mCamera.setParameters(parameters);

    // Set SurfaceTexture. Android Capture needs a SurfaceTexture even if
    // it is not going to be used.
    mGlTextures = new int[1];
    // Generate one texture pointer and bind it as an external texture.
    GLES20.glGenTextures(1, mGlTextures, 0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
    // No mip-mapping with camera source.
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // Clamp to edge is only option.
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
    mSurfaceTexture.setOnFrameAvailableListener(null);
    try {
        mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException ex) {
        Log.e(TAG, "allocate: " + ex);
        return false;
    }

    int bufSize = mCaptureFormat.mWidth * mCaptureFormat.mHeight
            * ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat) / 8;
    for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
        byte[] buffer = new byte[bufSize];
        mCamera.addCallbackBuffer(buffer);
    }
    mExpectedFrameSize = bufSize;
    return true;
}
protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
        mCamera = null;

        if (mCameraIndex == CAMERA_ID_ANY) {
            Log.d(TAG, "Trying to open camera with old open()");
            try {
                mCamera = Camera.open();
            } catch (Exception e) {
                Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
            }

            if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                boolean connected = false;
                for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                    Log.d(TAG, "Trying to open camera with new open(" + camIdx + ")");
                    try {
                        mCamera = Camera.open(camIdx);
                        connected = true;
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + camIdx + " failed to open: " + e.getLocalizedMessage());
                    }
                    if (connected) break;
                }
            }
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
                int localCameraIndex = mCameraIndex;
                if (mCameraIndex == CAMERA_ID_BACK) {
                    Log.i(TAG, "Trying to open back camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                } else if (mCameraIndex == CAMERA_ID_FRONT) {
                    Log.i(TAG, "Trying to open front camera");
                    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                    for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
                        Camera.getCameraInfo(camIdx, cameraInfo);
                        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                            localCameraIndex = camIdx;
                            break;
                        }
                    }
                }
                if (localCameraIndex == CAMERA_ID_BACK) {
                    Log.e(TAG, "Back camera not found!");
                } else if (localCameraIndex == CAMERA_ID_FRONT) {
                    Log.e(TAG, "Front camera not found!");
                } else {
                    Log.d(TAG, "Trying to open camera with new open(" + localCameraIndex + ")");
                    try {
                        mCamera = Camera.open(localCameraIndex);
                    } catch (RuntimeException e) {
                        Log.e(TAG, "Camera #" + localCameraIndex + " failed to open: " + e.getLocalizedMessage());
                    }
                }
            }
        }

        if (mCamera == null) return false;

        /* Now set camera parameters */
        try {
            Camera.Parameters params = mCamera.getParameters();
            Log.d(TAG, "getSupportedPreviewSizes()");
            List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
            if (sizes != null) {
                /* Select the size that fits the surface, considering the maximum size allowed */
                Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);

                params.setPreviewFormat(ImageFormat.NV21);
                Log.d(TAG, "Set preview size to " + (int) frameSize.width + "x" + (int) frameSize.height);
                params.setPreviewSize((int) frameSize.width, (int) frameSize.height);

                // Enable the recording hint except on the Samsung GT-I9100,
                // where it is known to cause problems.
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH
                        && !android.os.Build.MODEL.equals("GT-I9100"))
                    params.setRecordingHint(true);

                List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }

                mCamera.setParameters(params);
                params = mCamera.getParameters();

                mFrameWidth = params.getPreviewSize().width;
                mFrameHeight = params.getPreviewSize().height;

                if ((getLayoutParams().width == LayoutParams.MATCH_PARENT)
                        && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
                    mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
                else
                    mScale = 0;

                if (mFpsMeter != null) {
                    mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
                }

                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
                mBuffer = new byte[size];

                mCamera.addCallbackBuffer(mBuffer);
                mCamera.setPreviewCallbackWithBuffer(this);

                // NV21 frames arrive as a full-height Y plane plus a half-height VU plane.
                mFrameChain = new Mat[2];
                mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
                mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);

                AllocateCache();

                mCameraFrame = new JavaCameraFrame[2];
                mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
                mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);

                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                    mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
                    mCamera.setPreviewTexture(mSurfaceTexture);
                } else {
                    mCamera.setPreviewDisplay(null);
                }

                /* Finally we are ready to start the preview */
                Log.d(TAG, "startPreview");
                mCamera.startPreview();
            } else {
                result = false;
            }
        } catch (Exception e) {
            result = false;
            e.printStackTrace();
        }
    }

    return result;
}
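// None of the initialization snippets above show teardown. A hedged sketch of
// the usual counterpart (field name mCamera assumed), which must run before
// another client can open the same camera:
private void releaseCamera() {
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.setPreviewCallbackWithBuffer(null); // detach our frame callback
        mCamera.release();
        mCamera = null;
    }
}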