@Override
  public int onStartCommand(Intent intent, int flags, int startId) {
    Log.v("FlashlightService", "Starting Flash");
    // Torch already on (we hold the camera) — nothing more to do.
    if (cam != null) return START_NOT_STICKY;
    try {
      cam = Camera.open();
      Parameters p = cam.getParameters();
      List<String> flashes = p.getSupportedFlashModes();
      // BUG FIX: the camera previously stayed open on every error path below,
      // so `cam != null` short-circuited all later start attempts and the
      // camera was leaked. Release it before reporting the error.
      if (flashes == null) return releaseCameraAndReportError(R.string.err_available);
      if (flashes.contains(Parameters.FLASH_MODE_TORCH))
        p.setFlashMode(Parameters.FLASH_MODE_TORCH);
      else if (flashes.contains(Parameters.FLASH_MODE_ON)) p.setFlashMode(Parameters.FLASH_MODE_ON);
      else return releaseCameraAndReportError(R.string.err_available);
      cam.setParameters(p);
      // Needed for some devices.
      cam.setPreviewTexture(new SurfaceTexture(0));
      // Needed for some more devices.
      cam.startPreview();

      // Keep phone awake with screen off
      wl =
          ((PowerManager) getSystemService(Context.POWER_SERVICE))
              .newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "FlashlightService");
      if (wl != null && !wl.isHeld()) wl.acquire();
      return START_NOT_STICKY;
    } catch (Exception e) {
      return releaseCameraAndReportError(R.string.err_access);
    }
  }

  /**
   * Releases the camera (if held) so a later start can retry, then reports the given
   * error string resource via {@code error()}.
   *
   * @param messageId string resource id describing the failure
   * @return the service start mode returned by {@code error()}
   */
  private int releaseCameraAndReportError(int messageId) {
    if (cam != null) {
      try {
        cam.release();
      } catch (RuntimeException ignored) {
        // Best effort: release can throw if the camera service already died.
      }
      cam = null;
    }
    return error(this, messageId);
  }
Ejemplo n.º 2
0
  /**
   * Opens the camera and starts sending preview frames to the underlying detector. The preview
   * frames are not displayed.
   *
   * @throws IOException if the camera's preview texture or display could not be initialized
   */
  @RequiresPermission(Manifest.permission.CAMERA)
  public CameraSource start() throws IOException {
    synchronized (mCameraLock) {
      // Idempotent: a second call while the camera is open is a no-op.
      if (mCamera == null) {
        mCamera = createCamera();

        // SurfaceTexture only exists from Honeycomb (API 11) onward; on older
        // releases an off-screen SurfaceView serves as the preview target.
        boolean hasSurfaceTexture = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
        if (hasSurfaceTexture) {
          mDummySurfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_NAME);
          mCamera.setPreviewTexture(mDummySurfaceTexture);
        } else {
          mDummySurfaceView = new SurfaceView(mContext);
          mCamera.setPreviewDisplay(mDummySurfaceView.getHolder());
        }
        mCamera.startPreview();

        // Spin up the frame-processing loop once the preview is live.
        mFrameProcessor.setActive(true);
        mProcessingThread = new Thread(mFrameProcessor);
        mProcessingThread.start();
      }
    }
    return this;
  }
Ejemplo n.º 3
0
  /**
   * Begins capturing at the requested size and frame rate, attaching either the shared local
   * preview surface or a dummy SurfaceTexture when no renderer is available.
   *
   * @return the result of {@code tryStartCapture} (native-style status code)
   */
  public int StartCapture(int width, int height, int frameRate) {
    Log.d(TAG, "StartCapture width " + width + " height " + height + " frame rate " + frameRate);
    // Get the local preview SurfaceHolder from the static render class
    localPreview = ViERenderer.GetLocalRenderer();
    if (localPreview != null) {
      if (localPreview.getSurface() != null && localPreview.getSurface().isValid()) {
        surfaceCreated(localPreview);
      }
      localPreview.addCallback(this);
    } else {
      // No local renderer.  Camera won't capture without
      // setPreview{Texture,Display}, so we create a dummy SurfaceTexture
      // and hand it over to Camera, but never listen for frame-ready
      // callbacks, and never call updateTexImage on it.
      captureLock.lock();
      try {
        dummySurfaceTexture = new SurfaceTexture(42);
        camera.setPreviewTexture(dummySurfaceTexture);
      } catch (IOException e) {
        throw new RuntimeException(e);
      } finally {
        // BUG FIX: unlock in finally — previously an IOException here left the
        // lock held forever, deadlocking every subsequent capture call.
        captureLock.unlock();
      }
    }

    captureLock.lock();
    try {
      isCaptureStarted = true;
      mCaptureWidth = width;
      mCaptureHeight = height;
      mCaptureFPS = frameRate;

      // BUG FIX: unlock in finally so an exception from tryStartCapture cannot
      // leak the lock either.
      return tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
    } finally {
      captureLock.unlock();
    }
  }
 // Called by native code.  Returns true when camera is known to be stopped.
 private synchronized boolean stopCapture() {
   Log.d(TAG, "stopCapture");
   if (camera == null) {
     throw new RuntimeException("Camera is already stopped!");
   }
   try {
     camera.stopPreview();
     camera.setPreviewCallbackWithBuffer(null);
     if (localPreview != null) {
       localPreview.removeCallback(this);
       camera.setPreviewDisplay(null);
     } else {
       // A dummy SurfaceTexture was installed during start; detach it.
       camera.setPreviewTexture(null);
     }
     camera.release();
     camera = null;
     return true;
   } catch (IOException | RuntimeException e) {
     // Multi-catch replaces the duplicated catch blocks that stashed the
     // throwable in a local just to log it after the try.
     Log.e(TAG, "Failed to stop camera", e);
   }
   return false;
 }
Ejemplo n.º 5
0
  /**
   * Opens the camera if it is not already open and wires it to the preview surface.
   *
   * <p>Requires a ready SurfaceView. Installs an error callback that stops the session if the
   * media server dies, applies flash / recording-hint parameters, then attaches the preview to
   * either a GL SurfaceTexture (MODE_MEDIACODEC_API_2) or the SurfaceView's holder. The statement
   * order (parameters → orientation → preview target) matters for the camera HAL.
   *
   * @throws InvalidSurfaceException if the surface is missing, not ready, or rejected
   * @throws RuntimeException propagated from parameter/preview setup after destroying the camera
   */
  protected synchronized void createCamera() throws RuntimeException {
    if (mSurfaceView == null) throw new InvalidSurfaceException("Invalid surface !");
    if (mSurfaceView.getHolder() == null || !mSurfaceReady)
      throw new InvalidSurfaceException("Invalid surface !");

    if (mCamera == null) {
      openCamera();
      // Reset per-session state flags; semantics defined elsewhere in this class.
      mUpdated = false;
      mUnlocked = false;
      mCamera.setErrorCallback(
          new Camera.ErrorCallback() {
            @Override
            public void onError(int error, Camera camera) {
              // On some phones when trying to use the camera facing front the media server will die
              // Whether or not this callback may be called really depends on the phone
              if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
                // In this case the application must release the camera and instantiate a new one
                Log.e(TAG, "Media server died !");
                // We don't know in what thread we are so stop needs to be synchronized
                mCameraOpenedManually = false;
                stop();
              } else {
                Log.e(TAG, "Error unknown with the camera: " + error);
              }
            }
          });

      try {

        // If the phone has a flash, we turn it on/off according to mFlashEnabled
        // setRecordingHint(true) is a very nice optimization if you plane to only use the Camera
        // for recording
        Parameters parameters = mCamera.getParameters();
        if (parameters.getFlashMode() != null) {
          parameters.setFlashMode(
              mFlashEnabled ? Parameters.FLASH_MODE_TORCH : Parameters.FLASH_MODE_OFF);
        }
        parameters.setRecordingHint(true);
        mCamera.setParameters(parameters);
        mCamera.setDisplayOrientation(mOrientation);

        try {
          // MODE_MEDIACODEC_API_2 renders through a GL thread; otherwise preview goes
          // straight to the SurfaceView's holder.
          if (mMode == MODE_MEDIACODEC_API_2) {
            mSurfaceView.startGLThread();
            mCamera.setPreviewTexture(mSurfaceView.getSurfaceTexture());
          } else {
            mCamera.setPreviewDisplay(mSurfaceView.getHolder());
          }
        } catch (IOException e) {
          // Surface was rejected by the camera; surface the same error type as above.
          throw new InvalidSurfaceException("Invalid surface !");
        }

      } catch (RuntimeException e) {
        // Leave no half-configured camera behind before propagating.
        destroyCamera();
        throw e;
      }
    }
  }
 /** Routes the Camera preview output into the given SurfaceTexture, then starts previewing. */
 private void handleSetSurfaceTexture(SurfaceTexture st) {
   // We are notified (as listener) each time a new preview frame lands in the texture.
   st.setOnFrameAvailableListener(this);
   try {
     mCamera.setPreviewTexture(st);
   } catch (IOException e) {
     // The checked IOException is unrecoverable at this point; fail loudly.
     throw new RuntimeException(e);
   }
   mCamera.startPreview();
 }
  /**
   * Wraps the given GL texture id in a SurfaceTexture, hands it to the renderer, opens the
   * default camera, and starts streaming preview frames into the texture.
   *
   * @param texture GL texture name (id) to receive preview frames
   */
  public void startCamera(int texture) {
    surface = new SurfaceTexture(texture);
    surface.setOnFrameAvailableListener(this);
    renderer.setSurface(surface);

    mCamera = Camera.open();

    try {
      // BUG FIX: setPreviewTexture declares a checked IOException; without this
      // try/catch the method does not compile (the signature declares no throws).
      mCamera.setPreviewTexture(surface);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    mCamera.startPreview();
  }
  // Called by native code.  Returns true if capturer is started.
  //
  // Note that this actually opens the camera, which can be a slow operation and
  // thus might be done on a background thread, but ViE API needs a
  // synchronous success return value so we can't do that.
  //
  // min_mfps/max_mfps are in Camera.Parameters.setPreviewFpsRange units, i.e.
  // frames-per-second scaled by 1000.
  private synchronized boolean startCapture(int width, int height, int min_mfps, int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" + min_mfps + ":" + max_mfps);
    Throwable error = null;
    try {
      camera = Camera.open(id);

      // Attach a preview target: the shared local renderer if one exists, else a
      // dummy SurfaceTexture (Camera refuses to capture without one).
      localPreview = ViERenderer.GetLocalRenderer();
      if (localPreview != null) {
        localPreview.addCallback(this);
        if (localPreview.getSurface() != null && localPreview.getSurface().isValid()) {
          camera.setPreviewDisplay(localPreview);
        }
      } else {
        // No local renderer (we only care about onPreviewFrame() buffers, not a
        // directly-displayed UI element).  Camera won't capture without
        // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
        // hand it over to Camera, but never listen for frame-ready callbacks,
        // and never call updateTexImage on it.
        try {
          // "42" because http://goo.gl/KaEn8
          dummySurfaceTexture = new SurfaceTexture(42);
          camera.setPreviewTexture(dummySurfaceTexture);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }

      // Configure stabilization, size, fps range and pixel format before starting
      // the preview; parameters must be committed via setParameters.
      Camera.Parameters parameters = camera.getParameters();
      Log.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
      if (parameters.isVideoStabilizationSupported()) {
        parameters.setVideoStabilization(true);
      }
      parameters.setPreviewSize(width, height);
      parameters.setPreviewFpsRange(min_mfps, max_mfps);
      int format = ImageFormat.NV21;
      parameters.setPreviewFormat(format);
      camera.setParameters(parameters);
      // Pre-allocate NV21-sized buffers for the with-buffer preview callback path.
      int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
      for (int i = 0; i < numCaptureBuffers; i++) {
        camera.addCallbackBuffer(new byte[bufSize]);
      }
      camera.setPreviewCallbackWithBuffer(this);
      camera.startPreview();
      return true;
    } catch (IOException e) {
      error = e;
    } catch (RuntimeException e) {
      error = e;
    }
    Log.e(TAG, "startCapture failed", error);
    // Partial setup: tear the camera down so a retry starts clean.
    if (camera != null) {
      stopCapture();
    }
    return false;
  }
Ejemplo n.º 9
0
 @Override
 public CameraActions startPreview(SurfaceTexture texture) throws IOException {
   // Guard clauses: a preview target is mandatory, and setPreviewTexture only
   // exists from Gingerbread (API 9) onward.
   if (texture == null) {
     throw new NullPointerException("You cannot start preview without a preview texture");
   }
   if (Build.VERSION.SDK_INT < Build.VERSION_CODES.GINGERBREAD) {
     throw new IllegalStateException("Your Android version does not support this method.");
   }
   camera.setPreviewTexture(texture);
   camera.startPreview();
   return new DefaultCameraActions(this);
 }
Ejemplo n.º 10
0
  /** Opens the default camera and streams its preview into the given GL texture. */
  public void startCamera(int texture) {
    surface = new SurfaceTexture(texture);
    surface.setOnFrameAvailableListener(this);
    camera = Camera.open();
    try {
      camera.setPreviewTexture(surface);
      camera.startPreview();
    } catch (IOException ignored) {
      // Preview could not be attached; record the failure and leave the camera idle.
      Log.w("MainActivity", "CAM LAUNCH FAILED");
    }
  }
Ejemplo n.º 11
0
  /**
   * The Surface is created/init().
   *
   * <p>When plugins want GPU preview, allocates an external-OES GL texture, wraps it in a
   * SurfaceTexture, and points the camera preview at it. The GL calls below are
   * order-dependent (gen → bind → parameterize → unbind).
   */
  @Override
  public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    Log.i("Almalence", "GLLayer.onSurfaceCreated()");

    PluginManager.getInstance().onGLSurfaceCreated(gl, config);

    if (PluginManager.getInstance().shouldPreviewToGPU()) {
      // Allocate one external-OES texture to receive camera frames.
      final int[] tex = new int[1];
      GLES20.glGenTextures(1, tex, 0);
      this.texture_preview = tex[0];

      GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, this.texture_preview);
      GLES20.glTexParameteri(
          GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
      GLES20.glTexParameteri(
          GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
      GLES20.glTexParameteri(
          GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameteri(
          GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
      GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0);

      // Forward frame-available events to the plugin layer.
      this.surfaceTexture = new SurfaceTexture(this.texture_preview);
      this.surfaceTexture.setOnFrameAvailableListener(
          new OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(final SurfaceTexture surfaceTexture) {
              PluginManager.getInstance().onFrameAvailable();
            }
          });

      final Camera camera = CameraController.getCamera();
      if (camera == null) {
        return;
      }

      try {
        camera.setDisplayOrientation(90);
      } catch (RuntimeException e) {
        // Some devices reject setDisplayOrientation; preview still works, so continue.
        e.printStackTrace();
      }

      try {
        camera.setPreviewTexture(this.surfaceTexture);
      } catch (final IOException e) {
        // NOTE(review): startPreview below still runs after this failure — verify
        // that is intended rather than returning early.
        e.printStackTrace();
      }

      camera.startPreview();
    }
  }
Ejemplo n.º 12
0
  /**
   * Configures the already-open camera for headless preview capture: attaches a dummy
   * SurfaceTexture, selects the smallest supported preview size, allocates double callback
   * buffers, and starts the preview.
   */
  private void initializeCamera() {

    Camera.Parameters p = mCamera.getParameters();
    // Hack for Samsung devices (e.g. Nexus 5) ; otherwise callback is never called
    // See http://stackoverflow.com/questions/5788993/use-android-camera-without-surface-view
    try {
      mCameraTexture = new SurfaceTexture(10);
      mCamera.setPreviewTexture(mCameraTexture);
    } catch (Throwable ignored) {
      // Best effort: some devices work without a preview texture at all.
    }

    // Choose smallest preview size available.
    // BUG FIX: this previously iterated getSupportedPictureSizes(), but the chosen
    // dimensions are passed to setPreviewSize() below — picture sizes are not
    // guaranteed to be valid preview sizes.
    List<Camera.Size> sizes = p.getSupportedPreviewSizes();
    int bestIndex = -1;
    int pixelCount = Integer.MAX_VALUE;

    for (int i = 0; i < sizes.size(); i++) {
      int w = sizes.get(i).width, h = sizes.get(i).height;
      int count = w * h;
      if (count < pixelCount) {
        bestIndex = i;
        pixelCount = count; // Note pixelCount is reused below
      }
    }

    Camera.Size size = sizes.get(bestIndex);

    mCameraWidth = size.width;
    mCameraHeight = size.height;

    Log.d(TAG, "W:" + mCameraWidth + ",H:" + mCameraHeight + ", num pixels:" + pixelCount);
    mCameraDecodedRGB = new int[pixelCount];

    p.setPreviewSize(mCameraWidth, mCameraHeight);
    p.setPreviewFormat(ImageFormat.NV21);
    mCamera.setParameters(p);

    // Size each callback buffer for one NV21 frame.
    int bitsPerPixel = ImageFormat.getBitsPerPixel(p.getPreviewFormat());
    int bytes = (pixelCount * bitsPerPixel) / 8;
    // Double Buffer - allow a buffer to be filled with new data while we are using the other one
    mCamera.addCallbackBuffer(new byte[bytes]);
    mCamera.addCallbackBuffer(new byte[bytes]);

    mCamera.setPreviewCallbackWithBuffer(this);
    mPreviewRunning = true;

    mCamera.startPreview();
    Log.d(TAG, "startPreview()");
  }
Ejemplo n.º 13
0
  /**
   * Handles camera opening: releases any previous camera, opens the requested facing
   * (FRONT/BACK), applies focus parameters, restarts the preview on the stored
   * SurfaceTexture, and updates the shared orientation matrix and aspect ratios.
   */
  private void openCamera(int which) {
    // Tear down any previously opened camera before switching.
    if (mCamera != null) {
      mCamera.stopPreview();
      mCamera.release();
      mCamera = null;
    }

    if (mCameraId >= 0) {
      Camera.getCameraInfo(mCameraId, mCameraInfo);
      if (which == FRONT) mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
      else mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);

      params = mCamera.getParameters();
      params.setRotation(0);

      /** set focus mode */
      List<String> FocusModes = params.getSupportedFocusModes();
      if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
        params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
      }

      mCamera.setParameters(params);
      try {
        if (mSurfaceTexture != null) {
          mCamera.setPreviewTexture(mSurfaceTexture);
          mCamera.startPreview();
        }
      } catch (Exception ex) {
        // NOTE(review): failures to (re)attach the preview are silently swallowed
        // here — consider at least logging ex for diagnosability.
      }
    }

    if (mCamera == null || mSharedData == null) {
      return;
    }

    // Combined sensor + UI rotation drives the orientation matrix below.
    int orientation = mCameraInfo.orientation + rot_angle;

    Matrix.setRotateM(mSharedData.mOrientationM, 0, orientation, 0f, 0f, 1f);

    Camera.Size size = mCamera.getParameters().getPreviewSize();
    if (orientation % 90 == 0) {
      // NOTE(review): `% 90 == 0` is true for EVERY right-angle orientation
      // (0, 90, 180, 270), so this swap runs for all of them — verify this wasn't
      // meant to be `orientation % 180 != 0` (swap only for 90/270).
      int w = size.width;
      size.width = size.height;
      size.height = w;
    }

    mSharedData.mAspectRatioPreview[0] = (float) Math.min(size.width, size.height) / size.width;
    mSharedData.mAspectRatioPreview[1] = (float) Math.min(size.width, size.height) / size.height;
  }
Ejemplo n.º 14
0
  /**
   * Start the camera and set up its preview texture to the given texture (essentially copying
   * camera into texture), then enable face detection on the running preview.
   *
   * @param texture GL texture name (id) to receive preview frames
   */
  public void startCamera(int texture) {
    surface = new SurfaceTexture(texture);
    surface.setOnFrameAvailableListener(this);

    camera = Camera.open();
    camera.setFaceDetectionListener(this);

    try {
      camera.setPreviewTexture(surface);
      camera.startPreview();
      // BUG FIX: startFaceDetection() requires a running preview; previously it
      // executed even after the catch block (i.e. when attaching the preview
      // texture failed), which throws. Only call it once startPreview() succeeds.
      camera.startFaceDetection();
    } catch (IOException ioe) {
      Log.w("MainActivity", "Failed to start camera preview on texture.");
    }
  }
Ejemplo n.º 15
0
  /**
   * Fully shuts down the capturer: stops capture, detaches the preview texture, frees GL
   * textures, and releases the camera. Safe to call when already deallocated.
   */
  @CalledByNative
  public void deallocate() {
    if (mCamera == null) return;

    stopCapture();
    try {
      mCamera.setPreviewTexture(null);
    } catch (IOException ex) {
      Log.e(TAG, "deallocate: failed to deallocate camera, " + ex);
    }
    // BUG FIX: release the camera even when detaching the preview texture failed;
    // previously an IOException returned early, skipping release() and leaking
    // the camera (mCamera also stayed non-null).
    if (mGlTextures != null) GLES20.glDeleteTextures(1, mGlTextures, 0);
    mCaptureFormat = null;
    mCamera.release();
    mCamera = null;
  }
  /** Opens the camera when the TextureView's surface appears and starts previewing into it. */
  @Override
  public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
    mCamera = Camera.open();
    mCamera.setDisplayOrientation(getCameraOrientation());

    // Size the TextureView to exactly match the camera's current preview size.
    Camera.Size preview = mCamera.getParameters().getPreviewSize();
    FrameLayout.LayoutParams layout =
        new FrameLayout.LayoutParams(preview.width, preview.height, Gravity.CENTER);
    mTextureView.setLayoutParams(layout);

    try {
      mCamera.setPreviewTexture(surface);
    } catch (IOException t) {
      android.util.Log.e("TextureView", "Cannot set preview texture target!", t);
    }

    mCamera.startPreview();
  }
Ejemplo n.º 17
0
  /**
   * Closes the camera and stops sending frames to the underlying frame detector.
   *
   * <p>This camera source may be restarted again by calling {@link #start()} or {@link
   * #start(SurfaceHolder)}.
   *
   * <p>Call {@link #release()} instead to completely shut down this camera source and release the
   * resources of the underlying detector.
   */
  public void stop() {
    synchronized (mCameraLock) {
      mFrameProcessor.setActive(false);
      if (mProcessingThread != null) {
        try {
          // Wait for the thread to complete to ensure that we can't have multiple threads
          // executing at the same time (i.e., which would happen if we called start too
          // quickly after stop).
          mProcessingThread.join();
        } catch (InterruptedException e) {
          Log.d(TAG, mContext.getString(R.string.processing_interrupted));
        }
        mProcessingThread = null;
      }

      // clear the buffer to prevent oom exceptions
      mBytesToByteBuffer.clear();

      if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.setPreviewCallbackWithBuffer(null);
        try {
          // We want to be compatible back to Gingerbread, but SurfaceTexture
          // wasn't introduced until Honeycomb.  Since the interface cannot use a SurfaceTexture, if
          // the
          // developer wants to display a preview we must use a SurfaceHolder.  If the developer
          // doesn't
          // want to display a preview we use a SurfaceTexture if we are running at least Honeycomb.

          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mCamera.setPreviewTexture(null);

          } else {
            mCamera.setPreviewDisplay(null);
          }
        } catch (Exception e) {
          Log.e(TAG, mContext.getString(R.string.failed_to_clear_preview) + e);
        }
        mCamera.release();
        mCamera = null;
      }
    }
  }
Ejemplo n.º 18
0
  /**
   * Opens the camera when the TextureView surface is ready, stretches the view to the display
   * size, starts the preview into the surface, and kicks off the background task.
   */
  @SuppressLint("NewApi")
  @Override
  public void onSurfaceTextureAvailable(SurfaceTexture arg0, int arg1, int arg2) {
    mCamera = Camera.open();
    // NOTE(review): previewSize is queried but never used below (the layout is
    // sized from the display instead) — kept because getParameters() talks to the
    // camera and removing it could mask a device-specific side effect.
    Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
    Display display = getWindowManager().getDefaultDisplay();
    Point size = new Point();
    display.getSize(size);
    int width = size.x;
    int height = size.y;
    myTexture.setLayoutParams(new FrameLayout.LayoutParams(width, height, Gravity.CENTER));

    try {
      mCamera.setPreviewTexture(arg0);
    } catch (IOException t) {
      // BUG FIX: this exception was silently swallowed; log it so a dead preview
      // is diagnosable (fully qualified to avoid relying on an import).
      android.util.Log.e("CameraPreview", "Cannot set preview texture target!", t);
    }
    mCamera.startPreview();
    myTexture.setAlpha(1.0f);
    //        myTexture.setRotation(90.0f);
    new GoGo().execute();
  }
  /**
   * Opens and fully configures a camera for frame grabbing (OpenCV-style camera view).
   *
   * <p>Resolves the camera index (ANY / BACK / FRONT), selects the best preview size for the
   * given surface dimensions, configures NV21 preview with a callback buffer, allocates the
   * Mat frame chain, attaches a dummy SurfaceTexture (API >= 11) and starts the preview.
   *
   * @param width maximum surface width available for the preview
   * @param height maximum surface height available for the preview
   * @return true when the camera was opened and the preview started, false otherwise
   */
  protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
      mCamera = null;

      if (mCameraIndex == CAMERA_ID_ANY) {
        Log.d(TAG, "Trying to open camera with old open()");
        try {
          mCamera = Camera.open();
        } catch (Exception e) {
          Log.e(
              TAG,
              "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
        }

        // Fallback: probe every camera id with the Gingerbread-era open(int).
        if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
          boolean connected = false;
          for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
            Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
            try {
              mCamera = Camera.open(camIdx);
              connected = true;
            } catch (RuntimeException e) {
              Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
            }
            if (connected) break;
          }
        }
      } else {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
          // Map the symbolic BACK/FRONT constants to a physical camera index.
          int localCameraIndex = mCameraIndex;
          if (mCameraIndex == CAMERA_ID_BACK) {
            Log.i(TAG, "Trying to open back camera");
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
              Camera.getCameraInfo(camIdx, cameraInfo);
              if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                localCameraIndex = camIdx;
                break;
              }
            }
          } else if (mCameraIndex == CAMERA_ID_FRONT) {
            Log.i(TAG, "Trying to open front camera");
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
              Camera.getCameraInfo(camIdx, cameraInfo);
              if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                localCameraIndex = camIdx;
                break;
              }
            }
          }
          // If the index still equals the symbolic constant, no matching camera was found.
          if (localCameraIndex == CAMERA_ID_BACK) {
            Log.e(TAG, "Back camera not found!");
          } else if (localCameraIndex == CAMERA_ID_FRONT) {
            Log.e(TAG, "Front camera not found!");
          } else {
            Log.d(
                TAG,
                "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
            try {
              mCamera = Camera.open(localCameraIndex);
            } catch (RuntimeException e) {
              Log.e(
                  TAG,
                  "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
            }
          }
        }
      }

      if (mCamera == null) return false;

      /* Now set camera parameters */
      try {
        Camera.Parameters params = mCamera.getParameters();
        Log.d(TAG, "getSupportedPreviewSizes()");
        List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();

        if (sizes != null) {
          /* Select the size that fits surface considering maximum size allowed */
          Size frameSize =
              calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);

          params.setPreviewFormat(ImageFormat.NV21);
          Log.d(
              TAG,
              "Set preview size to "
                  + Integer.valueOf((int) frameSize.width)
                  + "x"
                  + Integer.valueOf((int) frameSize.height));
          params.setPreviewSize((int) frameSize.width, (int) frameSize.height);

          // Recording hint speeds up frame delivery on ICS+; the GT-I9100 (Galaxy S2)
          // is excluded because the hint breaks it.
          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH
              && !android.os.Build.MODEL.equals("GT-I9100")) params.setRecordingHint(true);

          List<String> FocusModes = params.getSupportedFocusModes();
          if (FocusModes != null
              && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
          }

          mCamera.setParameters(params);
          // Re-read parameters: the driver may have adjusted the requested values.
          params = mCamera.getParameters();

          mFrameWidth = params.getPreviewSize().width;
          mFrameHeight = params.getPreviewSize().height;

          if ((getLayoutParams().width == LayoutParams.MATCH_PARENT)
              && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
            mScale = Math.min(((float) height) / mFrameHeight, ((float) width) / mFrameWidth);
          else mScale = 0;

          if (mFpsMeter != null) {
            mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
          }

          // One callback buffer sized for a full NV21 frame.
          int size = mFrameWidth * mFrameHeight;
          size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
          mBuffer = new byte[size];

          mCamera.addCallbackBuffer(mBuffer);
          mCamera.setPreviewCallbackWithBuffer(this);

          // Two Mats of height*1.5 rows: NV21 stores Y plane plus interleaved VU at
          // half resolution.
          mFrameChain = new Mat[2];
          mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);
          mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);

          AllocateCache();

          mCameraFrame = new JavaCameraFrame[2];
          mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
          mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);

          // A preview target is mandatory: dummy SurfaceTexture on API >= 11,
          // null display otherwise.
          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
            mCamera.setPreviewTexture(mSurfaceTexture);
          } else mCamera.setPreviewDisplay(null);

          /* Finally we are ready to start the preview */
          Log.d(TAG, "startPreview");
          mCamera.startPreview();
        } else result = false;
      } catch (Exception e) {
        result = false;
        e.printStackTrace();
      }
    }

    return result;
  }
Ejemplo n.º 20
0
  /**
   * Opens and configures a camera for frame grabbing (older OpenCV camera-view variant).
   *
   * <p>Opens by explicit index (or probes all cameras when index is -1), selects the best
   * preview size for the surface, configures NV21 preview with a callback buffer, allocates
   * the Mat chain, attaches a dummy SurfaceTexture (API >= 11) and starts the preview.
   *
   * @param width maximum surface width available for the preview
   * @param height maximum surface height available for the preview
   * @return true when the camera was opened and the preview started, false otherwise
   */
  @TargetApi(11)
  protected boolean initializeCamera(int width, int height) {
    Log.d(TAG, "Initialize java camera");
    boolean result = true;
    synchronized (this) {
      mCamera = null;

      // -1 means "any camera": try the legacy no-arg open() first.
      if (mCameraIndex == -1) {
        Log.d(TAG, "Trying to open camera with old open()");
        try {
          mCamera = Camera.open();
        } catch (Exception e) {
          Log.e(
              TAG,
              "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
        }

        // Fallback: probe every camera id with the Gingerbread-era open(int).
        if (mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
          boolean connected = false;
          for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
            Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
            try {
              mCamera = Camera.open(camIdx);
              connected = true;
            } catch (RuntimeException e) {
              Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
            }
            if (connected) break;
          }
        }
      } else {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
          Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(mCameraIndex) + ")");
          try {
            mCamera = Camera.open(mCameraIndex);
          } catch (RuntimeException e) {
            Log.e(TAG, "Camera #" + mCameraIndex + "failed to open: " + e.getLocalizedMessage());
          }
        }
      }

      if (mCamera == null) return false;

      /* Now set camera parameters */
      try {
        Camera.Parameters params = mCamera.getParameters();
        Log.d(TAG, "getSupportedPreviewSizes()");
        List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();

        if (sizes != null) {
          /* Select the size that fits surface considering maximum size allowed */
          Size frameSize =
              calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);

          params.setPreviewFormat(ImageFormat.NV21);
          Log.d(
              TAG,
              "Set preview size to "
                  + Integer.valueOf((int) frameSize.width)
                  + "x"
                  + Integer.valueOf((int) frameSize.height));
          params.setPreviewSize((int) frameSize.width, (int) frameSize.height);

          // NOTE(review): unlike the newer variant of this method, FocusModes is not
          // null-checked here — getSupportedFocusModes() can return null on some devices.
          List<String> FocusModes = params.getSupportedFocusModes();
          if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
          }

          mCamera.setParameters(params);
          // Re-read parameters: the driver may have adjusted the requested values.
          params = mCamera.getParameters();

          mFrameWidth = params.getPreviewSize().width;
          mFrameHeight = params.getPreviewSize().height;

          if (mFpsMeter != null) {
            mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
          }

          // One callback buffer sized for a full NV21 frame (Y plane + half-res VU).
          int size = mFrameWidth * mFrameHeight;
          size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
          mBuffer = new byte[size];

          mCamera.addCallbackBuffer(mBuffer);
          mCamera.setPreviewCallbackWithBuffer(this);

          mBaseMat = new Mat(mFrameHeight + (mFrameHeight / 2), mFrameWidth, CvType.CV_8UC1);

          mFrameChain = new Mat[2];
          mFrameChain[0] = new Mat();
          mFrameChain[1] = new Mat();

          AllocateCache();

          // A preview target is mandatory: dummy SurfaceTexture on API >= 11,
          // null display otherwise. setType is a legacy no-op on modern Android.
          if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
            getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            mCamera.setPreviewTexture(mSurfaceTexture);
          } else mCamera.setPreviewDisplay(null);

          /* Finally we are ready to start the preview */
          Log.d(TAG, "startPreview");
          mCamera.startPreview();
        } else result = false;
      } catch (Exception e) {
        result = false;
        e.printStackTrace();
      }
    }

    return result;
  }
Ejemplo n.º 21
0
  // Returns true on success, false otherwise.
  //
  // Opens camera |mId|, negotiates an fps range and a multiple-of-32 preview
  // size closest to the requested one, applies device-specific workarounds,
  // binds a dummy GL external texture as the preview target, and queues
  // NUM_CAPTURE_BUFFERS callback buffers. On any failure occurring after
  // Camera.open() has succeeded, the camera is released so a failed
  // allocate() does not leak the device.
  @CalledByNative
  public boolean allocate(int width, int height, int frameRate) {
    Log.d(TAG, "allocate: requested (" + width + "x" + height + ")@" + frameRate + "fps");
    try {
      mCamera = Camera.open(mId);
    } catch (RuntimeException ex) {
      Log.e(TAG, "allocate: Camera.open: " + ex);
      return false;
    }

    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    Camera.getCameraInfo(mId, cameraInfo);
    mCameraOrientation = cameraInfo.orientation;
    mCameraFacing = cameraInfo.facing;
    mDeviceOrientation = getDeviceOrientation();
    Log.d(
        TAG,
        "allocate: orientation dev="
            + mDeviceOrientation
            + ", cam="
            + mCameraOrientation
            + ", facing="
            + mCameraFacing);

    Camera.Parameters parameters = mCamera.getParameters();

    // getSupportedPreviewFpsRange() returns a List with at least one
    // element, but when camera is in bad state, it can return null pointer.
    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
    if (listFpsRange == null || listFpsRange.size() == 0) {
      Log.e(TAG, "allocate: no fps range found");
      releaseCameraAfterAllocateFailure();
      return false;
    }
    int frameRateInMs = frameRate * 1000;
    // Use the first range as default; its minimum (rounded up to whole fps)
    // becomes the fallback rate when no range brackets the requested one.
    int[] fpsMinMax = listFpsRange.get(0);
    int newFrameRate = (fpsMinMax[0] + 999) / 1000;
    for (int[] fpsRange : listFpsRange) {
      if (fpsRange[0] <= frameRateInMs && frameRateInMs <= fpsRange[1]) {
        fpsMinMax = fpsRange;
        newFrameRate = frameRate;
        break;
      }
    }
    frameRate = newFrameRate;
    Log.d(TAG, "allocate: fps set to " + frameRate);

    // Calculate size: pick the supported preview size closest (by Manhattan
    // distance) to the requested one.
    List<Camera.Size> listCameraSize = parameters.getSupportedPreviewSizes();
    int minDiff = Integer.MAX_VALUE;
    int matchedWidth = width;
    int matchedHeight = height;
    for (Camera.Size size : listCameraSize) {
      int diff = Math.abs(size.width - width) + Math.abs(size.height - height);
      Log.d(TAG, "allocate: supported (" + size.width + ", " + size.height + "), diff=" + diff);
      // TODO(wjia): Remove this hack (forcing width to be multiple
      // of 32) by supporting stride in video frame buffer.
      // Right now, VideoCaptureController requires compact YV12
      // (i.e., with no padding).
      if (diff < minDiff && (size.width % 32 == 0)) {
        minDiff = diff;
        matchedWidth = size.width;
        matchedHeight = size.height;
      }
    }
    if (minDiff == Integer.MAX_VALUE) {
      Log.e(TAG, "allocate: can not find a multiple-of-32 resolution");
      releaseCameraAfterAllocateFailure();
      return false;
    }

    mCaptureFormat =
        new CaptureFormat(matchedWidth, matchedHeight, frameRate, BuggyDeviceHack.getImageFormat());
    // Hack to avoid certain capture resolutions under a minimum one,
    // see http://crbug.com/305294
    BuggyDeviceHack.applyMinDimensions(mCaptureFormat);
    Log.d(TAG, "allocate: matched (" + mCaptureFormat.mWidth + "x" + mCaptureFormat.mHeight + ")");

    if (parameters.isVideoStabilizationSupported()) {
      Log.d(
          TAG,
          "Image stabilization supported, currently: "
              + parameters.getVideoStabilization()
              + ", setting it.");
      parameters.setVideoStabilization(true);
    } else {
      Log.d(TAG, "Image stabilization not supported.");
    }
    parameters.setPreviewSize(mCaptureFormat.mWidth, mCaptureFormat.mHeight);
    parameters.setPreviewFormat(mCaptureFormat.mPixelFormat);
    parameters.setPreviewFpsRange(fpsMinMax[0], fpsMinMax[1]);
    mCamera.setParameters(parameters);

    // Set SurfaceTexture. Android Capture needs a SurfaceTexture even if
    // it is not going to be used.
    mGlTextures = new int[1];
    // Generate one texture pointer and bind it as an external texture.
    GLES20.glGenTextures(1, mGlTextures, 0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);
    // No mip-mapping with camera source.
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // Clamp to edge is only option.
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
    mSurfaceTexture.setOnFrameAvailableListener(null);

    try {
      mCamera.setPreviewTexture(mSurfaceTexture);
    } catch (IOException ex) {
      Log.e(TAG, "allocate: " + ex);
      // The GL texture was already generated above; free it along with the
      // camera so this failure path leaks neither resource.
      GLES20.glDeleteTextures(1, mGlTextures, 0);
      mGlTextures = null;
      releaseCameraAfterAllocateFailure();
      return false;
    }

    // Queue preview callback buffers sized for one compact frame each.
    int bufSize =
        mCaptureFormat.mWidth
            * mCaptureFormat.mHeight
            * ImageFormat.getBitsPerPixel(mCaptureFormat.mPixelFormat)
            / 8;
    for (int i = 0; i < NUM_CAPTURE_BUFFERS; i++) {
      byte[] buffer = new byte[bufSize];
      mCamera.addCallbackBuffer(buffer);
    }
    mExpectedFrameSize = bufSize;

    return true;
  }

  // Releases and clears mCamera after a failure that occurred once the
  // camera was already open, so the device is not left locked.
  private void releaseCameraAfterAllocateFailure() {
    mCamera.release();
    mCamera = null;
  }
// Ejemplo n.º 22
 /**
  * Records {@code surfaceTexture} and hands it to the underlying camera via
  * {@code Camera.setPreviewTexture}.
  *
  * @param surfaceTexture the texture that will receive preview frames
  * @throws IOException if the camera rejects the texture
  */
 public void setPreviewTexture(SurfaceTexture surfaceTexture) throws IOException {
   this.mSurfaceTexture = surfaceTexture;
   this.mCamera.setPreviewTexture(surfaceTexture);
 }
// Ejemplo n.º 23
  // Opens camera |id| on the camera thread, attaches the shared
  // SurfaceTexture, starts the preview, and notifies |frameObserver|.
  // Camera.open failures are retried (via a delayed repost of this method) up
  // to MAX_OPEN_CAMERA_ATTEMPTS; any other failure stops capture and is
  // reported through |eventsHandler|, if one is set.
  private void startCaptureOnCameraThread(
      final int width,
      final int height,
      final int framerate,
      final CapturerObserver frameObserver,
      final Context applicationContext) {
    Throwable error = null;
    checkIsOnCameraThread();
    if (camera != null) {
      throw new RuntimeException("Camera has already been started.");
    }
    this.applicationContext = applicationContext;
    this.frameObserver = frameObserver;
    this.firstFrameReported = false;

    try {
      try {
        synchronized (cameraIdLock) {
          Logging.d(TAG, "Opening camera " + id);
          if (eventsHandler != null) {
            eventsHandler.onCameraOpening(id);
          }
          camera = Camera.open(id);
          info = new Camera.CameraInfo();
          Camera.getCameraInfo(id, info);
        }
      } catch (RuntimeException e) {
        openCameraAttempts++;
        if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
          Logging.e(TAG, "Camera.open failed, retrying", e);
          // Repost this whole method with the same arguments after a delay
          // rather than retrying inline, so the camera thread stays live.
          openCameraOnCodecThreadRunner =
              new Runnable() {
                @Override
                public void run() {
                  startCaptureOnCameraThread(
                      width, height, framerate, frameObserver, applicationContext);
                }
              };
          cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
          return;
        }
        openCameraAttempts = 0;
        throw e;
      }

      try {
        camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
      } catch (IOException e) {
        // Log the caught exception itself; |error| is still null here, so
        // logging it (as the original code did) would lose the cause.
        Logging.e(TAG, "setPreviewTexture failed", e);
        throw new RuntimeException(e);
      }

      Logging.d(
          TAG,
          "Camera orientation: "
              + info.orientation
              + " .Device orientation: "
              + getDeviceOrientation());
      camera.setErrorCallback(cameraErrorCallback);
      startPreviewOnCameraThread(width, height, framerate);
      frameObserver.onCapturerStarted(true);

      // Start camera observer.
      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
      return;
    } catch (RuntimeException e) {
      error = e;
    }
    Logging.e(TAG, "startCapture failed", error);
    stopCaptureOnCameraThread();
    frameObserver.onCapturerStarted(false);
    if (eventsHandler != null) {
      eventsHandler.onCameraError("Camera can not be started.");
    }
    return;
  }
// Ejemplo n.º 24
 /**
  * Attaches a preview target to the camera: a dummy {@link SurfaceTexture}
  * on Honeycomb and newer, or a null preview display on older releases.
  *
  * @throws IOException if the preview texture cannot be set
  */
 public void setPreview() throws IOException {
   if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
     // SurfaceTexture does not exist before API 11; a null display is the
     // legacy way to satisfy the preview requirement.
     mCamera.setPreviewDisplay(null);
     return;
   }
   mSf = new SurfaceTexture(10);
   mCamera.setPreviewTexture(mSf);
 }