public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    // If your preview can change or rotate, take care of those events here.
    // Make sure to stop the preview before resizing or reformatting it.

    if (mHolder.getSurface() == null) {
      // preview surface does not exist
      return;
    }

    // stop preview before making changes
    try {
      mCamera.stopPreview();
    } catch (Exception e) {
      // ignore: tried to stop a non-existent preview
    }

    // set preview size and make any resize, rotate or
    // reformatting changes here

    // start preview with new settings
    try {
      mCamera.setPreviewDisplay(mHolder);

      // Read back the parameters the camera actually accepted.
      Camera.Parameters parameters = mCamera.getParameters();
      int capWidth = parameters.getPreviewSize().width;
      int capHeight = parameters.getPreviewSize().height;
      // Preview format is one of the android.graphics.ImageFormat constants.
      int pixelformat = parameters.getPreviewFormat();
      PixelFormat pixelinfo = new PixelFormat();
      PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
      int cameraIndex = 0;
      boolean frontFacing = false;

      Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
      Camera.getCameraInfo(cameraIndex, cameraInfo);
      if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
        frontFacing = true;
      }
      // For the default NV21 format, bitsPerPixel = 12.
      int bufSize = capWidth * capHeight * pixelinfo.bitsPerPixel / 8;

      for (int i = 0; i < 5; i++) {
        mCamera.addCallbackBuffer(new byte[bufSize]);
      }

      mCamera.startPreview();

      nftSimpleActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing);
    } catch (Exception e) {
      Log.d(TAG, "Error starting camera preview: " + e.getMessage());
    }
  }
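
A note on the buffer-size computation above: ImageFormat.getBitsPerPixel() returns the bits per pixel for camera formats directly (12 for the default NV21), so the round-trip through a PixelFormat info struct can be avoided. A minimal sketch, reusing the parameters, capWidth, and capHeight from the example:

  import android.graphics.ImageFormat;

  int previewFormat = parameters.getPreviewFormat(); // ImageFormat.NV21 unless changed
  int bitsPerPixel = ImageFormat.getBitsPerPixel(previewFormat);
  int bufSize = capWidth * capHeight * bitsPerPixel / 8; // e.g. 460800 bytes for 640x480 NV21
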
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
          if (!mMoSyncPreviewFrameEventEnabled) {
            if (!mMoSyncPreviewAutoFocusEventEnabled) {
              // Return the buffer if in use
              if (mIsUsingPreviewCallbackBuffer) camera.addCallbackBuffer(data);

              return;
            }

            if (!mMoSyncPreviewHasFocus) {
              // Return the buffer if in use
              if (mIsUsingPreviewCallbackBuffer) camera.addCallbackBuffer(data);

              return;
            }

            // Restore the flag for the next auto focus event
            mMoSyncPreviewHasFocus = false;
          }

          if (!mSendEvent) {
            // Return the buffer if in use
            if (mIsUsingPreviewCallbackBuffer) camera.addCallbackBuffer(data);

            return;
          }

          lock.lock();

          try {
            if (data != null) {
              mSendEvent = false;

              System.arraycopy(data, 0, mImageBuffer, 0, data.length);

              // Do the time-consuming conversion in a new thread
              new Thread(
                      new Runnable() {
                        public void run() {
                          YUV420toRGB8888();

                          int[] event = new int[1];
                          event[0] = MAAPI_consts.EVENT_TYPE_CAMERA_PREVIEW;
                          mMoSyncThread.postEvent(event);
                        }
                      })
                  .start();
            }

          } catch (Exception e) {
            Log.i("Camera API", "Got exception:" + e.toString());
          } finally {
            lock.unlock();

            // Return the buffer if in use
            if (mIsUsingPreviewCallbackBuffer) camera.addCallbackBuffer(data);
          }
        }
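
YUV420toRGB8888() is not shown in this example. For reference, a self-contained sketch of what such a conversion typically looks like: the widely used integer-math NV21 (YUV420SP) to ARGB_8888 routine from the Android samples. The rgb array must hold width * height entries.

  // Converts an NV21 (YUV420SP) frame into packed ARGB_8888 pixels.
  static void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
      int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
      for (int i = 0; i < width; i++, yp++) {
        int y = (0xff & yuv420sp[yp]) - 16;
        if (y < 0) y = 0;
        if ((i & 1) == 0) { // one VU pair is shared by two pixels
          v = (0xff & yuv420sp[uvp++]) - 128;
          u = (0xff & yuv420sp[uvp++]) - 128;
        }
        int y1192 = 1192 * y;
        int r = y1192 + 1634 * v;
        int g = y1192 - 833 * v - 400 * u;
        int b = y1192 + 2066 * u;
        if (r < 0) r = 0; else if (r > 262143) r = 262143;
        if (g < 0) g = 0; else if (g > 262143) g = 262143;
        if (b < 0) b = 0; else if (b > 262143) b = 262143;
        rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
      }
    }
  }
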
 @Override
 public void onPreviewFrame(byte[] data, Camera camera) {
   mPreviewBufferLock.lock();
   try {
     if (!mIsRunning) {
       return;
     }
     if (data.length == mExpectedFrameSize) {
       int rotation = getDeviceOrientation();
       if (rotation != mDeviceOrientation) {
         mDeviceOrientation = rotation;
         Log.d(
             TAG,
             "onPreviewFrame: device orientation="
                 + mDeviceOrientation
                 + ", camera orientation="
                 + mCameraOrientation);
       }
       if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
         rotation = 360 - rotation;
       }
       rotation = (mCameraOrientation + rotation) % 360;
       nativeOnFrameAvailable(
           mNativeVideoCaptureDeviceAndroid, data, mExpectedFrameSize, rotation);
     }
   } finally {
     mPreviewBufferLock.unlock();
     if (camera != null) {
       camera.addCallbackBuffer(data);
     }
   }
 }
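
getDeviceOrientation() is defined elsewhere in that class. One plausible implementation, sketched here assuming a Context field named mContext, maps the current display rotation to degrees:

  // Sketch: translate the Display rotation into 0/90/180/270 degrees.
  // Requires android.content.Context, android.view.Surface, android.view.WindowManager.
  private int getDeviceOrientation() {
    WindowManager wm = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
    switch (wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:  return 90;
      case Surface.ROTATION_180: return 180;
      case Surface.ROTATION_270: return 270;
      case Surface.ROTATION_0:
      default:                   return 0;
    }
  }
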
  @Override
  public void onPreviewFrame(byte[] data, Camera cam) {

    nftBookActivity.nativeVideoFrame(data);

    cam.addCallbackBuffer(data);
  }
  private void setPreviewCallback() {
    if (mCamera == null) {
      android.util.Log.e("MOSYNC INTERNAL", "setPreviewCallback: camera is null");
      return;
    }

    try {
      // Probe via reflection whether setPreviewCallbackWithBuffer exists on this device.
      mSetPreviewCallbackWithBuffer =
          mCamera
              .getClass()
              .getMethod("setPreviewCallbackWithBuffer", Camera.PreviewCallback.class);

      Camera.Parameters parameters = getCurrentParameters();
      Camera.Size size = parameters.getPreviewSize();

      // 4 bytes per pixel is a safe upper bound for any preview format.
      mCallbackBuffer = new byte[size.width * size.height * 4];

      mCamera.addCallbackBuffer(mCallbackBuffer);

      mIsUsingPreviewCallbackBuffer = true;

      mCamera.setPreviewCallbackWithBuffer(previewCallback);

    } catch (NoSuchMethodException nsme) {
      mIsUsingPreviewCallbackBuffer = false;

      mCamera.setPreviewCallback(previewCallback);
    }
  }
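
The reflection probe above dates from when setPreviewCallbackWithBuffer was new; it has been public API since API level 8 (Froyo), so on any modern minSdkVersion the same branch can be written as a plain version check. A sketch reusing the fields of the method above:

  if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO) {
    mIsUsingPreviewCallbackBuffer = true;
    mCamera.addCallbackBuffer(mCallbackBuffer);
    mCamera.setPreviewCallbackWithBuffer(previewCallback);
  } else {
    mIsUsingPreviewCallbackBuffer = false;
    mCamera.setPreviewCallback(previewCallback);
  }
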
Example #6
 private void takePicture(byte[] imageData) {
   YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
   yuvImage.compressToJpeg(r, 100, out);
   File dir = new File(Environment.getExternalStorageDirectory() + "/cellbot/pictures");
   dir.mkdirs();
   FileOutputStream outStream;
   try {
     String picName = dir.toString() + "/" + System.currentTimeMillis() + ".jpg";
     outStream = new FileOutputStream(picName);
     outStream.write(out.toByteArray());
     outStream.flush();
     outStream.close();
     Log.e("Picture saved:", picName);
   } catch (FileNotFoundException e) {
     e.printStackTrace();
   } catch (IOException e) {
     e.printStackTrace();
   } finally {
     needToTakePicture = false;
     out.reset();
     if (mCamera != null) {
       mCamera.addCallbackBuffer(mCallbackBuffer);
     }
   }
 }
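
The same save path can be written with try-with-resources, which closes the stream even when write() throws. A sketch; savePicture and its parameters are hypothetical names, not part of the original:

  // Sketch: write already-compressed JPEG bytes to a timestamped file in dir.
  private void savePicture(byte[] jpegBytes, File dir) {
    File picture = new File(dir, System.currentTimeMillis() + ".jpg");
    try (FileOutputStream outStream = new FileOutputStream(picture)) {
      outStream.write(jpegBytes);
      Log.i("Picture saved", picture.getAbsolutePath());
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
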
Example #7
  public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    mHolder.setFixedSize(w, h);
    // Start the preview
    Parameters params = mCamera.getParameters();
    previewHeight = params.getPreviewSize().height;
    previewWidth = params.getPreviewSize().width;
    previewFormat = params.getPreviewFormat();

    // Crop the edges of the picture to reduce the image size
    r =
        new Rect(
            previewShrink,
            previewShrink,
            previewWidth - previewShrink,
            previewHeight - previewShrink);

    // Hard-coded buffer size; it must equal previewWidth * previewHeight *
    // bitsPerPixel / 8 for the active preview format (see earlier examples).
    mCallbackBuffer = new byte[497664];

    mCamera.setParameters(params);
    mCamera.setPreviewCallbackWithBuffer(
        new PreviewCallback() {
          public void onPreviewFrame(byte[] imageData, Camera arg1) {
            convWorker.nextFrame(imageData);
          }
        });
    mCamera.addCallbackBuffer(mCallbackBuffer);
    mCamera.startPreview();
    setTorchMode(mTorchMode);
  }
Example #8
 // Constructor
 public AudioSynth(
     final Camera camera,
     AudioTrack redSynthesizer,
     AudioTrack greenSynthesizer,
     AudioTrack blueSynthesizer) {
   mFinished = false;
   mRGBData = null;
   mRedSynthesizer = redSynthesizer;
   mGreenSynthesizer = greenSynthesizer;
   mBlueSynthesizer = blueSynthesizer;
   redGeneratedSnd = new short[maxSamples];
   greenGeneratedSnd = new short[maxSamples];
   blueGeneratedSnd = new short[maxSamples];
   mRedHistogram = new int[256];
   mGreenHistogram = new int[256];
   mBlueHistogram = new int[256];
   mBinSquared = new double[256];
   for (int bin = 0; bin < 256; bin++) {
     mBinSquared[bin] = ((double) bin) * bin;
   }
   Camera.Parameters params = camera.getParameters();
   mImageWidth = params.getPreviewSize().width;
   mImageHeight = params.getPreviewSize().height;
   int yStride = (int) Math.ceil(mImageWidth / 16.0) * 16;
   int uvStride = (int) Math.ceil((yStride / 2) / 16.0) * 16;
   int ySize = yStride * mImageHeight;
   int uvSize = uvStride * mImageHeight / 2;
   final int size = ySize + uvSize * 2;
   mYUVBuffer = new byte[size];
   camera.addCallbackBuffer(mYUVBuffer);
   mRGBData = new int[mImageWidth * mImageHeight];
 }
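
The stride arithmetic above pads each row to a 16-byte boundary, which is the buffer layout documented for ImageFormat.YV12. A worked example for a 640x360 preview:

  yStride  = ceil(640 / 16.0) * 16       = 640
  uvStride = ceil((640 / 2) / 16.0) * 16 = 320
  ySize    = 640 * 360                   = 230400
  uvSize   = 320 * 360 / 2               = 57600
  size     = ySize + 2 * uvSize          = 345600 bytes
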
  private int tryStartCapture(int width, int height, int frameRate) {
    if (camera == null) {
      Log.e(TAG, "Camera not initialized %d" + id);
      return -1;
    }

    Log.d(
        TAG,
        "tryStartCapture: "
            + width
            + "x"
            + height
            + ", frameRate: "
            + frameRate
            + ", isCaptureRunning: "
            + isCaptureRunning
            + ", isSurfaceReady: "
            + isSurfaceReady
            + ", isCaptureStarted: "
            + isCaptureStarted);

    if (isCaptureRunning || !isCaptureStarted) {
      return 0;
    }

    CaptureCapabilityAndroid currentCapability = new CaptureCapabilityAndroid();
    currentCapability.width = width;
    currentCapability.height = height;
    currentCapability.maxFPS = frameRate;
    PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);

    Camera.Parameters parameters = camera.getParameters();
    parameters.setPreviewSize(currentCapability.width, currentCapability.height);
    parameters.setPreviewFormat(PIXEL_FORMAT);
    parameters.setPreviewFrameRate(currentCapability.maxFPS);
    try {
      camera.setParameters(parameters);
    } catch (RuntimeException e) {
      Log.e(TAG, "setParameters failed", e);
      return -1;
    }

    int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
    for (int i = 0; i < numCaptureBuffers; i++) {
      camera.addCallbackBuffer(new byte[bufSize]);
    }
    camera.setPreviewCallbackWithBuffer(this);
    ownsBuffers = true;

    camera.startPreview();
    previewBufferLock.lock();
    expectedFrameSize = bufSize;
    isCaptureRunning = true;
    previewBufferLock.unlock();

    return 0;
  }
  /** Start camera capture. */
  public void startCapture() {
    try {
      cameraInfo = new CameraInfo();
      if (mCamera == null) {
        // mCamera = Camera.open();
        camera_count = Camera.getNumberOfCameras();
        Log.e(TAG, "camera count:" + camera_count);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
          for (int i = 0; i < camera_count; i++) {
            CameraInfo info = new CameraInfo();
            Camera.getCameraInfo(i, info);
            // find front camera
            if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
              Log.e(TAG, "to open front camera");
              mCamera = Camera.open(i);
              Camera.getCameraInfo(i, cameraInfo);
            }
          }
        }
        if (mCamera == null) {
          Log.e(TAG, "AAAAA OPEN camera");
          mCamera = Camera.open();
          Camera.getCameraInfo(0, cameraInfo);
        }
      }

      mCamera.stopPreview();
      mParameters = mCamera.getParameters();
      if (isScreenOriatationPortrait()) {
        if (cameraInfo.orientation == 270 || cameraInfo.orientation == 0)
          mCamera.setDisplayOrientation(90);
        if (cameraInfo.orientation == 90) mCamera.setDisplayOrientation(270);
      } else {
        if (cameraInfo.orientation == 90) mCamera.setDisplayOrientation(180);
      }

      mParameters.setPreviewSize(mwidth, mheight);
      mParameters.setPreviewFrameRate(15);
      mCamera.setParameters(mParameters);
      int mformat = mParameters.getPreviewFormat();
      int bitsperpixel = ImageFormat.getBitsPerPixel(mformat);
      Log.e(TAG, "pzy bitsperpixel: " + bitsperpixel);
      yuv_frame = new byte[mwidth * mheight * bitsperpixel / 8];
      yuv_Rotate90 = new byte[mwidth * mheight * bitsperpixel / 8];
      //            yuv_Rotate90lr = new byte[mwidth * mheight * bitsperpixel / 8];
      mCamera.addCallbackBuffer(yuv_frame);
      // mCamera.setPreviewDisplay(holder);
      mCamera.setPreviewDisplay(localSurfaceHolder);
      mCamera.setPreviewCallbackWithBuffer(this);

      EMVideoCallHelper.getInstance().setResolution(mwidth, mheight);

      mCamera.startPreview();
      Log.d(TAG, "camera start preview");
    } catch (Exception e) {
      e.printStackTrace();
      if (mCamera != null) {
        mCamera.release();
        mCamera = null;
      }
    }
  }
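
The orientation branches above only handle a few device/camera combinations. The Camera.setDisplayOrientation documentation gives a general algorithm that also compensates for the front camera's mirroring; reproduced here as a sketch (an Activity is needed to query the display rotation):

  public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
    Camera.CameraInfo info = new Camera.CameraInfo();
    Camera.getCameraInfo(cameraId, info);
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    int degrees = 0;
    switch (rotation) {
      case Surface.ROTATION_0:   degrees = 0;   break;
      case Surface.ROTATION_90:  degrees = 90;  break;
      case Surface.ROTATION_180: degrees = 180; break;
      case Surface.ROTATION_270: degrees = 270; break;
    }
    int result;
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
      result = (info.orientation + degrees) % 360;
      result = (360 - result) % 360; // compensate for the front camera mirror
    } else { // back-facing
      result = (info.orientation - degrees + 360) % 360;
    }
    camera.setDisplayOrientation(result);
  }
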
 public void onPreviewFrame(byte[] frame, Camera arg1) {
   Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
   synchronized (this) {
     mFrameChain[1 - mChainIdx].put(0, 0, frame);
     this.notify();
   }
   if (mCamera != null) mCamera.addCallbackBuffer(mBuffer);
 }
Example #12
 public void onPreviewFrame(final byte[] data, final Camera camera) {
   try {
     Camera.Size size = camera.getParameters().getPreviewSize();
     processImage(data, size.width, size.height);
     camera.addCallbackBuffer(data);
   } catch (RuntimeException e) {
     // The camera has probably just been released, ignore.
   }
 }
 @Override
 public void onPreviewFrame(byte[] frame, Camera arg1) {
   if (BuildConfig.DEBUG) Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
   synchronized (this) {
     mFrameChain[mChainIdx].put(0, 0, frame);
     mCameraFrameReady = true;
     this.notify();
   }
   if (mCamera != null) mCamera.addCallbackBuffer(mBuffer);
 }
 @TargetApi(Build.VERSION_CODES.FROYO)
 public void onPreviewFrame(byte[] frame, Camera arg1) {
   Log.i(TAG, "Preview Frame received. Need to create MAT and deliver it to clients");
   Log.i(TAG, "Frame size  is " + frame.length);
   synchronized (this) {
     mBaseMat.put(0, 0, frame);
     this.notify();
   }
   if (mCamera != null) mCamera.addCallbackBuffer(mBuffer);
 }
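
Callbacks like the two above (in the style of OpenCV's JavaCameraView) only stash the frame and notify; a worker thread does the actual processing. A minimal sketch of the consuming side, with the loop wrapper and delivery call assumed rather than taken from the original:

  // Sketch: worker loop that pairs with the producer in onPreviewFrame().
  private void cameraWorkerLoop() {
    boolean hasFrame;
    do {
      synchronized (this) {
        try {
          while (!mCameraFrameReady && !mStopThread) {
            this.wait(); // woken by onPreviewFrame()'s notify()
          }
        } catch (InterruptedException e) {
          return;
        }
        hasFrame = mCameraFrameReady;
        if (mCameraFrameReady) {
          mChainIdx = 1 - mChainIdx; // swap write and read buffers
          mCameraFrameReady = false;
        }
      }
      if (!mStopThread && hasFrame) {
        deliverFrame(mFrameChain[1 - mChainIdx]); // hypothetical delivery call
      }
    } while (!mStopThread);
  }
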
  // Called by native code.  Returns true if capturer is started.
  //
  // Note that this actually opens the camera, which can be a slow operation and
  // thus might be done on a background thread, but ViE API needs a
  // synchronous success return value so we can't do that.
  private synchronized boolean startCapture(int width, int height, int min_mfps, int max_mfps) {
    Log.d(TAG, "startCapture: " + width + "x" + height + "@" + min_mfps + ":" + max_mfps);
    Throwable error = null;
    try {
      camera = Camera.open(id);

      localPreview = ViERenderer.GetLocalRenderer();
      if (localPreview != null) {
        localPreview.addCallback(this);
        if (localPreview.getSurface() != null && localPreview.getSurface().isValid()) {
          camera.setPreviewDisplay(localPreview);
        }
      } else {
        // No local renderer (we only care about onPreviewFrame() buffers, not a
        // directly-displayed UI element).  Camera won't capture without
        // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
        // hand it over to Camera, but never listen for frame-ready callbacks,
        // and never call updateTexImage on it.
        try {
          // "42" because http://goo.gl/KaEn8
          dummySurfaceTexture = new SurfaceTexture(42);
          camera.setPreviewTexture(dummySurfaceTexture);
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }

      Camera.Parameters parameters = camera.getParameters();
      Log.d(TAG, "isVideoStabilizationSupported: " + parameters.isVideoStabilizationSupported());
      if (parameters.isVideoStabilizationSupported()) {
        parameters.setVideoStabilization(true);
      }
      parameters.setPreviewSize(width, height);
      parameters.setPreviewFpsRange(min_mfps, max_mfps);
      int format = ImageFormat.NV21;
      parameters.setPreviewFormat(format);
      camera.setParameters(parameters);
      int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
      for (int i = 0; i < numCaptureBuffers; i++) {
        camera.addCallbackBuffer(new byte[bufSize]);
      }
      camera.setPreviewCallbackWithBuffer(this);
      camera.startPreview();
      return true;
    } catch (IOException e) {
      error = e;
    } catch (RuntimeException e) {
      error = e;
    }
    Log.e(TAG, "startCapture failed", error);
    if (camera != null) {
      stopCapture();
    }
    return false;
  }
Example #16
  public synchronized void setupCamera(int width, int height) {
    if (mCamera != null) {
      Log.i(TAG, "Setup Camera - " + width + "x" + height);
      Camera.Parameters params = mCamera.getParameters();
      List<Camera.Size> sizes = params.getSupportedPreviewSizes();
      mFrameWidth = width;
      mFrameHeight = height;

      // selecting optimal camera preview size
      {
        int minDiff = Integer.MAX_VALUE;
        for (Camera.Size size : sizes) {
          if (Math.abs(size.height - height) < minDiff) {
            mFrameWidth = size.width;
            mFrameHeight = size.height;
            minDiff = Math.abs(size.height - height);
          }
        }
      }

      params.setPreviewSize(getFrameWidth(), getFrameHeight());

      List<String> focusModes = params.getSupportedFocusModes();
      if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
        params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
      }

      mCamera.setParameters(params);

      /* Now allocate the buffer */
      params = mCamera.getParameters();
      int size = params.getPreviewSize().width * params.getPreviewSize().height;
      size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
      mBuffer = new byte[size];
      /* The buffer where the current frame will be copied */
      mFrame = new byte[size];
      mCamera.addCallbackBuffer(mBuffer);

      /* Notify that the preview is about to be started and deliver preview size */
      onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

      try {
        setPreview();
      } catch (IOException e) {
        Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
      }

      /* Now we can start a preview */
      mCamera.startPreview();
    }
  }
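
The selection loop above compares heights only, so two candidate sizes with the same height but very different widths score identically. A sketch of a variant that weighs both dimensions (the helper name is hypothetical):

  private static Camera.Size pickClosestSize(List<Camera.Size> sizes, int width, int height) {
    Camera.Size best = sizes.get(0);
    int minDiff = Integer.MAX_VALUE;
    for (Camera.Size s : sizes) {
      int diff = Math.abs(s.width - width) + Math.abs(s.height - height);
      if (diff < minDiff) {
        best = s;
        minDiff = diff;
      }
    }
    return best;
  }
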
Example #17
  private void initializeCamera() {

    Camera.Parameters p = mCamera.getParameters();
    // Hack for devices whose preview callback is never invoked without a preview target
    // See http://stackoverflow.com/questions/5788993/use-android-camera-without-surface-view
    try {
      mCameraTexture = new SurfaceTexture(10);
      mCamera.setPreviewTexture(mCameraTexture);
    } catch (Throwable ignored) {

    }

    // Choose the smallest preview size available
    List<Camera.Size> sizes = p.getSupportedPreviewSizes();
    int bestIndex = -1;
    int pixelCount = Integer.MAX_VALUE;

    for (int i = 0; i < sizes.size(); i++) {
      int w = sizes.get(i).width, h = sizes.get(i).height;
      int count = w * h;
      if (count < pixelCount) {
        bestIndex = i;
        pixelCount = count; // Note pixelCount is reused below
      }
    }

    Camera.Size size = sizes.get(bestIndex);

    mCameraWidth = size.width;
    mCameraHeight = size.height;

    Log.d(TAG, "W:" + mCameraWidth + ",H:" + mCameraHeight + ", num pixels:" + pixelCount);
    mCameraDecodedRGB = new int[pixelCount];

    p.setPreviewSize(mCameraWidth, mCameraHeight);
    p.setPreviewFormat(ImageFormat.NV21);
    mCamera.setParameters(p);

    int bitsPerPixel = ImageFormat.getBitsPerPixel(p.getPreviewFormat());
    int bytes = (pixelCount * bitsPerPixel) / 8;
    // Double Buffer - allow a buffer to be filled with new data while we are using the other one
    mCamera.addCallbackBuffer(new byte[bytes]);
    mCamera.addCallbackBuffer(new byte[bytes]);

    mCamera.setPreviewCallbackWithBuffer(this);
    mPreviewRunning = true;

    mCamera.startPreview();
    Log.d(TAG, "startPreview()");
  }
  @SuppressLint("NewApi") // CameraInfo
  @SuppressWarnings("deprecation") // setPreviewFrameRate
  @Override
  public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {

    if (camera != null) {

      String camResolution =
          PreferenceManager.getDefaultSharedPreferences(getContext())
              .getString(
                  "pref_cameraResolution",
                  getResources().getString(R.string.pref_defaultValue_cameraResolution));
      String[] dims = camResolution.split("x", 2);
      Camera.Parameters parameters = camera.getParameters();
      parameters.setPreviewSize(Integer.parseInt(dims[0]), Integer.parseInt(dims[1]));
      parameters.setPreviewFrameRate(30);
      camera.setParameters(parameters);

      parameters = camera.getParameters();
      int capWidth = parameters.getPreviewSize().width;
      int capHeight = parameters.getPreviewSize().height;
      int pixelformat = parameters.getPreviewFormat(); // an android.graphics.ImageFormat constant
      PixelFormat pixelinfo = new PixelFormat();
      PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
      int cameraIndex = 0;
      boolean frontFacing = false;
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        cameraIndex =
            Integer.parseInt(
                PreferenceManager.getDefaultSharedPreferences(getContext())
                    .getString("pref_cameraIndex", "0"));
        Camera.getCameraInfo(cameraIndex, cameraInfo);
        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) frontFacing = true;
      }

      int bufSize =
          capWidth
              * capHeight
              * pixelinfo.bitsPerPixel
              / 8; // For the default NV21 format, bitsPerPixel = 12.

      for (int i = 0; i < 5; i++) camera.addCallbackBuffer(new byte[bufSize]);

      camera.startPreview();

      nftBookActivity.nativeVideoInit(capWidth, capHeight, cameraIndex, frontFacing);
    }
  }
Example #19
 @Override
 public void onPreviewFrame(byte[] data, Camera camera) {
    if (start_flag) {
      // Write and transmit the preview data according to the screen orientation.
     if (isScreenOriatationPortrait()) {
       YUV420spRotate90(yuv_Rotate90, yuv_frame, mwidth, mheight);
       YUV42left2right(yuv_Rotate90lr, yuv_Rotate90, mheight, mwidth);
       callHelper.processPreviewData(mheight, mwidth, yuv_Rotate90lr);
     } else {
       YUV42left2right(yuv_Rotate90, yuv_frame, mwidth, mheight);
       callHelper.processPreviewData(mheight, mwidth, yuv_Rotate90);
     }
   }
   camera.addCallbackBuffer(yuv_frame);
 }
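
YUV420spRotate90 and YUV42left2right are project-local helpers not shown here. For reference, a 90-degree clockwise rotation of an NV21 frame commonly looks like the sketch below; it assumes even width and height and a dst array the same size as src:

  // Sketch: rotate an NV21 frame 90 degrees clockwise (dst becomes height x width).
  static void rotateNV21CW(byte[] dst, byte[] src, int width, int height) {
    int i = 0;
    // Y plane: each output row scans one input column, bottom to top.
    for (int x = 0; x < width; x++) {
      for (int y = height - 1; y >= 0; y--) {
        dst[i++] = src[y * width + x];
      }
    }
    // Interleaved VU plane at quarter resolution; keep V before U.
    int uvOffset = width * height;
    for (int x = 0; x < width; x += 2) {
      for (int y = height / 2 - 1; y >= 0; y--) {
        dst[i++] = src[uvOffset + y * width + x];     // V
        dst[i++] = src[uvOffset + y * width + x + 1]; // U
      }
    }
  }
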
        public void onPreviewFrame(byte[] data, Camera c) {

          cameraFrameTick();

          update(data, mPreviewWidth, mPreviewHeight);

          mNativeProcessTime = getNativeProcessTime();

          if (c != null) {
            c.addCallbackBuffer(mPreviewBuffer);
            // Re-arming the callback every frame is redundant; once set, it persists.
            c.setPreviewCallbackWithBuffer(this);
          }

          requestRender();
        }
Example #21
    /**
     * Sets the frame data received from the camera. This adds the previous unused frame buffer (if
     * present) back to the camera, and keeps a pending reference to the frame data for future use.
     */
    void setNextFrame(byte[] data, Camera camera) {
      synchronized (mLock) {
        if (mPendingFrameData != null) {
          camera.addCallbackBuffer(mPendingFrameData.array());
          mPendingFrameData = null;
        }

        // Timestamp and frame ID are maintained here, which will give downstream code some
        // idea of the timing of frames received and when frames were dropped along the way.
        mPendingTimeMillis = SystemClock.elapsedRealtime() - mStartTimeMillis;
        mPendingFrameId++;
        mPendingFrameData = mBytesToByteBuffer.get(data);

        // Notify the processor thread if it is waiting on the next frame (see below).
        mLock.notifyAll();
      }
    }
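
setNextFrame() pairs with a processor loop on a dedicated thread. A sketch of that loop; mActive, mCamera, and processFrame() are assumptions for illustration. It blocks on mLock until a frame arrives, takes ownership of the pending data, and recycles the buffer when done:

  public void run() {
    ByteBuffer data;
    while (true) {
      synchronized (mLock) {
        while (mActive && (mPendingFrameData == null)) {
          try {
            mLock.wait(); // released and re-acquired around setNextFrame()
          } catch (InterruptedException e) {
            return;
          }
        }
        if (!mActive) {
          return;
        }
        data = mPendingFrameData;
        mPendingFrameData = null;
      }
      try {
        processFrame(data); // hypothetical: run detection on this frame
      } finally {
        mCamera.addCallbackBuffer(data.array()); // hand the buffer back to the camera
      }
    }
  }
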
Example #22
  public synchronized void onPreviewFrame(byte[] data, Camera camera) {
    // Log.d(TAG,"onPreviewFrame ");
    Canvas canvas = null;
    try {

      canvas = mHolder.lockCanvas(null);

      if (canvas == null || !mPreviewRunning) {
        Log.d(TAG, "canvas is null or preview is not active");
        return; /* View isn't ready for the data */
      }

      int canvasWidth = canvas.getWidth();
      int canvasHeight = canvas.getHeight();

      publish(data);

      DecodePixelUtil.decodeYUV(mCameraDecodedRGB, data, mCameraWidth, mCameraHeight);

      // updateBroadcast(data);

      canvas.save();
      canvas.scale(canvasWidth / (float) mCameraWidth, canvasHeight / (float) mCameraHeight);
      canvas.drawBitmap(
          mCameraDecodedRGB,
          0 /*offset*/,
          mCameraWidth /*array stride*/,
          0,
          0,
          mCameraWidth,
          mCameraHeight,
          false,
          null);
      canvas.restore();

    } catch (Exception ex) {
      ex.printStackTrace();
    } finally {
      camera.addCallbackBuffer(data); // buffer can be re-used

      if (canvas != null) {
        mHolder.unlockCanvasAndPost(canvas);
      }
    }
  }
    // Discards previous queued buffers and adds new callback buffers to camera.
    public void queueCameraBuffers(int frameSize, Camera camera) {
      checkIsOnValidThread();
      this.camera = camera;
      this.frameSize = frameSize;

      queuedBuffers.clear();
      for (int i = 0; i < numCaptureBuffers; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        camera.addCallbackBuffer(buffer.array());
        queuedBuffers.put(buffer.array(), buffer);
      }
      Logging.d(
          TAG,
          "queueCameraBuffers enqueued "
              + numCaptureBuffers
              + " buffers of size "
              + frameSize
              + ".");
    }
Example #24
  public void onPreviewFrame(byte[] data, Camera camera) {
    previewBufferLock.lock();

    // The following line is for debug only
    // Log.v(TAG, "preview frame length " + data.length +
    //            " context" + context);
    if (isCaptureRunning) {
      // If StartCapture has been called but not StopCapture
      // Call the C++ layer with the captured frame
      if (data.length == expectedFrameSize) {
        ProvideCameraFrame(data, expectedFrameSize, context);
        if (ownsBuffers) {
          // Give the video buffer to the camera service again.
          camera.addCallbackBuffer(data);
        }
      }
    }
    previewBufferLock.unlock();
  }
  public void setCallback() {
    int bufferSize = 0;
    int pformat;
    int bitsPerPixel;

    pformat = mCamera.getParameters().getPreviewFormat();

    PixelFormat info = new PixelFormat();
    PixelFormat.getPixelFormatInfo(pformat, info);
    bitsPerPixel = info.bitsPerPixel;

    bufferSize = mPreviewWidth * mPreviewHeight * bitsPerPixel / 8;

    mPreviewBuffer = null;

    mPreviewBuffer = new byte[bufferSize + 4096];

    mCamera.addCallbackBuffer(mPreviewBuffer);
    mCamera.setPreviewCallbackWithBuffer(mCameraCallback);
  }
Example #26
 private void uploadImage(byte[] imageData) {
   try {
     YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
     yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20
     // seems pretty decent for quality + size.
     if (putUrl.contains("127.0.0.1") || putUrl.contains("localhost")) {
       try {
         Thread.sleep(50);
       } catch (InterruptedException e) {
         e.printStackTrace();
       }
       resetConnection();
     }
     PutMethod put = new PutMethod(putUrl);
     put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
     int result = put.execute(mHttpState, mConnection);
     // Log.e("result", result + "");
   } catch (UnsupportedEncodingException e) {
     Log.e(TAG, "UnsupportedEncodingException: Error uploading image: " + e.getMessage());
   } catch (IllegalStateException e) {
     Log.e(TAG, "IllegalStateException: Error uploading image: " + e.getMessage());
     resetConnection();
   } catch (ClientProtocolException e) {
     Log.e(TAG, "ClientProtocolException: Error uploading image: " + e.getMessage());
     resetConnection();
   } catch (UnknownHostException e) {
     Log.e(TAG, "UnknownHostException: Error uploading image: " + e.getMessage());
     resetConnection();
   } catch (NoHttpResponseException e) {
     // Silently ignore this.
   } catch (IOException e) {
     Log.e(TAG, "IOException: Error uploading image: " + e.getMessage());
     resetConnection();
   } finally {
     out.reset();
     if (mCamera != null) {
       mCamera.addCallbackBuffer(mCallbackBuffer);
     }
     isUploading = false;
   }
 }
Example #27
 private void appEngineUploadImage(byte[] imageData) {
   Log.e("app engine remote eyes", "called");
   try {
     YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
     yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20
     // seems pretty decent for
     // quality + size.
     Log.e("app engine remote eyes", "upload starting");
     HttpPost httpPost = new HttpPost(postUrl);
     Log.e("app engine perf", "0");
     MultipartEntity entity = new MultipartEntity();
     Log.e("app engine perf", "1");
     entity.addPart(
         "img", new InputStreamBody(new ByteArrayInputStream(out.toByteArray()), "video.jpg"));
     Log.e("app engine perf", "2");
     httpPost.setEntity(entity);
     Log.e("app engine perf", "3");
     HttpResponse response = httpclient.execute(httpPost);
     Log.e("app engine remote eyes", "result: " + response.getStatusLine());
     Log.e("app engine remote eyes", "upload complete");
   } catch (UnsupportedEncodingException e) {
     e.printStackTrace();
   } catch (IllegalStateException e) {
     e.printStackTrace();
     resetAppEngineConnection();
   } catch (ClientProtocolException e) {
     e.printStackTrace();
     resetAppEngineConnection();
   } catch (IOException e) {
     e.printStackTrace();
     resetAppEngineConnection();
   } finally {
     out.reset();
     if (mCamera != null) {
       mCamera.addCallbackBuffer(mCallbackBuffer);
     }
     isUploading = false;
     Log.e("app engine remote eyes", "finished");
   }
 }
Example #28
 @Override
 public void onPreviewFrame(byte[] data, Camera camera) {
   int[] rgb = new int[mDataLength];
   mYUVProcessor.processYUV420SP(rgb, data, mPreviewWidth, mPreviewHeight);
   Bitmap bmp = Bitmap.createBitmap(rgb, mPreviewWidth, mPreviewHeight, Bitmap.Config.ARGB_8888);
   setBitmap(bmp);
   // n++;
   // if (n == 10) {
   // debugString = Double
   // .toString(10000 / (System.currentTimeMillis() - lastFrame))
   // + " FPS";
   // lastFrame = System.currentTimeMillis();
   // n = 0;
   // }
   drawIntoBitmap(0);
   invalidate();
   if (use3D) {
     updateTexture();
   }
   camera.addCallbackBuffer(data);
 }
    public void returnBuffer(long timeStamp) {
      checkIsOnValidThread();
      final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
      if (returnedFrame == null) {
        throw new RuntimeException(
            "Unknown data buffer with time stamp " + timeStamp + " returned");
      }

      if (camera != null && returnedFrame.capacity() == frameSize) {
        camera.addCallbackBuffer(returnedFrame.array());
        if (queuedBuffers.isEmpty()) {
          Logging.d(
              TAG,
              "Frame returned when camera is running out of capture"
                  + " buffers for TS "
                  + TimeUnit.NANOSECONDS.toMillis(timeStamp));
        }
        queuedBuffers.put(returnedFrame.array(), returnedFrame);
        return;
      }

      if (returnedFrame.capacity() != frameSize) {
        Logging.d(
            TAG,
            "returnBuffer with time stamp "
                + TimeUnit.NANOSECONDS.toMillis(timeStamp)
                + " called with old frame size, "
                + returnedFrame.capacity()
                + ".");
        // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
        // created in queueCameraBuffers so this must be an old buffer.
        return;
      }

      Logging.d(
          TAG,
          "returnBuffer with time stamp "
              + TimeUnit.NANOSECONDS.toMillis(timeStamp)
              + " called after camera has been stopped.");
    }
Example #30
  public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    mHolder.setFixedSize(w, h);
    // Start the preview
    Parameters params = mCamera.getParameters();
    previewHeight = params.getPreviewSize().height;
    previewWidth = params.getPreviewSize().width;
    previewFormat = params.getPreviewFormat();

    // Crop the edges of the picture to reduce the image size
    r = new Rect(80, 20, previewWidth - 80, previewHeight - 20);

    // 460800 bytes matches a 640x480 NV21 frame (width * height * 3/2).
    mCallbackBuffer = new byte[460800];

    mCamera.setParameters(params);
    mCamera.setPreviewCallbackWithBuffer(
        new PreviewCallback() {
          public void onPreviewFrame(byte[] imageData, Camera arg1) {
            final byte[] data = imageData;
            if (!isUploading) {
              if (needToTakePicture) {
                takePicture(imageData);
              } else {
                isUploading = true;
                new Thread(
                        new Runnable() {
                          @Override
                          public void run() {
                            uploadImage(data);
                          }
                        })
                    .start();
                // appEngineUploadImage(imageData);
              }
            }
          }
        });
    mCamera.addCallbackBuffer(mCallbackBuffer);
    mCamera.startPreview();
    setTorchMode(mTorchMode);
  }