/**
   * Creates a new CaptureRenderer, based on an Inside3dRenderer with the given mesh as Skybox.
   *
   * @param context - Context of the application.
   * @param skybox - Cube mesh this renderer sits inside of and draws as its background.
   * @param cameraManager - Camera facade; this renderer registers itself as a snapshot listener.
   */
  public CaptureRenderer(Context context, Cube skybox, CameraManager cameraManager) {
    // based on Inside3dRenderer. We are inside a skybox.
    super(context);
    mSkybox = skybox;
    super.setSurroundingMesh(mSkybox);
    super.setFovDeg(DEFAULT_FOV_DEG);

    // init attributes
    mCameraManager = cameraManager;
    mCameraManager.addSnapshotEventListener(this);
    mContext = context;

    // Decoded once up front so each snapshot/contour reuses the same bitmaps.
    mMarkerBitmap = BitmapDecoder.safeDecodeBitmap(mContext.getResources(), MARKER_RESSOURCE_ID);
    mContourBitmap = BitmapDecoder.safeDecodeBitmap(mContext.getResources(), CONTOUR_RESSOURCE_ID);

    // If auto sampling is enabled (rate == 0): derive the sample rate from the camera
    // resolution, rounded up to a power of two, then scaled down by the auto-sample ratio.
    if (mSampleRate == 0) {
      mSampleRate = (int) mCameraManager.getCameraResolution();
      mSampleRate = (int) (ceilPowOf2(mSampleRate) / SNAPSHOT_AUTO_SAMPLE_RATIO);
    }

    // Start from an identity view matrix; updated later as the device orientation changes.
    mViewMatrix = new float[16];
    Matrix.setIdentityM(mViewMatrix, 0);

    // create dots and snapshot lists
    mSnapshots = new ArrayList<Snapshot3D>();
    mDots = new LinkedList<Snapshot3D>();
    mContours43 = new LinkedList<Snapshot3D>();
    mContours34 = new LinkedList<Snapshot3D>();
    mContours = mContours43; // defaults to the 4:3 contour set — TODO confirm default aspect
    mTargetsLock = new ReentrantLock();
    mSnapshotsLock = new ReentrantLock();
  }
 /**
  * Entry point: reads camera addresses (one per line) from the local "config" file and
  * registers them all with a new CameraManager.
  *
  * @param args unused
  * @throws IOException if the config file cannot be opened or read
  */
 public static void main(String[] args) throws IOException {
   CameraManager ca = new CameraManager();
   ArrayList<String> addresses = new ArrayList<String>();
   // try-with-resources closes the reader (and the underlying stream) even when
   // readLine() throws — the original leaked the FileInputStream.
   try (BufferedReader in =
       new BufferedReader(new InputStreamReader(new FileInputStream("config")))) {
     String line;
     while ((line = in.readLine()) != null) {
       addresses.add(line);
     }
   }
   ca.addCamera(addresses);
 }
 /** After a successful decode, flips back to PREVIEW and requests the next frame. */
 private void restartPreviewAndDecode() {
   if (state != State.SUCCESS) {
     return; // only restart from the SUCCESS state; PREVIEW/DONE are left alone
   }
   state = State.PREVIEW;
   cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.zxing_decode);
   activity.drawViewfinder();
 }
 /** Drives the zxing scan state machine: restart, decode-succeeded, decode-failed. */
 @Override
 public void handleMessage(Message message) {
   int what = message.what;
   if (what == ZXING_RESTART) {
     restartPreviewAndDecode();
   } else if (what == R.id.zxing_decode_succeeded) {
     state = State.SUCCESS;
     Bundle bundle = message.getData();
     Bitmap barcode = null;
     if (bundle != null) {
       byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
       if (compressedBitmap != null) {
         Bitmap decoded =
             BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
         // Mutable copy:
         barcode = decoded.copy(Bitmap.Config.ARGB_8888, true);
       }
     }
     activity.handleDecode((Result) message.obj, barcode);
   } else if (what == R.id.zxing_decode_failed) {
     // We're decoding as fast as possible, so when one decode fails, start another.
     state = State.PREVIEW;
     cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.zxing_decode);
   }
 }
// Example #5 (0 votes) — scraped-snippet separator; not part of any class
 /** Activity lifecycle: shuts the decode handler down and releases the camera driver. */
 @Override
 protected void onPause() {
   super.onPause();
   if (this.handler != null) {
     // Stop the decode thread cleanly before dropping the reference.
     this.handler.quitSynchronously();
     this.handler = null;
   }
   CameraManager.get().closeDriver();
 }
// Example #6 (0 votes) — scraped-snippet separator; not part of any class
 /** Activity lifecycle: inflates the capture layout and wires up the views and camera. */
 @Override
 public void onCreate(Bundle paramBundle) {
   super.onCreate(paramBundle);
   // NOTE(review): raw resource ids are decompiler residue — replace with the symbolic
   // R.layout.* / R.id.* names once the resource mapping is known.
   setContentView(2130903048);
   CameraManager.init(getApplication());
   this.viewfinderView = ((ViewfinderView) findViewById(2131361873));
   this.txtResult = ((TextView) findViewById(2131361874));
   this.hasSurface = false;
   this.inactivityTimer = new InactivityTimer(this);
 }
  /**
   * Creates the handler, spins up the background decode thread, and immediately starts
   * capturing previews and decoding.
   */
  public CaptureActivityHandler(CameraActivity activity, CameraManager cameraManager) {
    this.activity = activity;
    this.cameraManager = cameraManager;
    state = State.SUCCESS;

    // Worker thread performing the actual barcode decoding.
    decodeThread = new DecodeThread(activity);
    decodeThread.start();

    // Start ourselves capturing previews and decoding.
    cameraManager.startPreview();
    restartPreviewAndDecode();
  }
  /** Get instance of a camera here and initialize preview holder */
  public void surfaceCreated(SurfaceHolder holder) {
    try {
      cameraManager.openCamera(holder);
      if (handler != null) {
        return; // handler already exists from a previous surface creation
      }
      // Lazily create the handler once the camera is available.
      handler = new CameraActivityHandler(context, cameraManager);
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
  /**
   * Best-effort: switches the camera flash into torch mode.
   *
   * <p>Failures (no camera, no flash hardware, camera busy) are deliberately ignored —
   * the flash is an optional feature.
   */
  public void turnOn() {
    try {
      m_Camera = CameraManager.get().getCamera();
      Camera.Parameters mParameters = m_Camera.getParameters();
      mParameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
      m_Camera.setParameters(mParameters);
    } catch (Exception ignored) {
      // Flash is optional hardware; continue without it rather than crash.
    }
  }
  /**
   * Best-effort: switches the camera flash off.
   *
   * <p>Failures are deliberately ignored for the same reason as {@code turnOn()}: the
   * flash is optional and a failure here must not take the capture flow down.
   */
  public void turnOff() {
    try {
      m_Camera = CameraManager.get().getCamera();
      Camera.Parameters mParameters = m_Camera.getParameters();
      mParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
      m_Camera.setParameters(mParameters);
      // NOTE(review): original left stopPreview()/release() commented out here —
      // confirm preview lifecycle is owned elsewhere (CameraManager).
    } catch (Exception ignored) {
      // Flash is optional hardware; continue without it rather than crash.
    }
  }
// Example #11 (0 votes) — scraped-snippet separator; not part of any class
 /**
  * Opens the camera driver on the given surface and lazily creates the capture handler.
  *
  * <p>Failures are swallowed: the activity simply stays up without a preview. The original
  * decompiled body used {@code break label34;} outside any loop, which is not valid Java —
  * the equivalent intent (skip handler creation and return on error) is expressed here with
  * a multi-catch.
  */
 private void initCamera(SurfaceHolder paramSurfaceHolder) {
   try {
     CameraManager.get().openDriver(paramSurfaceHolder);
     if (this.handler == null) {
       this.handler = new CaptureActivityHandler(this, this.decodeFormats, this.characterSet);
     }
   } catch (IOException | RuntimeException ignored) {
     // Camera unavailable or driver error: continue without a preview.
   }
 }
 /** Handles decode results: shows decoded data, or requests another frame on failure. */
 @Override
 public void handleMessage(Message msg) {
   if (msg.what == R.id.decoded) {
     // Surface the decoded payload to the user, then to the optional callback.
     String data = msg.getData().getString(DECODED_DATA);
     Toast.makeText(context, data, Toast.LENGTH_LONG).show();
     if (callback != null) {
       callback.onDecoded(data);
     }
   } else if (msg.what == R.id.decode_failed) {
     // getting new frame
     cameraManager.requestNextFrame(new PreviewCallback(this, cameraManager));
   }
 }
  /**
   * Shuts the decode pipeline down synchronously (called from onPause paths).
   *
   * <p>Order matters: state is set to DONE first so no branch restarts decoding, the
   * preview is stopped, the decode thread is told to quit and joined, and only then are
   * any already-queued result messages purged.
   */
  public void quitSynchronously() {
    state = State.DONE;
    cameraManager.stopPreview();
    // zxing_back_button is the decode thread's quit signal — TODO confirm naming.
    Message quit = Message.obtain(decodeThread.getHandler(), R.id.zxing_back_button);
    quit.sendToTarget();
    try {
      // Wait at most half a second; should be enough time, and onPause() will timeout quickly
      decodeThread.join(500L);
    } catch (InterruptedException e) {
      // continue
    }

    // Be absolutely sure we don't send any queued up messages
    removeMessages(R.id.zxing_decode_succeeded);
    removeMessages(R.id.zxing_decode_failed);
  }
  /**
   * Init CameraManager gl texture id, camera SurfaceTexture, bind to EXTERNAL_OES, and redirect
   * camera preview to the surfaceTexture.
   *
   * @throws IOException when camera cannot be open
   */
  private void initCameraSurface() throws IOException {

    // Gen openGL texture id
    int texture[] = new int[1];
    GLES10.glGenTextures(1, texture, 0);
    mCameraTextureId = texture[0];

    // 0 is never a valid GL texture name, so this means glGenTextures failed.
    if (mCameraTextureId == 0) {
      throw new RuntimeException("Cannot create openGL texture (initCameraSurface())");
    }

    // Camera preview is redirected to SurfaceTexture.
    // SurfaceTexture works with TEXTURE_EXTERNAL_OES, so we bind this textureId so that camera
    // will automatically fill it with its video.
    GLES10.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mCameraTextureId);

    // Can't do mipmapping with camera source
    GLES10.glTexParameterf(
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES10.GL_TEXTURE_MIN_FILTER, GLES10.GL_LINEAR);
    GLES10.glTexParameterf(
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES10.GL_TEXTURE_MAG_FILTER, GLES10.GL_LINEAR);

    // Clamp to edge is the only option
    GLES10.glTexParameterf(
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES10.GL_TEXTURE_WRAP_S, GLES10.GL_CLAMP_TO_EDGE);
    GLES10.glTexParameterf(
        GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES10.GL_TEXTURE_WRAP_T, GLES10.GL_CLAMP_TO_EDGE);

    // create a SurfaceTexture associated to this openGL texture...
    mCameraSurfaceTex = new SurfaceTexture(mCameraTextureId);
    // Hard-coded 640x480 preview buffer — presumably matches the camera preview size
    // requested elsewhere; TODO confirm against CameraManager's configuration.
    mCameraSurfaceTex.setDefaultBufferSize(640, 480);

    // ... and redirect camera preview to it
    mCameraManager.setPreviewSurface(mCameraSurfaceTex);

    // Setup viewfinder
    mViewFinder = new TexturedPlane(mViewFinderSize);
    mViewFinder.setTexture(
        BitmapDecoder.safeDecodeBitmap(mContext.getResources(), VIEWFINDER_RESSOURCE_ID));
    // Bitmap has been uploaded to GL; free the CPU-side copy.
    mViewFinder.recycleTexture();
    mViewFinder.translate(0, 0, VIEWFINDER_DISTANCE);
    mViewFinder.setAlpha(VIEWFINDER_ATTENUATION_ALPHA);
  }
  /**
   * Creates the handler, starts the decode worker with the requested formats, hints and
   * charset, then kicks off preview capture and decoding.
   */
  QrCodeCaptureActivityHandler(
      QrCodeCaptureActivity activity,
      Collection<BarcodeFormat> decodeFormats,
      Map<DecodeHintType, ?> baseHints,
      String characterSet,
      CameraManager cameraManager) {
    this.activity = activity;
    this.cameraManager = cameraManager;
    state = State.SUCCESS;

    // Worker thread doing the actual decoding; result points are echoed to the viewfinder.
    decodeThread =
        new DecodeThread(
            activity,
            decodeFormats,
            baseHints,
            characterSet,
            new ViewfinderResultPointCallback(activity.getViewfinderView()));
    decodeThread.start();

    // Start ourselves capturing previews and decoding.
    cameraManager.startPreview();
    restartPreviewAndDecode();
  }
  /** Scan state machine: restart, decode success/failure, and returning the scan result. */
  @Override
  public void handleMessage(Message message) {
    int what = message.what;
    if (what == R.id.restart_preview) {
      restartPreviewAndDecode();
    } else if (what == R.id.decode_succeeded) {
      state = State.SUCCESS;
      Bundle bundle = message.getData();
      activity.handleDecode((Result) message.obj, bundle);
    } else if (what == R.id.decode_failed) {
      // We're decoding as fast as possible, so when one decode fails,
      // start another.
      state = State.PREVIEW;
      cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (what == R.id.return_scan_result) {
      // Hand the result back to the calling activity and close this one.
      activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
      activity.finish();
    }
  }
 /** Delegates camera opening to the underlying CameraManager. */
 @Override
 public void openCamera() {
   cameraManager.openCamera();
 }
 /** Delegates camera shutdown to the underlying CameraManager. */
 @Override
 public void stopCamera() {
   cameraManager.stopCamera();
 }
 /** Forwards the auto-fit flag to the underlying CameraManager. */
 @Override
 public void setAutoFit(boolean autoFit) {
   cameraManager.setAutoFit(autoFit);
 }
 /** Registers the preview-frame callback with the underlying CameraManager. */
 @Override
 public void setPreviewCallback(PreviewCallback previewCallback) {
   cameraManager.setPreviewCallback(previewCallback);
 }
 /** After a successful decode, returns to PREVIEW and requests the next frame. */
 private void restartPreviewAndDecode() {
   if (state != State.SUCCESS) {
     return; // only restart from SUCCESS; other states keep their current flow
   }
   state = State.PREVIEW;
   cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
 }
  /**
   * Main message dispatcher for the capture activity.
   *
   * <p>Handles preview restarts, successful and failed decodes, returning the scan result to
   * the caller, and launching product-query URLs in an external browser. The old switch-based
   * implementation that lived here as a 60-line commented-out block has been deleted: it
   * duplicated this logic exactly and commented-out code belongs in version control, not in
   * the source.
   */
  @Override
  public void handleMessage(Message message) {
    if (message.what == R.id.restart_preview) {
      restartPreviewAndDecode();
    } else if (message.what == R.id.decode_succeeded) {
      state = State.SUCCESS;
      Bundle bundle = message.getData();
      Bitmap barcode = null;
      float scaleFactor = 1.0f;
      if (bundle != null) {
        byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
        if (compressedBitmap != null) {
          barcode =
              BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
          // Mutable copy:
          barcode = barcode.copy(Bitmap.Config.ARGB_8888, true);
        }
        scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
      }
      activity.handleDecode((Result) message.obj, barcode, scaleFactor);
    } else if (message.what == R.id.decode_failed) {
      // We're decoding as fast as possible, so when one decode fails, start another.
      state = State.PREVIEW;
      cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    } else if (message.what == R.id.return_scan_result) {
      activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
      activity.finish();
    } else if (message.what == R.id.launch_product_query) {
      String url = (String) message.obj;

      Intent intent = new Intent(Intent.ACTION_VIEW);
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      intent.setData(Uri.parse(url));

      // Resolve which app will handle the VIEW intent so we can target it explicitly below.
      ResolveInfo resolveInfo =
          activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY);
      String browserPackageName = null;
      if (resolveInfo != null && resolveInfo.activityInfo != null) {
        browserPackageName = resolveInfo.activityInfo.packageName;
        Log.d(TAG, "Using browser in package " + browserPackageName);
      }

      // Needed for default Android browser / Chrome only apparently
      if ("com.android.browser".equals(browserPackageName)
          || "com.android.chrome".equals(browserPackageName)) {
        intent.setPackage(browserPackageName);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName);
      }

      try {
        activity.startActivity(intent);
      } catch (ActivityNotFoundException ignored) {
        Log.w(TAG, "Can't find anything to handle VIEW of URI " + url);
      }
    }
  }
 /** Stop preview and release camera */
 public void surfaceDestroyed(SurfaceHolder holder) {
   // SurfaceHolder.Callback: the rendering surface is gone, so release the camera.
   cameraManager.closeCamera();
 }
// Example #24 (0 votes) — scraped-snippet separator; not part of any class
 /** Shows or hides the mouse cursor and toggles the fly camera with the same flag. */
 public void cursor(boolean cursor) {
   inputManager.setCursorVisible(cursor);
   // NOTE(review): cursor visible implies flyCamActive(true) — confirm this coupling
   // is intended rather than inverted.
   cameraManager.flyCamActive(cursor);
 }