@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
  switch (keyCode) {
    case KeyEvent.KEYCODE_BACK:
      if (source == IntentSource.NATIVE_APP_INTENT) {
        setResult(RESULT_CANCELED);
        finish();
        return true;
      }
      if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
        restartPreviewAfterDelay(0L);
        return true;
      }
      break;
    case KeyEvent.KEYCODE_FOCUS:
    case KeyEvent.KEYCODE_CAMERA:
      // Handle these events so they don't launch the Camera app
      return true;
    // Use volume up/down to turn the torch on and off
    case KeyEvent.KEYCODE_VOLUME_DOWN:
      cameraManager.setTorch(false);
      return true;
    case KeyEvent.KEYCODE_VOLUME_UP:
      cameraManager.setTorch(true);
      return true;
  }
  return super.onKeyDown(keyCode, event);
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
  switch (keyCode) {
    case KeyEvent.KEYCODE_BACK:
      if (source == IntentSource.NATIVE_APP_INTENT) {
        setResult(RESULT_CANCELED);
        AppManager.getAppManager().finishActivity(this);
        overridePendingTransition(R.anim.exit_in_from_left, R.anim.exit_out_to_right);
        return true;
      }
      if ((source == IntentSource.NONE || source == IntentSource.ZXING_LINK) && lastResult != null) {
        restartPreviewAfterDelay(0L);
        Logger.i("dd");
        return true;
      } else {
        AppManager.getAppManager().finishActivity(this);
        overridePendingTransition(R.anim.exit_in_from_left, R.anim.exit_out_to_right);
      }
      break;
    case KeyEvent.KEYCODE_FOCUS:
    case KeyEvent.KEYCODE_CAMERA:
      // Handle these events so they don't launch the Camera app
      return true;
    // Use volume up/down to turn the torch on and off
    case KeyEvent.KEYCODE_VOLUME_DOWN:
      cameraManager.setTorch(false);
      return true;
    case KeyEvent.KEYCODE_VOLUME_UP:
      cameraManager.setTorch(true);
      return true;
  }
  return super.onKeyDown(keyCode, event);
}
private void initCamera(SurfaceHolder surfaceHolder) { if (surfaceHolder == null) { throw new IllegalStateException("No SurfaceHolder provided"); } if (cameraManager.isOpen()) { Log.w(TAG, "initCamera() while already open -- late SurfaceView callback?"); return; } try { cameraManager.openDriver(surfaceHolder); // Creating the handler starts the preview, which can also throw a RuntimeException. if (handler == null) { handler = new CaptureActivityHandler(this, decodeFormats, characterSet, cameraManager); } decodeOrStoreSavedBitmap(null, null); } catch (IOException ioe) { Log.w(TAG, ioe); displayFrameworkBugMessageAndExit(); } catch (RuntimeException e) { // Barcode Scanner has seen crashes in the wild of this variety: // java.?lang.?RuntimeException: Fail to connect to camera service Log.w(TAG, "Unexpected error initializing camera", e); displayFrameworkBugMessageAndExit(); } }
private void restartPreviewAndDecode() {
  if (state == State.SUCCESS) {
    state = State.PREVIEW;
    CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
    activity.drawViewfinder();
  }
}
/**
 * Decode the data within the viewfinder rectangle, and time how long it took. For efficiency,
 * reuse the same reader objects from one decode to the next.
 *
 * @param data   The YUV preview frame.
 * @param width  The width of the preview frame.
 * @param height The height of the preview frame.
 */
private void decode(byte[] data, int width, int height) {
  long start = System.currentTimeMillis();
  Result rawResult = null;
  PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(data, width, height);
  BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
  try {
    rawResult = multiFormatReader.decodeWithState(bitmap);
  } catch (ReaderException re) {
    // continue
  } finally {
    multiFormatReader.reset();
  }
  if (rawResult != null) {
    long end = System.currentTimeMillis();
    Log.d(TAG, "Found barcode (" + (end - start) + " ms):\n" + rawResult.toString());
    Message message = Message.obtain(activity.getHandler(), R.id.decode_succeeded, rawResult);
    Bundle bundle = new Bundle();
    bundle.putParcelable(DecodeThread.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
    message.setData(bundle);
    //Log.d(TAG, "Sending decode succeeded message...");
    message.sendToTarget();
  } else {
    Message message = Message.obtain(activity.getHandler(), R.id.decode_failed);
    message.sendToTarget();
  }
}
@Override
public void handleMessage(Message message) {
  if (message.what == R.id.zxing_decode_succeeded) {
    state = State.SUCCESS;
    Bundle bundle = message.getData();
    Bitmap barcode = null;
    float scaleFactor = 1.0f;
    if (bundle != null) {
      byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
      if (compressedBitmap != null) {
        barcode = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
        // Mutable copy:
        barcode = barcode.copy(Bitmap.Config.ARGB_8888, true);
      }
      scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
    }
    activity.handleDecode((Result) message.obj, barcode, scaleFactor);
  } else if (message.what == R.id.zxing_decode_failed) {
    // We're decoding as fast as possible, so when one decode fails,
    // start another.
    state = State.PREVIEW;
    cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.zxing_decode);
  } else if (message.what == R.id.zxing_return_scan_result) {
    activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
    activity.finish();
  }
}
private void restartPreviewAndDecode() {
  if (state == State.SUCCESS) {
    state = State.PREVIEW;
    cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
    activity.drawViewfinder();
  }
}
@Override
public void handleMessage(Message message) {
  if (message.what == R.id.restart_preview) {
    restartPreviewAndDecode();
  } else if (message.what == R.id.decode_succeeded) {
    state = State.SUCCESS;
    Bundle bundle = message.getData();
    Bitmap barcode = null;
    float scaleFactor = 1.0f;
    if (bundle != null) {
      byte[] compressedBitmap = bundle.getByteArray(DecodeThread.BARCODE_BITMAP);
      if (compressedBitmap != null) {
        barcode = BitmapFactory.decodeByteArray(compressedBitmap, 0, compressedBitmap.length, null);
        // Mutable copy:
        barcode = barcode.copy(Bitmap.Config.ARGB_8888, true);
      }
      scaleFactor = bundle.getFloat(DecodeThread.BARCODE_SCALED_FACTOR);
    }
    activity.handleDecode((Result) message.obj, barcode, scaleFactor);
  } else if (message.what == R.id.decode_failed) {
    // We're decoding as fast as possible, so when one decode fails,
    // start another.
    state = State.PREVIEW;
    cameraManager.requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
  } else if (message.what == R.id.return_scan_result) {
    activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
    activity.finish();
  } else if (message.what == R.id.launch_product_query) {
    String url = (String) message.obj;
    Intent intent = new Intent(Intent.ACTION_VIEW);
    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
    intent.setData(Uri.parse(url));
    ResolveInfo resolveInfo =
        activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY);
    String browserPackageName = null;
    if (resolveInfo != null && resolveInfo.activityInfo != null) {
      browserPackageName = resolveInfo.activityInfo.packageName;
      Log.d(TAG, "Using browser in package " + browserPackageName);
    }
    // Needed for default Android browser / Chrome only apparently
    if ("com.android.browser".equals(browserPackageName) || "com.android.chrome".equals(browserPackageName)) {
      intent.setPackage(browserPackageName);
      intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
      intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName);
    }
    try {
      activity.startActivity(intent);
    } catch (ActivityNotFoundException ignored) {
      Log.w(TAG, "Can't find anything to handle VIEW of URI " + url);
    }
  }
}
@Override
protected void onPause() {
  super.onPause();
  if (handler != null) {
    handler.quitSynchronously();
    handler = null;
  }
  CameraManager.get().closeDriver();
}
@Override
public void handleMessage(Message message) {
  switch (message.what) {
    case R.id.auto_focus:
      //Log.d(TAG, "Got auto-focus message");
      // When one auto focus pass finishes, start another. This is the closest thing to
      // continuous AF. It does seem to hunt a bit, but I'm not sure what else to do.
      if (state == State.PREVIEW) {
        CameraManager.get().requestAutoFocus(this, R.id.auto_focus);
      }
      break;
    case R.id.restart_preview:
      Log.d(TAG, "Got restart preview message");
      restartPreviewAndDecode();
      break;
    case R.id.decode_succeeded:
      Log.d(TAG, "Got decode succeeded message");
      state = State.SUCCESS;
      Bundle bundle = message.getData();
      Bitmap barcode = bundle == null ? null :
          (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP);
      activity.handleDecode((Result) message.obj, barcode);
      break;
    case R.id.decode_failed:
      // We're decoding as fast as possible, so when one decode fails, start another.
      state = State.PREVIEW;
      CameraManager.get().requestPreviewFrame(decodeThread.getHandler(), R.id.decode);
      break;
    case R.id.return_scan_result:
      Log.d(TAG, "Got return scan result message");
      activity.setResult(Activity.RESULT_OK, (Intent) message.obj);
      activity.finish();
      break;
    case R.id.launch_product_query:
      Log.d(TAG, "Got product query message");
      String url = (String) message.obj;
      Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
      intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
      activity.startActivity(intent);
      break;
  }
}
public void quitSynchronously() {
  state = State.DONE;
  CameraManager.get().stopPreview();
  Message quit = Message.obtain(decodeThread.getHandler(), R.id.quit);
  quit.sendToTarget();
  try {
    decodeThread.join();
  } catch (InterruptedException e) {
    // continue
  }
  // Be absolutely sure we don't send any queued up messages
  removeMessages(R.id.decode_succeeded);
  removeMessages(R.id.decode_failed);
}
@Override
protected void onPause() {
  if (handler != null) {
    handler.quitSynchronously();
    handler = null;
  }
  inactivityTimer.onPause();
  cameraManager.closeDriver();
  if (!hasSurface) {
    SurfaceView surfaceView = (SurfaceView) findViewById(fakeR.getId("id", "preview_view"));
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    surfaceHolder.removeCallback(this);
  }
  super.onPause();
}
private void initCamera(SurfaceHolder surfaceHolder) { if (surfaceHolder == null) { throw new IllegalStateException("No SurfaceHolder provided"); } if (cameraManager.isOpen()) { return; } try { // 打开Camera硬件设备 cameraManager.openDriver(surfaceHolder); // 创建一个handler来打开预览,并抛出一个运行时异常 if (handler == null) { handler = new CaptureActivityHandler( this, decodeFormats, decodeHints, characterSet, cameraManager); } } catch (IOException ioe) { Log.w(TAG, ioe); displayFrameworkBugMessageAndExit(); } catch (RuntimeException e) { Log.w(TAG, "Unexpected error initializing camera", e); displayFrameworkBugMessageAndExit(); } }
CaptureActivityHandler(CaptureActivity activity,
                       Collection<BarcodeFormat> decodeFormats,
                       String characterSet) {
  this.activity = activity;
  decodeThread = new DecodeThread(activity, decodeFormats, characterSet,
      new ViewfinderResultPointCallback(activity.getViewfinderView()));
  decodeThread.start();
  state = State.SUCCESS;
  // Start ourselves capturing previews and decoding.
  CameraManager.get().startPreview();
  restartPreviewAndDecode();
}
public void quitSynchronously() {
  state = State.DONE;
  cameraManager.stopPreview();
  Message quit = Message.obtain(decodeThread.getHandler(), R.id.quit);
  quit.sendToTarget();
  try {
    // Wait at most half a second; should be enough time, and onPause() will timeout quickly
    decodeThread.join(500L);
  } catch (InterruptedException e) {
    // continue
  }
  // Be absolutely sure we don't send any queued up messages
  removeMessages(R.id.decode_succeeded);
  removeMessages(R.id.decode_failed);
}
@Override public void handleMessage(Message message) { if (message.what == fakeR.getId("id", "restart_preview")) { Log.d(TAG, "Got restart preview message"); restartPreviewAndDecode(); } else if (message.what == fakeR.getId("id", "decode_succeeded")) { Log.d(TAG, "Got decode succeeded message"); state = State.SUCCESS; Bundle bundle = message.getData(); Bitmap barcode = bundle == null ? null : (Bitmap) bundle.getParcelable(DecodeThread.BARCODE_BITMAP); activity.handleDecode((Result) message.obj, barcode); } else if (message.what == fakeR.getId("id", "decode_failed")) { // We're decoding as fast as possible, so when one decode fails, start another. state = State.PREVIEW; cameraManager.requestPreviewFrame(decodeThread.getHandler(), fakeR.getId("id", "decode")); } else if (message.what == fakeR.getId("id", "return_scan_result")) { Log.d(TAG, "Got return scan result message"); activity.setResult(Activity.RESULT_OK, (Intent) message.obj); activity.finish(); } else if (message.what == fakeR.getId("id", "launch_product_query")) { Log.d(TAG, "Got product query message"); String url = (String) message.obj; Intent intent = new Intent(Intent.ACTION_VIEW); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); intent.setData(Uri.parse(url)); ResolveInfo resolveInfo = activity.getPackageManager().resolveActivity(intent, PackageManager.MATCH_DEFAULT_ONLY); String browserPackageName = null; if (resolveInfo.activityInfo != null) { browserPackageName = resolveInfo.activityInfo.packageName; Log.d(TAG, "Using browser in package " + browserPackageName); } // Needed for default Android browser only apparently if ("com.android.browser".equals(browserPackageName)) { intent.setPackage(browserPackageName); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); intent.putExtra(Browser.EXTRA_APPLICATION_ID, browserPackageName); } try { activity.startActivity(intent); } catch (ActivityNotFoundException anfe) { Log.w(TAG, "Can't find anything to handle VIEW of URI " + url); } } }
private void initCamera(SurfaceHolder surfaceHolder) {
  try {
    CameraManager.get().openDriver(surfaceHolder);
    // Creating the handler starts the preview, which can also throw a RuntimeException.
    if (handler == null) {
      handler = new CaptureActivityHandler(this, decodeFormats, characterSet);
    }
  } catch (IOException ioe) {
    Log.w(TAG, ioe);
    displayFrameworkBugMessageAndExit();
  } catch (RuntimeException e) {
    // Barcode Scanner has seen crashes in the wild of this variety:
    // java.?lang.?RuntimeException: Fail to connect to camera service
    Log.w(TAG, "Unexpected error initializing camera", e);
    displayFrameworkBugMessageAndExit();
  }
}
@Override
protected void onPause() {
  if (handler != null) {
    handler.quitSynchronously();
    handler = null;
  }
  inactivityTimer.onPause();
  ambientLightManager.stop();
  cameraManager.closeDriver();
  if (!hasSurface) {
    Logger.i("remove callback");
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    surfaceHolder.removeCallback(this);
  }
  super.onPause();
}
@Override
public void onCreate(Bundle icicle) {
  super.onCreate(icicle);
  Window window = getWindow();
  window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  setContentView(R.layout.capture);
  CameraManager.init(getApplication());
  viewfinderView = (ViewfinderView) findViewById(R.id.viewfinder_view);
  resultView = findViewById(R.id.result_view);
  statusView = (TextView) findViewById(R.id.status_view);
  handler = null;
  lastResult = null;
  hasSurface = false;
  historyManager = new HistoryManager(this);
  historyManager.trimHistory();
}
/**
 * Decode the data within the viewfinder rectangle, and time how long it took. For efficiency,
 * reuse the same reader objects from one decode to the next.
 *
 * @param data   The YUV preview frame.
 * @param width  The width of the preview frame.
 * @param height The height of the preview frame.
 */
private void decode(byte[] data, int width, int height) {
  long start = System.currentTimeMillis();
  Result rawResult = null;
  // Rotate the YUV luminance data 90 degrees so barcodes can be decoded in portrait orientation
  byte[] rotatedData = new byte[data.length];
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      rotatedData[x * height + height - y - 1] = data[x + y * width];
    }
  }
  // Swap width and height to match the rotated data (this is the difference to #11)
  int tmp = width;
  width = height;
  height = tmp;
  PlanarYUVLuminanceSource source = CameraManager.get().buildLuminanceSource(rotatedData, width, height);
  BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
  try {
    rawResult = multiFormatReader.decodeWithState(bitmap);
  } catch (ReaderException re) {
    // continue
  } finally {
    multiFormatReader.reset();
  }
  if (rawResult != null) {
    long end = System.currentTimeMillis();
    Log.d(TAG, "Found barcode (" + (end - start) + " ms):\n" + rawResult.toString());
    Message message = Message.obtain(activity.getHandler(), R.id.zandroid_zxing_decode_succeeded, rawResult);
    Bundle bundle = new Bundle();
    bundle.putParcelable(DecodeThread.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
    message.setData(bundle);
    //Log.d(TAG, "Sending decode succeeded message...");
    message.sendToTarget();
  } else {
    Message message = Message.obtain(activity.getHandler(), R.id.zandroid_zxing_decode_failed);
    message.sendToTarget();
  }
}
@Override
public void onDraw(Canvas canvas) {
  Rect frame = CameraManager.get().getFramingRect();
  if (frame == null) {
    return;
  }
  int width = canvas.getWidth();
  int height = canvas.getHeight();

  // These are modifications to force the view frame to be centered vertically
  frame = new Rect(frame);
  int frameHeight = frame.height();
  frame.top = (height - frame.height()) / 2;
  frame.bottom = frame.top + frameHeight;

  // Draw the exterior (i.e. outside the framing rect) darkened
  paint.setColor(resultBitmap != null ? resultColor : maskColor);
  canvas.drawRect(0, 0, width, frame.top, paint);
  canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint);
  canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint);
  canvas.drawRect(0, frame.bottom + 1, width, height, paint);

  if (resultBitmap != null) {
    // Draw the opaque result bitmap over the scanning rectangle
    paint.setAlpha(OPAQUE);
    canvas.drawBitmap(resultBitmap, frame.left, frame.top, paint);
  } else {
    // Draw a two pixel solid black border inside the framing rect
    paint.setColor(frameColor);
    canvas.drawRect(frame.left, frame.top, frame.right + 1, frame.top + 2, paint);
    canvas.drawRect(frame.left, frame.top + 2, frame.left + 2, frame.bottom - 1, paint);
    canvas.drawRect(frame.right - 1, frame.top, frame.right + 1, frame.bottom - 1, paint);
    canvas.drawRect(frame.left, frame.bottom - 1, frame.right + 1, frame.bottom + 1, paint);

    // Draw a red "laser scanner" line through the middle to show decoding is active
    paint.setColor(laserColor);
    paint.setAlpha(SCANNER_ALPHA[scannerAlpha]);
    scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length;
    int middle = frame.height() / 2 + frame.top;
    canvas.drawRect(frame.left + 2, middle - 1, frame.right - 1, middle + 2, paint);

    Collection<ResultPoint> currentPossible = possibleResultPoints;
    Collection<ResultPoint> currentLast = lastPossibleResultPoints;
    if (currentPossible.isEmpty()) {
      lastPossibleResultPoints = null;
    } else {
      possibleResultPoints = new HashSet<ResultPoint>(5);
      lastPossibleResultPoints = currentPossible;
      paint.setAlpha(OPAQUE);
      paint.setColor(resultPointColor);
      for (ResultPoint point : currentPossible) {
        canvas.drawCircle(frame.left + point.getX(), frame.top + point.getY(), 6.0f, paint);
      }
    }
    if (currentLast != null) {
      paint.setAlpha(OPAQUE / 2);
      paint.setColor(resultPointColor);
      for (ResultPoint point : currentLast) {
        canvas.drawCircle(frame.left + point.getX(), frame.top + point.getY(), 3.0f, paint);
      }
    }

    // Request another update at the animation interval, but only repaint the laser line,
    // not the entire viewfinder mask.
    postInvalidateDelayed(ANIMATION_DELAY, frame.left, frame.top, frame.right, frame.bottom);
  }
}
@Override
protected void onResume() {
  super.onResume();

  // CameraManager must be initialized here, not in onCreate(). This is necessary because we don't
  // want to open the camera driver and measure the screen size if we're going to show the help on
  // first launch. That led to bugs where the scanning rectangle was the wrong size and partially
  // off screen.
  cameraManager = new CameraManager(getApplication());

  viewfinderView = (ViewfinderView) findViewById(fakeR.getId("id", "viewfinder_view"));
  viewfinderView.setCameraManager(cameraManager);
  resultView = findViewById(fakeR.getId("id", "result_view"));
  statusView = (TextView) findViewById(fakeR.getId("id", "status_view"));
  handler = null;
  lastResult = null;
  resetStatusView();

  SurfaceView surfaceView = (SurfaceView) findViewById(fakeR.getId("id", "preview_view"));
  SurfaceHolder surfaceHolder = surfaceView.getHolder();
  if (hasSurface) {
    // The activity was paused but not stopped, so the surface still exists. Therefore
    // surfaceCreated() won't be called, so init the camera here.
    initCamera(surfaceHolder);
  } else {
    // Install the callback and wait for surfaceCreated() to init the camera.
    surfaceHolder.addCallback(this);
    surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
  }

  beepManager.updatePrefs();
  inactivityTimer.onResume();

  Intent intent = getIntent();
  SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
  copyToClipboard = prefs.getBoolean(PreferencesActivity.KEY_COPY_TO_CLIPBOARD, true)
      && (intent == null || intent.getBooleanExtra(Intents.Scan.SAVE_HISTORY, true));

  // source = IntentSource.NONE;
  source = IntentSource.NATIVE_APP_INTENT;
  decodeFormats = null;
  characterSet = null;

  if (intent != null) {
    String action = intent.getAction();
    String dataString = intent.getDataString();

    if (Intents.Scan.ACTION.equals(action)) {
      // Scan the formats the intent requested, and return the result to the calling activity.
      source = IntentSource.NATIVE_APP_INTENT;
      decodeFormats = DecodeFormatManager.parseDecodeFormats(intent);
      if (intent.hasExtra(Intents.Scan.WIDTH) && intent.hasExtra(Intents.Scan.HEIGHT)) {
        int width = intent.getIntExtra(Intents.Scan.WIDTH, 0);
        int height = intent.getIntExtra(Intents.Scan.HEIGHT, 0);
        if (width > 0 && height > 0) {
          cameraManager.setManualFramingRect(width, height);
        }
      }
      String customPromptMessage = intent.getStringExtra(Intents.Scan.PROMPT_MESSAGE);
      if (customPromptMessage != null) {
        statusView.setText(customPromptMessage);
      }
    } else if (dataString != null
        && dataString.contains(PRODUCT_SEARCH_URL_PREFIX)
        && dataString.contains(PRODUCT_SEARCH_URL_SUFFIX)) {
      // Scan only products and send the result to mobile Product Search.
      source = IntentSource.PRODUCT_SEARCH_LINK;
      sourceUrl = dataString;
      decodeFormats = DecodeFormatManager.PRODUCT_FORMATS;
    } else if (isZXingURL(dataString)) {
      // Scan formats requested in query string (all formats if none specified).
      // If a return URL is specified, send the results there. Otherwise, handle it ourselves.
      source = IntentSource.ZXING_LINK;
      sourceUrl = dataString;
      Uri inputUri = Uri.parse(sourceUrl);
      returnUrlTemplate = inputUri.getQueryParameter(RETURN_URL_PARAM);
      returnRaw = inputUri.getQueryParameter(RAW_PARAM) != null;
      decodeFormats = DecodeFormatManager.parseDecodeFormats(inputUri);
    }

    characterSet = intent.getStringExtra(Intents.Scan.CHARACTER_SET);
  }
}
@Override
public void onDraw(Canvas canvas) {
  if (cameraManager == null) {
    return; // not ready yet, early draw before done configuring
  }
  Rect frame = cameraManager.getFramingRect();
  Rect previewFrame = cameraManager.getFramingRectInPreview();
  if (frame == null || previewFrame == null) {
    return;
  }
  int width = canvas.getWidth();
  int height = canvas.getHeight();

  // Draw the exterior (i.e. outside the framing rect) darkened
  paint.setColor(resultBitmap != null ? resultColor : maskColor);
  canvas.drawRect(0, 0, width, frame.top, paint);
  canvas.drawRect(0, frame.top, frame.left, frame.bottom + 1, paint);
  canvas.drawRect(frame.right + 1, frame.top, width, frame.bottom + 1, paint);
  canvas.drawRect(0, frame.bottom + 1, width, height, paint);

  if (resultBitmap != null) {
    // Draw the opaque result bitmap over the scanning rectangle
    paint.setAlpha(CURRENT_POINT_OPACITY);
    canvas.drawBitmap(resultBitmap, null, frame, paint);
  } else {
    // Draw a red "laser scanner" line through the middle to show decoding is active
    paint.setColor(laserColor);
    paint.setAlpha(SCANNER_ALPHA[scannerAlpha]);
    scannerAlpha = (scannerAlpha + 1) % SCANNER_ALPHA.length;
    int middle = frame.height() / 2 + frame.top;
    canvas.drawRect(frame.left + 2, middle - 1, frame.right - 1, middle + 2, paint);

    float scaleX = frame.width() / (float) previewFrame.width();
    float scaleY = frame.height() / (float) previewFrame.height();

    List<ResultPoint> currentPossible = possibleResultPoints;
    List<ResultPoint> currentLast = lastPossibleResultPoints;
    int frameLeft = frame.left;
    int frameTop = frame.top;
    if (currentPossible.isEmpty()) {
      lastPossibleResultPoints = null;
    } else {
      possibleResultPoints = new ArrayList<ResultPoint>(5);
      lastPossibleResultPoints = currentPossible;
      paint.setAlpha(CURRENT_POINT_OPACITY);
      paint.setColor(resultPointColor);
      synchronized (currentPossible) {
        for (ResultPoint point : currentPossible) {
          canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX),
                            frameTop + (int) (point.getY() * scaleY),
                            POINT_SIZE, paint);
        }
      }
    }
    if (currentLast != null) {
      paint.setAlpha(CURRENT_POINT_OPACITY / 2);
      paint.setColor(resultPointColor);
      synchronized (currentLast) {
        float radius = POINT_SIZE / 2.0f;
        for (ResultPoint point : currentLast) {
          canvas.drawCircle(frameLeft + (int) (point.getX() * scaleX),
                            frameTop + (int) (point.getY() * scaleY),
                            radius, paint);
        }
      }
    }

    // Request another update at the animation interval, but only repaint the laser line,
    // not the entire viewfinder mask.
    postInvalidateDelayed(ANIMATION_DELAY,
                          frame.left - POINT_SIZE,
                          frame.top - POINT_SIZE,
                          frame.right + POINT_SIZE,
                          frame.bottom + POINT_SIZE);
  }
}