/**
 * Launch an AsyncTask to perform an OCR decode for single-shot mode.
 *
 * @param data Image data
 * @param width Image width
 * @param height Image height
 */
private void ocrDecode(byte[] data, int width, int height) {
  beepManager.playBeepSoundAndVibrate();
  activity.displayProgressDialog();

  // Launch OCR asynchronously, so we get the dialog box displayed immediately
  new OcrRecognizeAsyncTask(activity, baseApi, data, width, height).execute();
}
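/*
 * Illustrative sketch only (not the project's OcrRecognizeAsyncTask): the AsyncTask contract
 * that ocrDecode() relies on. execute() returns immediately, so the progress dialog shown
 * above can appear right away while doInBackground() runs recognition off the UI thread and
 * onPostExecute() runs back on the UI thread. Names here are placeholders; assumed import:
 * android.os.AsyncTask.
 */
private static class RecognizeTaskSketch extends AsyncTask<Void, Void, String> {
  private final TessBaseAPI api;
  private final Bitmap image;

  RecognizeTaskSketch(TessBaseAPI api, Bitmap image) {
    this.api = api;
    this.image = image;
  }

  @Override
  protected String doInBackground(Void... unused) {
    api.setImage(image);       // background thread: hand the captured frame to Tesseract
    return api.getUTF8Text();  // background thread: run recognition
  }

  @Override
  protected void onPostExecute(String text) {
    // UI thread: the real task would dismiss the progress dialog and deliver the result here.
  }
}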
/**
 * Send a failure message to the activity's handler to report that a continuous-mode
 * recognition pass produced no result.
 */
private void sendContinuousOcrFailMessage() {
  Handler handler = activity.getHandler();
  if (handler != null) {
    Message message = Message.obtain(handler, R.id.ocr_continuous_decode_failed,
        new OcrResultFailure(timeRequired));
    message.sendToTarget();
  }
}
@SuppressWarnings("unused") private OcrResult getOcrResult() { OcrResult ocrResult; String textResult; long start = System.currentTimeMillis(); try { baseApi.setImage(ReadFile.readBitmap(bitmap)); textResult = baseApi.getUTF8Text(); timeRequired = System.currentTimeMillis() - start; // Check for failure to recognize text if (textResult == null || textResult.equals("")) { return null; } ocrResult = new OcrResult(); ocrResult.setWordConfidences(baseApi.wordConfidences()); ocrResult.setMeanConfidence(baseApi.meanConfidence()); if (ViewfinderView.DRAW_REGION_BOXES) { ocrResult.setRegionBoundingBoxes(baseApi.getRegions().getBoxRects()); } if (ViewfinderView.DRAW_TEXTLINE_BOXES) { ocrResult.setTextlineBoundingBoxes(baseApi.getTextlines().getBoxRects()); } if (ViewfinderView.DRAW_STRIP_BOXES) { ocrResult.setStripBoundingBoxes(baseApi.getStrips().getBoxRects()); } // Always get the word bounding boxes--we want it for annotating the bitmap after the user // presses the shutter button, in addition to maybe wanting to draw boxes/words during the // continuous mode recognition. ocrResult.setWordBoundingBoxes(baseApi.getWords().getBoxRects()); // if (ViewfinderView.DRAW_CHARACTER_BOXES || ViewfinderView.DRAW_CHARACTER_TEXT) { // ocrResult.setCharacterBoundingBoxes(baseApi.getCharacters().getBoxRects()); // } } catch (RuntimeException e) { Log.e( "OcrRecognizeAsyncTask", "Caught RuntimeException in request to Tesseract. Setting state to CONTINUOUS_STOPPED."); e.printStackTrace(); try { baseApi.clear(); activity.stopHandler(); } catch (NullPointerException e1) { // Continue } return null; } timeRequired = System.currentTimeMillis() - start; ocrResult.setBitmap(bitmap); ocrResult.setText(textResult); ocrResult.setRecognitionTimeRequired(timeRequired); return ocrResult; }
/**
 * Perform an OCR decode for realtime recognition mode.
 *
 * @param data Image data
 * @param width Image width
 * @param height Image height
 */
private void ocrContinuousDecode(byte[] data, int width, int height) {
  PlanarYUVLuminanceSource source =
      activity.getCameraManager().buildLuminanceSource(data, width, height);
  if (source == null) {
    sendContinuousOcrFailMessage();
    return;
  }
  bitmap = source.renderCroppedGreyscaleBitmap();

  OcrResult ocrResult = getOcrResult();
  Handler handler = activity.getHandler();
  if (handler == null) {
    return;
  }

  if (ocrResult == null) {
    try {
      sendContinuousOcrFailMessage();
    } catch (NullPointerException e) {
      activity.stopHandler();
    } finally {
      bitmap.recycle();
      baseApi.clear();
    }
    return;
  }

  try {
    Message message = Message.obtain(handler, R.id.ocr_continuous_decode_succeeded, ocrResult);
    message.sendToTarget();
  } catch (NullPointerException e) {
    activity.stopHandler();
  } finally {
    baseApi.clear();
  }
}
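/*
 * Illustrative sketch only: one way the activity-side handler could route the two messages
 * sent by ocrContinuousDecode() and sendContinuousOcrFailMessage(). The real routing lives in
 * the activity's handler class, which is not shown here; handleOcrContinuousDecode() is a
 * hypothetical placeholder for whatever the activity exposes to consume results.
 */
private static void routeContinuousMessage(CaptureActivity activity, Message message) {
  if (message.what == R.id.ocr_continuous_decode_succeeded) {
    // Success: message.obj carries the OcrResult built by getOcrResult().
    activity.handleOcrContinuousDecode((OcrResult) message.obj);
  } else if (message.what == R.id.ocr_continuous_decode_failed) {
    // Failure: message.obj carries an OcrResultFailure with the time spent on the attempt.
    activity.handleOcrContinuousDecode((OcrResultFailure) message.obj);
  }
}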
DecodeHandler(CaptureActivity activity) {
  this.activity = activity;
  baseApi = activity.getBaseApi();
  beepManager = new BeepManager(activity);
  beepManager.updatePrefs();
}