Example #1
  /**
   * Provides an image for Tesseract to recognize. Does not copy the image buffer. The source image
   * must persist until after Recognize or GetUTF8Chars is called.
   *
   * @param bmp bitmap representation of the image
   */
  public void setImage(Bitmap bmp) {
    Pix image = ReadFile.readBitmap(bmp);

    if (image == null) {
      throw new RuntimeException("Failed to read bitmap");
    }

    nativeSetImagePix(image.getNativePix());
  }
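Because this overload does not copy the pixel buffer, the source Bitmap has to stay alive until recognition finishes. A minimal usage sketch under that assumption (imagePath and the already-initialized baseApi are hypothetical):

  // Hypothetical usage -- do not recycle the bitmap before recognition completes.
  Bitmap source = BitmapFactory.decodeFile(imagePath);
  baseApi.setImage(source);               // no copy of the pixel buffer is made
  String text = baseApi.getUTF8Text();    // recognition reads the shared buffer
  baseApi.clear();                        // release Tesseract's reference to the image
  source.recycle();                       // only now is it safe to recycle the bitmap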
Example #2
  /**
   * Provides an image for Tesseract to recognize.
   *
   * @param file absolute path to the image file
   */
  public void setImage(File file) {
    Pix image = ReadFile.readFile(file);

    if (image == null) {
      throw new RuntimeException("Failed to read image file");
    }

    nativeSetImagePix(image.getNativePix());
  }
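A read failure in this overload surfaces as a RuntimeException rather than a return value, so callers may want to guard the call. A rough sketch (the file location, TAG, and baseApi are assumptions):

  File photo = new File(getFilesDir(), "capture.jpg");  // assumed location
  try {
    baseApi.setImage(photo);
    String text = baseApi.getUTF8Text();
  } catch (RuntimeException e) {
    Log.e(TAG, "Could not read " + photo.getAbsolutePath(), e);
  }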
Example #3
  @SuppressWarnings("unused")
  private OcrResult getOcrResult() {
    OcrResult ocrResult;
    String textResult;
    long start = System.currentTimeMillis();

    try {
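      // ReadFile.readBitmap converts the Android Bitmap into a Leptonica Pix for the native API.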
      baseApi.setImage(ReadFile.readBitmap(bitmap));
      textResult = baseApi.getUTF8Text();
      timeRequired = System.currentTimeMillis() - start;

      // Check for failure to recognize text
      if (textResult == null || textResult.equals("")) {
        return null;
      }
      ocrResult = new OcrResult();
      ocrResult.setWordConfidences(baseApi.wordConfidences());
      ocrResult.setMeanConfidence(baseApi.meanConfidence());
      if (ViewfinderView.DRAW_REGION_BOXES) {
        ocrResult.setRegionBoundingBoxes(baseApi.getRegions().getBoxRects());
      }
      if (ViewfinderView.DRAW_TEXTLINE_BOXES) {
        ocrResult.setTextlineBoundingBoxes(baseApi.getTextlines().getBoxRects());
      }
      if (ViewfinderView.DRAW_STRIP_BOXES) {
        ocrResult.setStripBoundingBoxes(baseApi.getStrips().getBoxRects());
      }

      // Always get the word bounding boxes--we want them for annotating the bitmap after the user
      // presses the shutter button, in addition to possibly drawing boxes/words during
      // continuous-mode recognition.
      ocrResult.setWordBoundingBoxes(baseApi.getWords().getBoxRects());

      //      if (ViewfinderView.DRAW_CHARACTER_BOXES || ViewfinderView.DRAW_CHARACTER_TEXT) {
      //        ocrResult.setCharacterBoundingBoxes(baseApi.getCharacters().getBoxRects());
      //      }
    } catch (RuntimeException e) {
      Log.e(
          "OcrRecognizeAsyncTask",
          "Caught RuntimeException in request to Tesseract. Setting state to CONTINUOUS_STOPPED.");
      e.printStackTrace();
      try {
        baseApi.clear();
        activity.stopHandler();
      } catch (NullPointerException e1) {
        // Continue
      }
      return null;
    }
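    // Overwritten so the reported time also covers bounding-box retrieval.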
    timeRequired = System.currentTimeMillis() - start;
    ocrResult.setBitmap(bitmap);
    ocrResult.setText(textResult);
    ocrResult.setRecognitionTimeRequired(timeRequired);
    return ocrResult;
  }
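The confidences and boxes collected above lend themselves to simple post-filtering. A sketch of one possible use, assuming OcrResult exposes getters matching the setters used here (getWordConfidences(), getWordBoundingBoxes()) and a hypothetical confidence cutoff:

  private static final int MIN_WORD_CONFIDENCE = 60; // assumed cutoff on Tesseract's 0-100 scale

  private List<Rect> confidentWordBoxes(OcrResult ocrResult) {
    List<Rect> kept = new ArrayList<Rect>();
    int[] confidences = ocrResult.getWordConfidences();  // assumed getter
    List<Rect> boxes = ocrResult.getWordBoundingBoxes(); // assumed getter
    for (int i = 0; i < boxes.size() && i < confidences.length; i++) {
      if (confidences[i] >= MIN_WORD_CONFIDENCE) {
        kept.add(boxes.get(i));
      }
    }
    return kept;
  }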
Example #4
  /**
   * Provides an image for Tesseract to recognize. Copies the image buffer. The source image may be
   * destroyed immediately after SetImage is called. SetImage clears all recognition results, and
   * sets the rectangle to the full image, so it may be followed immediately by a GetUTF8Text, and
   * it will automatically perform recognition.
   *
   * @param file absolute path to the image file
   */
  public void setImage(File file) {
    if (mRecycled) throw new IllegalStateException();

    Pix image = ReadFile.readFile(file);

    if (image == null) {
      throw new RuntimeException("Failed to read image file");
    }

    nativeSetImagePix(image.getNativePix());
  }
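Because this variant copies the buffer and resets the rectangle to the full image, recognition can follow immediately and the source file is no longer needed afterwards. A brief sketch (the file location and the initialized baseApi are assumptions):

  File capture = new File(DATA_PATH, "capture.png");  // assumed location
  baseApi.setImage(capture);             // pixel data is copied internally
  String utf8 = baseApi.getUTF8Text();   // recognizes the full image rectangle
  int confidence = baseApi.meanConfidence();
  baseApi.clear();                       // frees the internal copy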
Example #5
  protected void onPhotoTaken() {
    _taken = true;

    /*BitmapFactory.Options options = new BitmapFactory.Options();
    options.inSampleSize = 4;

    Bitmap oldbitmap = BitmapFactory.decodeFile(_path, options);

    Pix pix = ReadFile.readBitmap(oldbitmap);*/

    BitmapFactory.Options otherOptions = new BitmapFactory.Options();
    otherOptions.inScaled = false;
    Bitmap oldbitmap2 = BitmapFactory.decodeFile(_path, otherOptions);

    Pix pix = ReadFile.readBitmap(oldbitmap2);

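    // Pre-process for OCR: normalize uneven background illumination, sharpen with
    // unsharp masking, then threshold the result to a 1 bpp binary image.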
    pix = AdaptiveMap.backgroundNormMorph(pix);
    pix = Enhance.unsharpMasking(pix);
    pix = GrayQuant.pixThresholdToBinary(pix, 125);

    // pix = Scale.scale(pix, 10, 10);

    Bitmap bitmap = WriteFile.writeBitmap(pix);

    try {
      ExifInterface exif = new ExifInterface(_path);
      int exifOrientation =
          exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);

      Log.v(TAG, "Orient: " + exifOrientation);

      int rotate = 0;

      switch (exifOrientation) {
        case ExifInterface.ORIENTATION_ROTATE_90:
          rotate = 90;
          break;
        case ExifInterface.ORIENTATION_ROTATE_180:
          rotate = 180;
          break;
        case ExifInterface.ORIENTATION_ROTATE_270:
          rotate = 270;
          break;
      }

      Log.v(TAG, "Rotation: " + rotate);

      if (rotate != 0) {

        // Getting width & height of the given image.
        int w = bitmap.getWidth();
        int h = bitmap.getHeight();

        // Setting pre rotate
        Matrix mtx = new Matrix();
        mtx.preRotate(rotate);

        // Rotating Bitmap
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
      }

    } catch (IOException e) {
      Log.e(TAG, "Couldn't correct orientation: " + e.toString());
    }

    // Convert to ARGB_8888, required by tess (done outside the try so it also
    // happens when reading the EXIF orientation fails)
    bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);

    // _image.setImageBitmap( bitmap );

    Log.v(TAG, "Before baseApi");

    TessBaseAPI baseApi = new TessBaseAPI();
    // baseApi.setDebug(true);
    baseApi.init(DATA_PATH, lang);
    baseApi.setImage(bitmap);

    String recognizedText = baseApi.getUTF8Text();

    baseApi.end();

    // The recognized text is now in recognizedText; you can do anything with it.
    // The commented block below would reduce it to trimmed alphanumeric text (if lang is "eng")
    // so that garbage doesn't make it to the display.

    Log.v(TAG, "OCRED TEXT: " + recognizedText);

    /*if ( lang.equalsIgnoreCase("eng") ) {
        recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
    }*/

    recognizedText = recognizedText.trim();

    Intent myIntent = new Intent(OCR.this, getInfo.class);
    myIntent.putExtra("text", recognizedText); // Optional parameters
    OCR.this.startActivity(myIntent);

    // Cycle done.
  }
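baseApi.init(DATA_PATH, lang) expects a trained data file at DATA_PATH + "tessdata/" + lang + ".traineddata". A rough sketch of copying it out of the app's assets before the first run (the asset path and method name are assumptions):

  private void copyTessDataIfNeeded() {
    File tessDir = new File(DATA_PATH, "tessdata");
    File trainedData = new File(tessDir, lang + ".traineddata");
    if (trainedData.exists()) {
      return; // already copied on an earlier run
    }
    tessDir.mkdirs();
    try {
      InputStream in = getAssets().open("tessdata/" + lang + ".traineddata"); // assumed asset path
      OutputStream out = new FileOutputStream(trainedData);
      byte[] buffer = new byte[8192];
      int len;
      while ((len = in.read(buffer)) != -1) {
        out.write(buffer, 0, len);
      }
      out.close();
      in.close();
    } catch (IOException e) {
      Log.e(TAG, "Could not copy " + lang + ".traineddata", e);
    }
  }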