private static double perceptualHash(String sourceImagePath, String targetImagePath) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat matSrc = Imgcodecs.imread(sourceImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat matTrgt = Imgcodecs.imread(targetImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat matSrcResize = new Mat();
    Mat matTrgtResize = new Mat();
    // Note: these resize calls target each image's own size, so they effectively just copy the inputs
    Imgproc.resize(
        matSrc,
        matSrcResize,
        new Size(matSrc.width(), matSrc.height()),
        0,
        0,
        Imgproc.INTER_NEAREST);
    Imgproc.resize(
        matTrgt,
        matTrgtResize,
        new Size(matTrgt.width(), matTrgt.height()),
        0,
        0,
        Imgproc.INTER_LANCZOS4);
    // Downscale both images to 8x8 and convert to grayscale: the 64 pixels become the hash bits
    Mat matSrcDst = new Mat();
    Mat matTrgtDst = new Mat();
    Imgproc.resize(matSrcResize, matSrcDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.resize(matTrgtResize, matTrgtDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.cvtColor(matSrcDst, matSrcDst, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(matTrgtDst, matTrgtDst, Imgproc.COLOR_BGR2GRAY);

    int iAvgSrc = 0, iAvgTrgt = 0;
    int[] arrSrc = new int[64];
    int[] arrTrgt = new int[64];
    for (int i = 0; i < 8; i++) {
      byte[] dataSrc = new byte[8];
      matSrcDst.get(i, 0, dataSrc);
      byte[] dataTrgt = new byte[8];
      matTrgtDst.get(i, 0, dataTrgt);

      int tmp = i * 8;
      for (int j = 0; j < 8; j++) {
        int tmpSrc = tmp + j;
        // Mask to an unsigned value before quantizing to steps of 4;
        // Java bytes are signed, so pixel values above 127 would otherwise go negative
        arrSrc[tmpSrc] = (dataSrc[j] & 0xFF) / 4 * 4;
        arrTrgt[tmpSrc] = (dataTrgt[j] & 0xFF) / 4 * 4;
        iAvgSrc += arrSrc[tmpSrc];
        iAvgTrgt += arrTrgt[tmpSrc];
      }
    }

    iAvgSrc /= 64;
    iAvgTrgt /= 64;
    for (int i = 0; i < 64; i++) {
      arrSrc[i] = (arrSrc[i] >= iAvgSrc) ? 1 : 0;
      arrTrgt[i] = (arrTrgt[i] >= iAvgTrgt) ? 1 : 0;
    }
    int iDiffNum = 0;
    for (int i = 0; i < 64; i++) if (arrSrc[i] != arrTrgt[i]) ++iDiffNum;

    return 1.0 - (double) iDiffNum / 64;
  }
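  // Usage sketch for perceptualHash(...) above. The 0.9 similarity threshold is an illustrative
  // assumption, and the method would need to be reachable from the calling class (it is declared
  // private static here).
  static boolean looksLikeDuplicate(String pathA, String pathB) {
    // 1.0 means the two 64-bit average hashes match exactly; 0.0 means every bit differs
    double similarity = perceptualHash(pathA, pathB);
    return similarity >= 0.9;
  }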
 public static Mat findCardNumber(String path) {
   Mat mat = Highgui.imread(path);
   // The card number strip occupies roughly the band from 30/54 to 37/54 of the card height
   int x = 0;
   int y = (int) (mat.height() * ((double) 30 / 54));
   int width = mat.cols();
   int height = (int) (mat.height() * ((double) 7 / 54));
   return mat.submat(new Rect(x, y, width, height));
 }
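 // Usage sketch: findCardNumber(...) above returns the cropped number band as a submat view.
 // The helper below is a hypothetical caller that simply writes that band to disk; both paths
 // are assumptions supplied by the caller.
 public static void saveCardNumberStrip(String cardImagePath, String outputPath) {
   Mat numberStrip = findCardNumber(cardImagePath);
   Highgui.imwrite(outputPath, numberStrip);
 }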
  /* Checks whether the point lies in the
   * lower-central region of the image.
   */
  private boolean isInN(Point center) {
    // Top-left corner of the region
    Point p0 = new Point();
    p0.x = mRgba.width() / 4;
    p0.y = (mRgba.height() / 4) * 3;

    // Bottom-right corner of the region
    Point p1 = new Point();
    p1.x = (mRgba.width() / 4) * 3;
    p1.y = mRgba.height();

    return (p0.x <= center.x && center.x <= p1.x) && (p0.y <= center.y && center.y <= p1.y);
  }
  public void processFrame(CameraEvents.Frame frameEvent) {
    if (service.getActivityMode() != ActivityEvent.Mode.WARP) return;
    if (mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) {
      synchronized (this) {
        if (!isSetup) setupMatrices();
        if (captureSample) {
          captureSample = false;
          Log.d(TAG, "Warp: Capturing Sample");
          Mat frame = frameEvent.getCameraFrame().getRGB();
          byte[] frameJPEG = frameEvent.getCameraFrame().getJPEG();
          if (sampleBGR == null
              || sampleBGR.height() != frame.height()
              || sampleBGR.width() != frame.width())
            sampleBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
          Imgproc.cvtColor(frame, sampleBGR, Imgproc.COLOR_RGB2BGR);
          useSample = true;
          // TODO: Specialize it for this group/device
          com.dappervision.wearscript.Utils.eventBusPost(
              new SendEvent("warpsample", "", ValueFactory.createRawValue(frameJPEG)));
        }
      }
    }

    if (busy) return;
    synchronized (this) {
      busy = true;
      if (!isSetup) setupMatrices();
      if (mode == Mode.CAM2GLASS) {
        Mat inputBGR;
        Mat frame = frameEvent.getCameraFrame().getRGB();
        if (frameBGR == null
            || frameBGR.height() != frame.height()
            || frameBGR.width() != frame.width())
          frameBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
        Mat hSmallToGlassMat = getHSmallToGlassMat(frame.rows(), frame.cols());
        if (hSmallToGlassMat == null) {
          Log.w(TAG, "Warp: Bad size");
          busy = false;
          return;
        }
        Imgproc.cvtColor(frame, frameBGR, Imgproc.COLOR_RGB2BGR);
        inputBGR = frameBGR;
        Imgproc.warpPerspective(
            inputBGR, frameWarp, hSmallToGlassMat, new Size(frameWarp.width(), frameWarp.height()));
        drawFrame(frameWarp);
      }
      busy = false;
    }
  }
  void drawFrame(Mat modified) {
    // Partly from OpenCV CameraBridgeViewBase.java
    if (mCacheBitmap == null) {
      mCacheBitmap =
          Bitmap.createBitmap(modified.width(), modified.height(), Bitmap.Config.ARGB_8888);
    }
    boolean bmpValid = true;
    if (modified != null) {
      try {
        Utils.matToBitmap(modified, mCacheBitmap);
      } catch (Exception e) {
        Log.e(TAG, "Mat type: " + modified);
        Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
        Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
        bmpValid = false;
      }
    }
    if (bmpValid && mCacheBitmap != null) {
      Canvas canvas = view.getHolder().lockCanvas();
      if (canvas != null) {
        canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
        canvas.drawBitmap(
            mCacheBitmap,
            new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
            new Rect(
                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()),
            null);

        view.getHolder().unlockCanvasAndPost(canvas);
      }
    }
  }
  public List<ArtifactDetectedObject> detectObjects(BufferedImage bImage) {
    List<ArtifactDetectedObject> detectedObjectList = new ArrayList<>();
    Mat image = OpenCVUtils.bufferedImageToMat(bImage);
    if (image != null) {
      MatOfRect faceDetections = new MatOfRect();
      double width = image.width();
      double height = image.height();
      for (CascadeClassifierHolder objectClassifier : objectClassifiers) {
        objectClassifier.cascadeClassifier.detectMultiScale(image, faceDetections);

        for (Rect rect : faceDetections.toArray()) {
          ArtifactDetectedObject detectedObject =
              new ArtifactDetectedObject(
                  rect.x / width,
                  rect.y / height,
                  (rect.x + rect.width) / width,
                  (rect.y + rect.height) / height,
                  objectClassifier.conceptIRI,
                  PROCESS);
          detectedObjectList.add(detectedObject);
        }
      }
    }
    return detectedObjectList;
  }
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);
    Point center = new Point(mRgba.width() / 2, mRgba.height() / 2);
    double angle = -90; // in OpenCV, positive angles rotate counter-clockwise, so -90 is clockwise
    double scale = 1.0;

    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    Imgproc.warpAffine(mRgba, mGrayMat, mapMatrix, mRgba.size(), Imgproc.INTER_LINEAR);
    return mGrayMat;
  }
 public void dibujarRegiones() {
   // **** Draw the rectangle for each region (for debugging) ****
   // L left
   Point pt1 = new Point(0, 0);
   Point pt2 = new Point(mRgba.width() / 4, mRgba.height());
   Core.rectangle(mRgba, pt1, pt2, WHITE);
   // C center-top
   Point pt3 = new Point(mRgba.width() / 4, 0);
   Point pt4 = new Point((mRgba.width() / 4) * 3, (mRgba.height() / 4) * 3);
   Core.rectangle(mRgba, pt3, pt4, WHITE);
   // N center-bottom
   Point pt5 = new Point(mRgba.width() / 4, (mRgba.height() / 4) * 3);
   Point pt6 = new Point((mRgba.width() / 4) * 3, mRgba.height());
   Core.rectangle(mRgba, pt5, pt6, WHITE);
   // R right
   Point pt7 = new Point((mRgba.width() / 4) * 3, 0);
   Point pt8 = new Point(mRgba.width(), mRgba.height());
   Core.rectangle(mRgba, pt7, pt8, WHITE);
 }
 @Override
 public Mat process(Mat input) {
   Bitmap resultBitmap = Bitmap.createBitmap(input.width(), input.height(), Bitmap.Config.RGB_565);
   Utils.matToBitmap(input, resultBitmap);
   try {
     writeBitmapToStorage(resultBitmap);
   } catch (IOException e) {
     Log.e(this.getClass().getName(), e.getMessage());
   }
   return input;
 }
 public void onEventAsync(WarpHEvent event) {
   double[] h = event.getH();
   synchronized (this) {
     if ((mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) && useSample) {
       if (!isSetup) setupMatrices();
       double[] hSmallToGlass = getHSmallToGlass(sampleBGR.height(), sampleBGR.width());
       if (hSmallToGlass == null) {
         Log.w(TAG, "Warp: Bad size");
         return;
       }
       Log.d(TAG, "Warp: WarpHEvent");
       if (mode == Mode.SAMPLEWARPGLASS) {
         h = HMult(hSmallToGlass, h);
       }
       Mat hMat = HMatFromArray(h);
       Imgproc.warpPerspective(
           sampleBGR, frameWarp, hMat, new Size(frameWarp.width(), frameWarp.height()));
       drawFrame(frameWarp);
     }
   }
 }
 public Bitmap convertMatToBitmap(Mat seedsImage) {
   Bitmap bmp = null;
   Mat tmp = new Mat(seedsImage.height(), seedsImage.width(), CvType.CV_8U, new Scalar(4));
   try {
     // Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_RGB2BGRA);
     Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_GRAY2RGBA, 4);
     bmp = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
     Utils.matToBitmap(tmp, bmp);
   } catch (CvException e) {
     Log.d("Exception", e.getMessage());
   }
   return bmp;
 }
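 // Usage sketch: show the converted bitmap in an ImageView. "imageView" and "grayMat" are
 // hypothetical names; convertMatToBitmap(...) above expects a single-channel (grayscale) Mat.
 private void showSeedsImage(ImageView imageView, Mat grayMat) {
   Bitmap bmp = convertMatToBitmap(grayMat);
   if (bmp != null) {
     imageView.setImageBitmap(bmp);
   }
 }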
  public static Mat[] getMatArray(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    // NV21 preview data holds the Y plane followed by interleaved VU, hence height * 3/2 rows
    Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
    Mat rgba = new Mat(height, width, CvType.CV_8UC3);
    yuv.put(0, 0, data);
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGB_NV21);

    Mat rgbaResult = rotateMat(rgba);
    Mat grayResult = new Mat(rgbaResult.height(), rgbaResult.width(), CvType.CV_8UC1);
    // The color frame is RGB (see the conversion above), so convert with RGB2GRAY
    Imgproc.cvtColor(rgbaResult, grayResult, Imgproc.COLOR_RGB2GRAY);

    yuv.release();
    rgba.release();
    Mat[] result = {grayResult, rgbaResult};
    return result;
  }
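  // Wiring sketch (assumption: the legacy android.hardware.Camera preview API is in use, as
  // getMatArray(...) above implies). The returned pair is {grayscale, rotated color}, and both
  // Mats should be released once per-frame processing is done.
  public static void attachPreviewCallback(Camera camera) {
    camera.setPreviewCallback(
        new Camera.PreviewCallback() {
          @Override
          public void onPreviewFrame(byte[] data, Camera cam) {
            Mat[] mats = getMatArray(data, cam);
            try {
              // ... run per-frame processing on mats[0] (gray) or mats[1] (color) here
            } finally {
              mats[0].release();
              mats[1].release();
            }
          }
        });
  }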
 public void onEvent(WarpDrawEvent event) {
   synchronized (this) {
     if ((mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) && useSample) {
       if (!isSetup) setupMatrices();
       double[] hGlassToSmall = getHGlassToSmall(sampleBGR.height(), sampleBGR.width());
       if (hGlassToSmall == null) {
         Log.w(TAG, "Warp: Bad size");
         return;
       }
       double[] circleCenter = {event.getX(), event.getY(), 1};
       double[] circleCenterSmall = HMultPoint(hGlassToSmall, circleCenter);
       Core.circle(
           sampleBGR,
           new Point(circleCenterSmall[0], circleCenterSmall[1]),
           event.getRadius(),
           new Scalar(event.getR(), event.getG(), event.getB()));
     }
   }
 }
  private void seamlessEdges(Mat image) {
    int imageHeight = image.height();
    int imageWidth = image.width();
    double smoothRange = smoothValue * (imageWidth / 2000f);
    double smoothRangeHalf = smoothRange / 2;
    double[] pixel1;
    double[] pixel2;
    double[] tempPixel = new double[4];
    tempPixel[3] = 255; // keep the fourth channel (alpha, assuming an RGBA image) fully opaque

    int i, j;
    for (i = 0; i < smoothRangeHalf; i++) {
      for (j = 0; j < imageHeight; j++) {
        pixel1 = image.get(j, i);
        pixel2 = image.get(j, imageWidth - i - 1);
        tempPixel[0] =
            pixel1[0] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[0] * ((smoothRangeHalf - i) / smoothRange);
        tempPixel[1] =
            pixel1[1] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[1] * ((smoothRangeHalf - i) / smoothRange);
        tempPixel[2] =
            pixel1[2] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[2] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[0] =
            pixel2[0] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[0] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[1] =
            pixel2[1] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[1] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[2] =
            pixel2[2] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[2] * ((smoothRangeHalf - i) / smoothRange);
        image.put(j, i, tempPixel);
        image.put(j, imageWidth - i - 1, pixel2);
      }
    }
  }
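  // Illustration of the cross-fade used above: at offset i from the border, the near-edge pixel
  // keeps weight (smoothRangeHalf + i) / smoothRange and the opposite edge contributes
  // (smoothRangeHalf - i) / smoothRange, so the two weights always sum to 1 and the blend fades
  // out linearly across the smoothing band. The value 40 below is a hypothetical smoothRange
  // chosen only for this printout.
  public static void printBlendWeights() {
    double smoothRange = 40;
    double smoothRangeHalf = smoothRange / 2;
    for (int i = 0; i < smoothRangeHalf; i++) {
      double wNear = (smoothRangeHalf + i) / smoothRange;
      double wFar = (smoothRangeHalf - i) / smoothRange;
      System.out.printf("i=%2d  near=%.3f  far=%.3f  sum=%.1f%n", i, wNear, wFar, wNear + wFar);
    }
  }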
  public static Bitmap matToBitmap(Mat m) {
    Bitmap b = Bitmap.createBitmap(m.width(), m.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(m, b);

    return b;
  }
  private void drawCircles(Mat img, Mat circles) {
    logMsg("Found " + circles.cols() + " circles");

    // If two circles were found and the difference in their y coordinates is at most 20 pixels
    if (circles.cols() == 2 && Math.abs(circles.get(0, 0)[1] - (circles.get(0, 1)[1])) <= 20) {
      drawCircles = true;
      setCircleParameters(circles);
      // Find the midpoint between the two circles
      double midCircle = (circle1 + circle2) / 2;
      // Subtract from the center so the distance is negative on the left side
      double distance = center - midCircle;
      // Calculate the bearing angle toward the midpoint
      double tempAngle = Math.atan(distance / img.height());
      // Calculate angle change from previous
      double angle_destination_change = (tempAngle - prevAngle) / (endTime - startTime);
      prevAngle = tempAngle;
      // use gyro information from arduino to get rotation rate of heading
      double[] gyro = getGyro();
      double drz = 0;
      if (gyro != null) drz = gyro[2];

      // Get the steering angle from the PD controller
      angle = tempAngle * Kp + (angle_destination_change - drz) * Kd;

      // Ensure angle is within bounds
      if (angle < -1.0) angle = -1.0;
      else if (angle > 1.0) angle = 1.0;

      // Set the angle on the twist message and send it to the server
      twist.drz(angle);
      sendTwist(twist);
      logMsg("Angle: " + angle);
    } else if (circles.cols() <= 1) {
      // Relax the Hough search: minimum radius bottoms out at 5,
      // minimum distance at 20, and maximum radius tops out at 80
      if (minRadius >= 10) minRadius -= 5;
      if (minDistance >= 30) minDistance -= 10;
      if (maxRadius <= 70) maxRadius += 10;
    } else {
      if (minRadius + 5 < maxRadius) minRadius += 5;
      if (maxRadius - 10 > minRadius) maxRadius -= 10;
      if (minDistance + 10 <= 100) minDistance += 10;
    }

    // If the circles have not been seen for more than 10 frames, something is wrong
    if (drawCircles) {
      frameCount = 0;
    } else if (frameCount > 10) {
      sendTwist(new Twist(0, 0, 0, 0, 0, 0));
      frameCount = 0;
      logMsg("Lost the circles");
    }

    // Draw the circles
    for (int x = 0; drawCircles && circles.cols() > 1 && x < circles.cols(); x++) {
      double[] circle = circles.get(0, x);
      int ptx = (int) Math.round(circle[0]), pty = (int) Math.round(circle[1]);
      Point pt = new Point(ptx, pty);
      int radius = (int) Math.round(circle[2]);

      // Draw the circle outline
      Core.circle(img, pt, radius, new Scalar(0, 255, 0), 3, 8, 0);
      // Draw the circle center
      Core.circle(img, pt, 3, new Scalar(0, 255, 0), -1, 8, 0);

      // Core.putText(img, ""+ angle, new Point(50,100), Core.FONT_HERSHEY_COMPLEX, .8, new
      // Scalar(255,0,0));
    }
  }
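  // A minimal sketch of the steering law applied above: a proportional term on the bearing toward
  // the circles' midpoint plus a derivative term corrected by the gyro's measured rotation rate,
  // clamped to [-1, 1]. Kp and Kd are tuning constants defined elsewhere in the original class;
  // here they are passed in explicitly.
  private static double pdSteeringAngle(
      double bearing, double bearingRate, double gyroRateZ, double kp, double kd) {
    double output = bearing * kp + (bearingRate - gyroRateZ) * kd;
    return Math.max(-1.0, Math.min(1.0, output));
  }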
  public void btn_camera_ok(View view) {
    Log.i(TAG, "btn_camera_ok");
    if (!bPictaken) {
      ToastUtil.showShortToast(getApplicationContext(), "Please take a photo first!");
      return; // do not forget!
    }
    // Doing this asynchronously can throw "method called after release()":
    // the activity may be released before the background thread returns.
    final Mat image = Highgui.imread(filePath);
    int width = image.width();
    int height = image.height();
    if (width > height) { // landscape input: rotate it to portrait
      Core.flip(image.t(), image, 0); // rotate 90 degrees counter-clockwise
    }
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY); // convert to grayscale
    Imgproc.resize(image, image, new Size(CommonUtil.IMAGE_WIDTH, CommonUtil.IMAGE_HEIGHT));
    int total = 0;
    String stotal = CommonUtil.userProps.getProperty("total");
    if (null != stotal) { // there are existing users
      total = Integer.parseInt(stotal);
    }
    if (userid <= 0) { // this user does not exist yet
      userid = total + 1;
      try { // save the new user data
        CommonUtil.userProps.setProperty("total", String.valueOf(userid));
        CommonUtil.userProps.setProperty(String.valueOf(userid), name);
        CommonUtil.saveUserProperties(CommonUtil.userProps);
      } catch (Exception e) {
        e.printStackTrace();
      }
      // create a folder for this user
      File userfolder =
          new File(
              CommonUtil.USERFOLDER.getAbsolutePath() + File.separator + String.valueOf(userid));
      if (!userfolder.exists()) {
        userfolder.mkdir();
      }
    }
    filePath =
        CommonUtil.USERFOLDER.getAbsolutePath()
            + File.separator
            + String.valueOf(userid)
            + File.separator
            + System.currentTimeMillis()
            + ".jpg"; // folder (user / userid)
    Highgui.imwrite(filePath, image);
    // save data to facedata.txt
    String data = filePath + ";" + userid + "\n"; // user image file path;user id
    try {
      RandomAccessFile facedataFile =
          new RandomAccessFile(
              CommonUtil.SDFOLDER + File.separator + CommonUtil.FACEDATA_FILENAME, "rw");
      facedataFile.seek(facedataFile.length());
      facedataFile.write(data.getBytes());
      facedataFile.close();
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }
    Log.i(TAG, "image process ok");

    // add this pic to the model data
    new AsyncTask<Void, Void, Boolean>() {

      @Override
      protected Boolean doInBackground(Void... params) {
        xface.addImage(image, userid); // TODO: how to determine whether adding the image succeeded?
        return true;
      }

      @Override
      protected void onPostExecute(Boolean result) {
        if (result) {
          ToastUtil.showShortToast(getApplicationContext(), "Photo saved and the model was built successfully!");
        } else {
          ToastUtil.showShortToast(getApplicationContext(), "Photo saved, but building the model failed!");
        }
        btn_camera_ok.setEnabled(true);
      }

      @Override
      protected void onPreExecute() {
        ToastUtil.showShortToast(getApplicationContext(), "照片保存中...");
        btn_camera_ok.setEnabled(false); // can not let user save two images at the same time!
      }
    }.execute();
  }
  public void run() {
    ArrayList<Geometry.Quad> squares;

    Mat image = new Mat();
    Utils.bitmapToMat(source, image);

    Mat bwimage = new Mat();
    cvtColor(image, bwimage, COLOR_RGB2GRAY);

    Mat blurred = new Mat();
    medianBlur(image, blurred, 9);

    int width = blurred.width();
    int height = blurred.height();
    int depth = blurred.depth();

    // Mat's constructor takes (rows, cols, type); copyTo below (re)allocates gray0 to match anyway
    Mat gray0 = new Mat(height, width, blurred.type());
    blurred.copyTo(gray0);

    squares = new ArrayList<Geometry.Quad>();

    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
      Core.mixChannels(
          Arrays.asList(blurred), Arrays.asList(new Mat[] {gray0}), new MatOfInt(c, 0));

      // try several threshold levels
      int thresholdLevel = 8;
      for (int l = 0; l < thresholdLevel; l++) {
        // use canny instead of 0 threshold level
        // canny helps catch squares with gradient shading
        Mat gray = new Mat();

        if (l == 0) {
          Canny(gray0, gray, 10.0, 20.0, 3, false);
          Mat kernel = new Mat(11, 11, CvType.CV_8UC1, new Scalar(1));
          dilate(gray, gray, kernel);
        } else {
          Mat thresh = new Mat(gray0.rows(), gray0.cols(), gray0.type());
          threshold(gray0, thresh, ((double) l) / thresholdLevel * 255, 128, THRESH_BINARY_INV);
          cvtColor(thresh, gray, COLOR_BGR2GRAY);
        }

        // find contours and store them in a list
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        findContours(gray, contours, new Mat(), RETR_LIST, CHAIN_APPROX_SIMPLE);

        // test contours
        for (MatOfPoint contour : contours) {
          // approximate contour with accuracy proportional to the contour perimeter
          MatOfPoint2f thisContour = new MatOfPoint2f(contour.toArray());
          double arclength = 0.02 * arcLength(thisContour, true);
          MatOfPoint2f approx = new MatOfPoint2f();
          approxPolyDP(thisContour, approx, arclength, true);

          double area = contourArea(approx);
          boolean isConvex = isContourConvex(new MatOfPoint(approx.toArray()));

          if (approx.rows() == 4 && Math.abs(area) > SQUARE_SIZE && isConvex) {
            double maxCosine = 0;

            Point[] approxArray = approx.toArray();
            for (int j = 2; j < 5; j++) {
              double cosine =
                  Math.abs(angle(approxArray[j % 4], approxArray[j - 2], approxArray[j - 1]));
              maxCosine = Math.max(maxCosine, cosine);
            }

            if (maxCosine > THRESHOLD_COS) {
              squares.add(new Geometry.Quad(approxArray));
              Log.d(TAG, "area = " + area);
            }
          }
        }
      }
    }

    result = new Bundle();
    result.putParcelableArrayList("squares", squares);
    Log.d(TAG, "result created");

    finish();
  }
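  // The angle(...) helper referenced in the contour test above is not shown in this snippet.
  // A sketch following the convention of OpenCV's classic squares.cpp sample: it returns the
  // cosine of the angle at vertex pt0 formed by the vectors pt0->pt1 and pt0->pt2 (the exact
  // signature in the original project may differ).
  private static double angle(Point pt1, Point pt2, Point pt0) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x;
    double dy2 = pt2.y - pt0.y;
    return (dx1 * dx2 + dy1 * dy2)
        / Math.sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
  }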
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);

    switch (ImageManipulationsActivity.viewMode) {
      case ImageManipulationsActivity.VIEW_MODE_RGBA:
        break;

      case ImageManipulationsActivity.VIEW_MODE_HIST:
        if ((mSizeRgba == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        int thikness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
        if (thikness > 5) thikness = 5;
        int offset = (int) ((mSizeRgba.width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
        // RGB
        for (int c = 0; c < 3; c++) {
          Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
          Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
          mHist.get(0, 0, mBuff);
          for (int h = 0; h < mHistSizeNum; h++) {
            mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
            mP1.y = mSizeRgba.height - 1;
            mP2.y = mP1.y - 2 - (int) mBuff[h];
            Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
          }
        }
        // Value and Hue
        Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
        // Value
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mWhilte, thikness);
        }
        // Hue
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
        }
        break;

      case ImageManipulationsActivity.VIEW_MODE_CANNY:
        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SOBEL:
        Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();

        Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
        Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SEPIA:
        Core.transform(mRgba, mRgba, mSepiaKernel);
        break;

      case ImageManipulationsActivity.VIEW_MODE_ZOOM:
        if ((mZoomCorner == null)
            || (mZoomWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());

        Size wsize = mZoomWindow.size();
        Core.rectangle(
            mZoomWindow,
            new Point(1, 1),
            new Point(wsize.width - 2, wsize.height - 2),
            new Scalar(255, 0, 0, 255),
            2);
        break;

      case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
        Imgproc.resize(
            mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
        break;

      case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        /*
        Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
        */

        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
        Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1. / 16, 0);
        Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
        break;
    }

    return mRgba;
  }
  public Mat cutFrame(
      Mat image,
      double resdpi,
      int frameWidthPix,
      int frameHeightPix,
      boolean reverseImage,
      int frameNum,
      double scaleMult,
      boolean rescale,
      boolean correctrotation) {
    noCut = false;
    if (farEdge == null || farEdge.stdDev > worstEdgeStdDevAllowed) {
      System.out.println(
          "WARNING: far film edge for frame "
              + frameNum
              + " has a stdDev of "
              + (farEdge == null ? "null" : Double.toString(farEdge.stdDev))
              + " and will not be used.");

      if (sprocketEdge == null || sprocketEdge.stdDev > worstEdgeStdDevAllowed) {
        System.out.println(
            "WARNING: near film edge for frame "
                + frameNum
                + " has a stdDev of "
                + (sprocketEdge == null ? "null" : Double.toString(sprocketEdge.stdDev))
                + " and will not be used.");
        noCut = true;
        return null;
      } else
        preMapSetupUsingSprocketEdge(
            frameWidthPix, frameHeightPix, scaleMult, reverseImage, rescale, correctrotation);
    } else
      preMapSetupUsingFarEdge(
          frameWidthPix, frameHeightPix, scaleMult, reverseImage, rescale, correctrotation);

    outOfBounds = false;

    CvRaster srcraster = CvRaster.create(image.height(), image.width(), image.type());
    srcraster.loadFrom(image);
    CvRaster dstraster = CvRaster.create(frameHeightPix, frameWidthPix, srcraster.type);

    int srcwidth = srcraster.cols;
    int srcheight = srcraster.rows;

    int dstwidth = dstraster.cols;
    int dstheight = dstraster.rows;

    for (int dstRow = 0; dstRow < dstheight; dstRow++) {
      for (int dstCol = 0; dstCol < dstwidth; dstCol++) {
        Point srclocation = map(dstRow, dstCol, frameWidthPix, frameHeightPix, reverseImage);

        if (leftmostCol > srclocation.x) leftmostCol = srclocation.x;
        if (rightmostCol < srclocation.x) rightmostCol = srclocation.x;
        if (topmostRow > srclocation.y) topmostRow = srclocation.y;
        if (bottommostRow < srclocation.y) bottommostRow = srclocation.y;

        if (srclocation.y < 0
            || srclocation.y >= srcheight
            || srclocation.x < 0
            || srclocation.x >= srcwidth) {
          dstraster.zero(dstRow, dstCol);
          outOfBounds = true;
        } else dstraster.set(dstRow, dstCol, srcraster.get(srclocation.y, srclocation.x));
      }
    }

    frameCut = true;

    return dstraster.toMat();
  }