private static Bitmap canny(Bitmap image) {

    // convert the bitmap to a matrix (Mat takes rows, cols; Utils.bitmapToMat yields an RGBA 8UC4 Mat)
    Mat mat1 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(image, mat1);

    // temporary working matrix
    Mat mat2 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC1);

    // convert the RGBA frame to grayscale
    Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGBA2GRAY);

    // a Gaussian blur suppresses noise and prevents many false edge hits
    Imgproc.GaussianBlur(mat2, mat1, new Size(3, 3), 2, 2); // 3x3 kernel, sigmaX = sigmaY = 2

    // now apply the Canny edge detector
    int param_threshold1 = 25; // lower threshold, chosen manually
    int param_threshold2 = param_threshold1 * 3; // upper threshold; Canny's recommended 3:1 ratio
    Imgproc.Canny(mat1, mat2, param_threshold1, param_threshold2);

    // expand the single-channel edge map back to 4 channels so it can be copied into a bitmap
    Imgproc.cvtColor(mat2, mat1, Imgproc.COLOR_GRAY2RGBA, 4);

    // convert the matrix to the output bitmap
    Bitmap output = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
    Utils.matToBitmap(mat1, output);
    return output;
  }
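A minimal usage sketch for the method above (not from the original source; R.drawable.sample and R.id.edge_view are hypothetical resource names, and the OpenCV native library is assumed to be initialized already, e.g. via OpenCVLoader.initDebug()):

    // Hypothetical usage inside an Activity after OpenCV has been initialized.
    Bitmap input = BitmapFactory.decodeResource(getResources(), R.drawable.sample); // placeholder drawable
    Bitmap edges = canny(input);
    ImageView edgeView = (ImageView) findViewById(R.id.edge_view); // placeholder view id
    edgeView.setImageBitmap(edges);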
Example #2
 public static void cvt_YUVtoGRAY(Mat src, Mat dst) {
   // convert a YUV (NV21) image to RGB, then to grayscale
   Mat mSrc = new Mat();
   src.copyTo(mSrc);
   Imgproc.cvtColor(mSrc, dst, Imgproc.COLOR_YUV420sp2RGB);
   Imgproc.cvtColor(dst, dst, Imgproc.COLOR_RGB2GRAY);
 }
Example #3
  public static void main(String[] args) {
    try {

      System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
      Mat source = Imgcodecs.imread("test_image.jpg", Imgcodecs.IMREAD_GRAYSCALE);

      Mat destination = new Mat(source.rows(), source.cols(), source.type());
      Imgproc.GaussianBlur(source, source, new Size(45, 45), 0);
      Imgproc.adaptiveThreshold(
          source, source, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 75, 10);
      Core.bitwise_not(source, source);

      // Line detection (the HoughLines call below is left commented out)
      Mat img2 = new Mat(); // cvtColor requires a non-null destination Mat
      Imgproc.cvtColor(source, img2, Imgproc.COLOR_GRAY2RGB);

      Mat img3 = new Mat();
      Imgproc.cvtColor(source, img3, Imgproc.COLOR_GRAY2RGB);

      MatOfInt4 lines = new MatOfInt4();
      // Imgproc.HoughLines(img, lines, rho, theta, threshold);

      // Write to File
      Imgcodecs.imwrite("gaussian.jpg", source);
      System.out.println("Success!");
    } catch (Exception e) {
      System.out.println("Error has occurred: " + e.getMessage());
    }
  }
Example #4
  public boolean hasChanges(Mat current) {
    int PIXEL_DIFF_THRESHOLD = 5;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat bg = new Mat();
    Mat cg = new Mat();
    Mat diff = new Mat();
    Mat tdiff = new Mat();

    Imgproc.cvtColor(base, bg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(current, cg, Imgproc.COLOR_BGR2GRAY);
    Core.absdiff(bg, cg, diff);
    Imgproc.threshold(diff, tdiff, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);
    if (Core.countNonZero(tdiff) <= IMAGE_DIFF_THRESHOLD) {
      return false;
    }

    Imgproc.threshold(diff, diff, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(diff, diff, new Mat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(diff, diff, Imgproc.MORPH_CLOSE, se);

    List<MatOfPoint> points = new ArrayList<MatOfPoint>();
    Mat contours = new Mat();
    Imgproc.findContours(diff, points, contours, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    int n = 0;
    for (Mat pm : points) {
      log(lvl, "(%d) %s", n++, pm);
      printMatI(pm);
    }
    log(lvl, "contours: %s", contours);
    printMatI(contours);
    return true;
  }
Example #5
  /**
   * @param inputImg input BGR image
   * @return the blurred and morphologically opened RGB image (the watershed markers are written into a local Mat)
   */
  public static Mat watershed(Mat inputImg) {

    Mat target = new Mat(inputImg.rows(), inputImg.cols(), CvType.CV_8UC3);
    Imgproc.cvtColor(inputImg, target, Imgproc.COLOR_BGR2RGB);

    // conversion to an 8UC1 grayscale image (note: grayScale is not used further below)
    Mat grayScale = new Mat(inputImg.rows(), inputImg.cols(), CvType.CV_8UC1);
    Imgproc.cvtColor(inputImg, grayScale, Imgproc.COLOR_BGR2GRAY);

    // constructing a 9x9 kernel for morphological opening
    Mat openingKernel = Mat.ones(9, 9, CvType.CV_8U);

    // brightness adjustment (disabled)
    // target.convertTo(target, -1, 10d * 12 / 100, 0);
    // Imgproc.dilate(target, target, new Mat(), new Point(-1, -1), 1);

    Size s = new Size(27, 27);
    Imgproc.GaussianBlur(target, target, s, 1.7);

    Imgproc.morphologyEx(target, target, Imgproc.MORPH_OPEN, openingKernel);

    // dilation operation for extracting the background
    // Imgproc.dilate(target, target, openingKernel);
    // Imgproc.erode(target, target, new Mat(), new Point(-1, -1), 1);

    // marker image for watershed: zero means "unknown", positive values label the seeds
    Mat seeds = Mat.zeros(target.rows(), target.cols(), CvType.CV_32SC1);

    for (int i = 1; i <= 10; i++) {
      int row = (int) (Math.random() * target.rows());
      int col = (int) (Math.random() * target.cols());
      seeds.put(row, col, i);
    }

    Imgproc.watershed(target, seeds);
    // Imgproc.threshold(target,target, 50, 155, Imgproc.THRESH_BINARY );
    return target;
  }
Example #6
  private static double perceptualHash(String sourceImagePath, String targetImagePath) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat matSrc = Imgcodecs.imread(sourceImagePath, Imgcodecs.IMREAD_COLOR);
    Mat matTrgt = Imgcodecs.imread(targetImagePath, Imgcodecs.IMREAD_COLOR);
    Mat matSrcResize = new Mat();
    Mat matTrgtResize = new Mat();
    Imgproc.resize(
        matSrc,
        matSrcResize,
        new Size(matSrc.width(), matSrc.height()),
        0,
        0,
        Imgproc.INTER_NEAREST);
    Imgproc.resize(
        matTrgt,
        matTrgtResize,
        new Size(matTrgt.width(), matTrgt.height()),
        0,
        0,
        Imgproc.INTER_LANCZOS4);
    Mat matSrcDst = new Mat();
    Mat matTrgtDst = new Mat();
    Imgproc.resize(matSrcResize, matSrcDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.resize(matTrgtResize, matTrgtDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.cvtColor(matSrcDst, matSrcDst, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(matTrgtDst, matTrgtDst, Imgproc.COLOR_BGR2GRAY);

    int iAvgSrc = 0, iAvgTrgt = 0;
    int[] arrSrc = new int[64];
    int[] arrTrgt = new int[64];
    for (int i = 0; i < 8; i++) {
      byte[] dataSrc = new byte[8];
      matSrcDst.get(i, 0, dataSrc);
      byte[] dataTrgt = new byte[8];
      matTrgtDst.get(i, 0, dataTrgt);

      int tmp = i * 8;
      for (int j = 0; j < 8; j++) {
        int tmpSrc = tmp + j;
        // mask with 0xFF so gray values above 127 are not read as negative, then quantize to a multiple of 4
        arrSrc[tmpSrc] = (dataSrc[j] & 0xFF) / 4 * 4;
        arrTrgt[tmpSrc] = (dataTrgt[j] & 0xFF) / 4 * 4;
        iAvgSrc += arrSrc[tmpSrc];
        iAvgTrgt += arrTrgt[tmpSrc];
      }
    }

    iAvgSrc /= 64;
    iAvgTrgt /= 64;
    for (int i = 0; i < 64; i++) {
      arrSrc[i] = (arrSrc[i] >= iAvgSrc) ? 1 : 0;
      arrTrgt[i] = (arrTrgt[i] >= iAvgTrgt) ? 1 : 0;
    }
    int iDiffNum = 0;
    for (int i = 0; i < 64; i++) if (arrSrc[i] != arrTrgt[i]) ++iDiffNum;

    return 1.0 - (double) iDiffNum / 64;
  }
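A minimal usage sketch (not part of the original snippet; the file paths are hypothetical): the return value is a similarity score in [0, 1], where 1.0 means the two 8x8 average hashes match exactly.

    double similarity = perceptualHash("photo_a.jpg", "photo_b.jpg"); // hypothetical paths
    if (similarity > 0.9) {
      System.out.println("Likely near-duplicates, similarity = " + similarity);
    }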
Example #7
  public void processFrame(CameraEvents.Frame frameEvent) {
    if (service.getActivityMode() != ActivityEvent.Mode.WARP) return;
    if (mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) {
      synchronized (this) {
        if (!isSetup) setupMatrices();
        if (captureSample) {
          captureSample = false;
          Log.d(TAG, "Warp: Capturing Sample");
          Mat frame = frameEvent.getCameraFrame().getRGB();
          byte[] frameJPEG = frameEvent.getCameraFrame().getJPEG();
          if (sampleBGR == null
              || sampleBGR.height() != frame.height()
              || sampleBGR.width() != frame.width())
            sampleBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
          Imgproc.cvtColor(frame, sampleBGR, Imgproc.COLOR_RGB2BGR);
          useSample = true;
          // TODO: Specialize it for this group/device
          com.dappervision.wearscript.Utils.eventBusPost(
              new SendEvent("warpsample", "", ValueFactory.createRawValue(frameJPEG)));
        }
      }
    }

    if (busy) return;
    synchronized (this) {
      busy = true;
      if (!isSetup) setupMatrices();
      if (mode == Mode.CAM2GLASS) {
        Mat inputBGR;
        Mat frame = frameEvent.getCameraFrame().getRGB();
        if (frameBGR == null
            || frameBGR.height() != frame.height()
            || frameBGR.width() != frame.width())
          frameBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
        Mat hSmallToGlassMat = getHSmallToGlassMat(frame.rows(), frame.cols());
        if (hSmallToGlassMat == null) {
          Log.w(TAG, "Warp: Bad size");
          busy = false;
          return;
        }
        Imgproc.cvtColor(frame, frameBGR, Imgproc.COLOR_RGB2BGR);
        inputBGR = frameBGR;
        Imgproc.warpPerspective(
            inputBGR, frameWarp, hSmallToGlassMat, new Size(frameWarp.width(), frameWarp.height()));
        drawFrame(frameWarp);
      }
      busy = false;
    }
  }
Example #8
  public MarkerTracker(Mat image, Mat template) {
    this.image = image;
    this.template = template;
    Log.i("Marker-Tracker", "image is null?::" + (null == image));

    imgGray = new Mat(image.size(), image.type());
    templGray = new Mat(template.size(), template.type());
    // Convert them to grayscale
    Imgproc.cvtColor(image, imgGray, Imgproc.COLOR_BGRA2GRAY);
    //  Core.normalize(imgGray, imgGray, 0, 255, Core.NORM_MINMAX);

    // Mat	grayImage02 = new Mat(image02.rows(), image02.cols(), image02.type());
    Imgproc.cvtColor(template, templGray, Imgproc.COLOR_BGRA2GRAY);
    //        Core.normalize(templGray, templGray, 0, 255, Core.NORM_MINMAX);

  }
Example #9
  public void init_sd_imgs() {
    try {
      if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
        File sdCardDir = Environment.getExternalStorageDirectory();
        String str_sd_dir = sdCardDir.getCanonicalPath();
        String str_path = str_sd_dir + "/andrcvs/";
        File dir = new File(str_path);
        Toast.makeText(MainActivity.this, str_path, Toast.LENGTH_LONG).show();

        if (!dir.exists()) {
          dir.mkdirs();
          for (int idx = 0; idx < res_imgs.length; idx++) {
            Bitmap bmp = BitmapFactory.decodeResource(getResources(), res_imgs[idx]);
            Mat mat_rgb = new Mat();
            Utils.bitmapToMat(bmp, mat_rgb);
            String str_idx = String.format("%04d", idx + 1);
            String str_fn = str_path + "image_" + str_idx + ".jpg";
            // bitmapToMat yields RGBA; convert to BGR channel order before writing with imwrite
            Imgproc.cvtColor(mat_rgb, mat_rgb, Imgproc.COLOR_RGBA2BGR);
            Imgcodecs.imwrite(str_fn, mat_rgb);
          }
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
Example #10
  @Override
  public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // Imgproc.bilateralFilter(inputFrame.gray(), inputFrame.rgba(), 9, 75, 75);

    /* if (captureBackground) {
        captureBackground = false;
        background = new Mat(inputFrame.gray(), new Rect(0, 0, inputFrame.gray().width(), inputFrame.gray().height()));
    }

    if (background == null) {
        return inputFrame.rgba();
    }


    return background;*/

    background = inputFrame.gray();

    Imgproc.cvtColor(background, mRgb, Imgproc.COLOR_GRAY2RGB);

    /*Imgproc.erode(mFGMask, mFGMask, new Mat());
    Imgproc.dilate(mFGMask, mFGMask, new Mat());

    Imgproc.findContours(mFGMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);
    Imgproc.drawContours(mRgb, contours, -1, new Scalar(255, 0, 0), 2);*/

    return mRgb;
  }
  private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    Mat pointMatRgba = new Mat();
    Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);

    return new Scalar(pointMatRgba.get(0, 0));
  }
  public void setHsvColor(Scalar hsvColor) {
    double minH =
        (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0] - mColorRadius.val[0] : 0;
    double maxH =
        (hsvColor.val[0] + mColorRadius.val[0] <= 255)
            ? hsvColor.val[0] + mColorRadius.val[0]
            : 255;

    mLowerBound.val[0] = minH;
    mUpperBound.val[0] = maxH;

    mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
    mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];

    mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
    mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];

    mLowerBound.val[3] = 0;
    mUpperBound.val[3] = 255;

    Mat spectrumHsv = new Mat(1, (int) (maxH - minH), CvType.CV_8UC3);

    for (int j = 0; j < maxH - minH; j++) {
      byte[] tmp = {(byte) (minH + j), (byte) 255, (byte) 255};
      spectrumHsv.put(0, j, tmp);
    }

    Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
  }
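A minimal sketch of how bounds like mLowerBound and mUpperBound are typically consumed afterwards (an assumption, not shown in the original class): Core.inRange produces a binary mask of the pixels that fall inside the chosen HSV radius.

    // hsvFrame is assumed to be an 8UC3 image already converted to HSV
    Mat mask = new Mat();
    Core.inRange(hsvFrame, mLowerBound, mUpperBound, mask);
    // non-zero pixels in mask lie within the radius selected in setHsvColor()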
  @Override
  public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    // the cascade classifier below expects a single-channel grayscale image
    Imgproc.cvtColor(mRgba, grayScaleImage, Imgproc.COLOR_RGBA2GRAY);

    MatOfRect faces = new MatOfRect();

    // detect faces
    if (cascadeClassifier != null) {
      cascadeClassifier.detectMultiScale(
          grayScaleImage,
          faces,
          1.1,
          2,
          2,
          new Size(absoluteFaceSize, absoluteFaceSize),
          new Size());
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(0, 255, 0, 255), 3);

    if (facesArray.length > 0) {
      facesInASecond.add(true);
    } else {
      facesInASecond.add(false);
    }

    return mRgba;
  }
Example #14
  public static void main(String[] args) {
    try {
      System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

      File input = new File("traffic_signal.jpg");
      BufferedImage image = ImageIO.read(input);

      byte[] data = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
      Mat mat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC3);
      mat.put(0, 0, data);

      Mat mat1 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC1);
      // the BufferedImage raster for a JPEG is in BGR order, so use the BGR conversion code
      Imgproc.cvtColor(mat, mat1, Imgproc.COLOR_BGR2GRAY);

      byte[] data1 = new byte[mat1.rows() * mat1.cols() * (int) (mat1.elemSize())];
      mat1.get(0, 0, data1);
      BufferedImage image1 =
          new BufferedImage(mat1.cols(), mat1.rows(), BufferedImage.TYPE_BYTE_GRAY);
      image1.getRaster().setDataElements(0, 0, mat1.cols(), mat1.rows(), data1);

      File output = new File("output\\grayscale_" + new Date().getTime() + ".jpg");
      ImageIO.write(image1, "jpg", output);
    } catch (Exception e) {
      System.out.println("Error: " + e.getMessage());
    }
  }
Example #15
    @Override
    protected Void doInBackground(Void... params) {
      publishProgress(0);
      Mat hsvImg = heatmap(data, progressDialog);
      Mat finishedImage = new Mat();
      Imgproc.cvtColor(hsvImg, finishedImage, Imgproc.COLOR_HSV2BGR);

      File mediaStorageDir =
          new File(Environment.getExternalStorageDirectory().getPath(), "images/Colored_Images");

      if (!mediaStorageDir.exists()) {
        if (!mediaStorageDir.mkdirs()) {
          Log.e(TAG, "failed to create directory");
          return null;
        }
      }

      String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
      Log.v(
          TAG,
          "SAVING: " + mediaStorageDir.getPath() + File.separator + "scan_" + timeStamp + ".jpg");
      Highgui.imwrite(
          mediaStorageDir.getPath() + File.separator + "scan_" + timeStamp + ".jpg", finishedImage);

      return null;
    }
Example #16
    public void run() {
      do {
        synchronized (JavaCameraView.this) {
          try {
            JavaCameraView.this.wait();
          } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
          }
        }

        if (!mStopThread) {
          switch (mPreviewFormat) {
            case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
              Imgproc.cvtColor(mBaseMat, mFrameChain[mChainIdx], Imgproc.COLOR_YUV2RGBA_NV21, 4);
              break;
            case Highgui.CV_CAP_ANDROID_GREY_FRAME:
              mFrameChain[mChainIdx] = mBaseMat.submat(0, mFrameHeight, 0, mFrameWidth);
              break;
            default:
              Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
          }
          if (!mFrameChain[mChainIdx].empty()) deliverAndDrawFrame(mFrameChain[mChainIdx]);
          mChainIdx = 1 - mChainIdx;
        }
      } while (!mStopThread);
      Log.d(TAG, "Finish processing thread");
    }
Example #17
File: Square.java  Project: emre801/PokeEye
  private static Mat findLargestRectangle(Mat original_image) {
    Mat imgSource = original_image.clone();

    // convert the image to grayscale
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    // detect edges with Canny (produces an 8-bit binary edge map)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    // apply gaussian blur to smoothen lines of dots
    Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    MatOfPoint temp_contour;
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    for (int idx = 0; idx < contours.size(); idx++) {
      temp_contour = contours.get(idx);
      double contourarea = Imgproc.contourArea(temp_contour);
      // compare this contour to the previous largest contour found
      if (contourarea > maxArea) {
        // check if this contour is a square
        MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
        int contourSize = (int) temp_contour.total();
        Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
        if (approxCurve.total() == 4) {
          maxArea = contourarea;
          largest_contours.add(temp_contour);
        }
      }
    }
    if (largest_contours.isEmpty()) {
      // no quadrilateral contour was found
      return imgSource;
    }
    MatOfPoint temp_largest = largest_contours.get(largest_contours.size() - 1);
    largest_contours = new ArrayList<MatOfPoint>();

    largest_contours.add(temp_largest);

    // Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);
    Imgproc.drawContours(original_image, largest_contours, -1, new Scalar(0, 255, 0), 10);

    // Mat perspectiveTransform = new Mat(3, 3, CvType.CV_32FC1);
    // Imgproc.warpPerspective(original_image, imgSource,
    // perspectiveTransform, new Size(300,300));

    Highgui.imwrite(output, original_image);

    // create the new image here using the largest detected square

    // Toast.makeText(getApplicationContext(), "Largest Contour: ",
    // Toast.LENGTH_LONG).show();

    return imgSource;
  }
 @Override
 public final Mat frame(Mat rgba, Mat gray) {
   if (!opModeStarted) return rgba;
   this.rgba = super.frame(rgba, gray);
   Imgproc.cvtColor(rgba, this.gray, Imgproc.COLOR_RGBA2GRAY);
   hasNewFrame = true;
   return rgba;
 }
Example #19
  public MarkerTracker(Mat image) {
    this.image = image;

    imgGray = new Mat(image.size(), image.type());
    // Convert them to grayscale
    Imgproc.cvtColor(image, imgGray, Imgproc.COLOR_BGRA2GRAY);
    //  Core.normalize(imgGray, imgGray, 0, 255, Core.NORM_MINMAX);

  }
Example #20
  public static Mat[] getMatArray(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
    Mat rgba = new Mat(height, width, CvType.CV_8UC3); // COLOR_YUV2RGB_NV21 below produces a 3-channel RGB image
    yuv.put(0, 0, data);
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGB_NV21);

    Mat rgbaResult = rotateMat(rgba);
    Mat grayResult = new Mat(rgbaResult.height(), rgbaResult.width(), CvType.CV_8UC1);
    Imgproc.cvtColor(rgbaResult, grayResult, Imgproc.COLOR_RGB2GRAY); // rgbaResult holds RGB data

    yuv.release();
    rgba.release();
    Mat[] result = {grayResult, rgbaResult};
    return result;
  }
 private void copyThresholdedImageToRgbImgMat(Mat thresholdedImgMat, Mat dest) {
   // convert thresholded image segment to RGB.
   Mat smallRegionImg = new Mat();
   Imgproc.cvtColor(thresholdedImgMat, smallRegionImg, Imgproc.COLOR_GRAY2BGRA, 4);
   // find location of image segment to be replaced in the destination image.
   Rect rect = calculateImageSegmentArea(dest);
   Mat destSubmat = dest.submat(rect.y, rect.y + rect.height, rect.x, rect.x + rect.width);
   // copy image.
   smallRegionImg.copyTo(destSubmat);
   smallRegionImg.release();
 }
Example #22
    @Override
    protected Void doInBackground(final Void... unused) {

      Wrapper wrapper = new Wrapper();

      try {
        serverAddress = InetAddress.getByName(Constants.SERVER_IP);
        serverSocket = new Socket();
        serverSocket.connect(
            new InetSocketAddress(Constants.SERVER_IP, Constants.SERVER_PORT), 5000);
      } catch (Exception e) {
        e.printStackTrace();
      }

      wrapper.type = 0;
      wrapper.status = serverSocket.isConnected();
      publishProgress(wrapper);

      try {
        Thread.sleep(500);

        dataInputStream = new DataInputStream(serverSocket.getInputStream());
        dataOutputStream = new DataOutputStream(serverSocket.getOutputStream());

        wrapper.type = 1;

        while (serverSocket.isConnected()) {
          bytes = 0;

          size = dataInputStream.readInt();
          data = new byte[size];

          for (int i = 0; i < size; i += bytes) {
            bytes = dataInputStream.read(data, i, size - i);
          }

          buff = new Mat(1, size, CvType.CV_8UC1);
          buff.put(0, 0, data);

          rev = Highgui.imdecode(buff, Highgui.CV_LOAD_IMAGE_UNCHANGED);

          Imgproc.cvtColor(rev, ret, Imgproc.COLOR_RGB2BGR);

          wrapper.img = ret;
          publishProgress(wrapper);
          Thread.sleep(75);
        }

      } catch (Exception e) {
        e.printStackTrace();
      }

      return null;
    }
Example #23
  /**
   * @param image input BGR image
   * @param size upper Canny threshold; the lower threshold is size / 3
   * @return Mat containing the detected edges
   */
  public static Mat cannyDetection(Mat image, int size) {

    Mat grayImage = new Mat();
    Mat detectedEdges = new Mat();
    // convert to grayscale
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    // reduce noise with a 3x3 kernel
    Imgproc.blur(grayImage, detectedEdges, new Size(3, 3));
    // Canny detector; the thresholds are used as lower = size / 3 and upper = size (3:1 ratio)
    Imgproc.Canny(detectedEdges, detectedEdges, size, size / 3, 7, false);
    return detectedEdges;
  }
Example #24
  public static Mat Histogram(Mat im) {

    Mat img = im;

    Mat equ = new Mat();
    img.copyTo(equ);
    // Imgproc.blur(equ, equ, new Size(3, 3));

    Imgproc.cvtColor(equ, equ, Imgproc.COLOR_BGR2YCrCb);
    List<Mat> channels = new ArrayList<Mat>();
    Core.split(equ, channels);
    Imgproc.equalizeHist(channels.get(0), channels.get(0));
    Core.merge(channels, equ);
    Imgproc.cvtColor(equ, equ, Imgproc.COLOR_YCrCb2BGR);

    Mat gray = new Mat();
    Imgproc.cvtColor(equ, gray, Imgproc.COLOR_BGR2GRAY); // grayscale of the equalized image (computed but not returned)
    Mat grayOrig = new Mat();
    Imgproc.cvtColor(img, grayOrig, Imgproc.COLOR_BGR2GRAY);
    System.out.println("Histogram work ///");
    return grayOrig; // note: the grayscale of the original image is returned, not the equalized one
  }
Example #25
 public Bitmap convertMatToBitmap(Mat seedsImage) {
   Bitmap bmp = null;
   Mat tmp = new Mat(seedsImage.height(), seedsImage.width(), CvType.CV_8U, new Scalar(4));
   try {
     // Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_RGB2BGRA);
     Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_GRAY2RGBA, 4);
     bmp = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
     Utils.matToBitmap(tmp, bmp);
   } catch (CvException e) {
     Log.d("Exception", e.getMessage());
   }
   return bmp;
 }
Example #26
  public Mat onCameraFrame(Mat inputFrame) {

    if (takeScanBase) {
      if (scanBase != null) {
        scanBase.release();
      }
      scanBase = new Mat();
      scanningMat = new Mat();

      Imgproc.cvtColor(inputFrame, scanBase, Imgproc.COLOR_RGB2GRAY);
      Imgproc.cvtColor(scanBase, scanningMat, Imgproc.COLOR_GRAY2RGB);
      scanBase.release();

      hasScanBase = true;
      takeScanBase = false;
    }

    if (hasScanBase) {
      Mat ret;

      Point p = findLaser(inputFrame);
      inputFrame.release();

      if (p != null) {
        if (hasThermal) {
          double temp = mThermal.read();
          data[(int) p.y][(int) p.x] = (float) temp;

          Core.circle(scanningMat, p, 3, new Scalar(255, 0, 0), -3);

        } else {
          // Log.v(TAG, "No thermal!");
        }
      }

      return scanningMat;
    } else return inputFrame;
  }
Example #27
  /**
   * @param source input BGR image
   * @param delta order of the derivative in the y direction passed to Imgproc.Sobel
   * @return Mat with the Sobel response rescaled to 8-bit
   */
  public static Mat Sobel(Mat source, int delta) {

    Mat grey = new Mat();
    Imgproc.cvtColor(source, grey, Imgproc.COLOR_BGR2GRAY);
    Mat sobelx = new Mat();
    Imgproc.Sobel(grey, sobelx, CvType.CV_32F, 1, delta);

    double minVal, maxVal;
    Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(sobelx);
    minVal = minMaxLocResult.minVal;
    maxVal = minMaxLocResult.maxVal;

    Mat draw = new Mat();
    sobelx.convertTo(
        draw, CvType.CV_8U, 255.0 / (maxVal - minVal), -minVal * 255.0 / (maxVal - minVal));
    return draw;
  }
Example #28
  private void prepareSpherify(Bitmap bitmap) {

    int insideCircleOutRadius;
    int topY, footY, withinHeight;

    if (!OpenCVLoader.initDebug()) {
      // Handle initialization error
      AppFunctions.showToast(activity.getApplicationContext(), "OpenGL initialization error!");
      activity.finish();
    }

    if (srcImage == null) {
      srcImage = new Mat();

      srcImage.create(bitmap.getHeight(), bitmap.getWidth(), CvType.CV_8UC3);
      Bitmap myBitmap32 = bitmap.copy(Bitmap.Config.ARGB_8888, true);
      Utils.bitmapToMat(myBitmap32, srcImage);

      Imgproc.cvtColor(srcImage, srcImage, Imgproc.COLOR_BGR2RGB, 4);
    }

    Utils.bitmapToMat(bitmap, srcImage);
    // cropImage();
    seamlessEdges(srcImage);
    mSrcWidth = srcImage.cols();
    mSrcHeight = srcImage.rows();

    halfGenImageSize = genImageSize / 2;
    spherifiedImage = new Mat();
    spherifiedImage.create(genImageSize, genImageSize, CvType.CV_8UC4);

    insideCircleOutRadius = (int) (genImageSize / 12);

    topY = (int) (mSrcHeight * (topMargin));
    footY = (int) (mSrcHeight * (footMargin));
    withinHeight = topY - footY;
    scale = withinHeight / ((double) (croppedImageSize / 2 - insideCircleOutRadius));
    offset = (int) (footY - scale * insideCircleOutRadius);
    numProcesses = Runtime.getRuntime().availableProcessors();
    if (numProcesses < 3) numProcesses = 1;
    else numProcesses = 3;
  }
Example #29
  /**
   * Generates a mask of all resistors present in the given Mat. Also displays this mask in the
   * Bottom Left frame of the GUI.
   *
   * @param imgCap The Mat image to generate the mask for
   * @param type The threshold operation type
   * @return The mask as a Mat
   */
  private Mat generateResistorMask(Mat imgCap, int type) {
    Mat imgHSV = new Mat();
    Mat satImg = new Mat();

    // convert the input image from BGR to HSV
    Imgproc.cvtColor(imgCap, imgHSV, Imgproc.COLOR_BGR2HSV);

    ArrayList<Mat> channels = new ArrayList<Mat>();
    Core.split(imgHSV, channels);
    // extract the saturation channel
    satImg = channels.get(1);

    // remove the background and the resistor leads (combined with previous blurring)
    // thresh ~86
    Imgproc.threshold(satImg, satImg, RESISTOR_MASK_THRESHOLD, 255, type);

    paintBL(satImg);

    return satImg;
  }
  @Override
  public void onOpenCVLoaded() {
    faceDetector = new FaceDetector(TrainerEditorActivity.this);

    Intent intent = getIntent();
    long frameRgbaAddr = intent.getLongExtra("frameRgbaAddr", 0);
    Mat temp = new Mat(frameRgbaAddr);
    frameRgba = temp.clone();
    frameGray = frameRgba.clone();
    Imgproc.cvtColor(frameRgba, frameGray, Imgproc.COLOR_RGBA2GRAY); // frameRgba is a 4-channel RGBA frame

    faces = faceDetector.detectFaces(frameGray);
    labels.clear();
    for (int i = 0; i < faces.size(); ++i) {
      labels.add("");
    }

    TrainerEditorAdapter trainerEditorAdapter =
        new TrainerEditorAdapter(this, faces, labels, frameRgba);
    listView.setAdapter(trainerEditorAdapter);
  }