private static double perceptualHash(String sourceImagePath, String targetImagePath) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat matSrc = Imgcodecs.imread(sourceImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat matTrgt = Imgcodecs.imread(targetImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    // These first resizes keep each image at its original size (only the interpolation
    // flags differ); the real downscale to 8x8 happens just below.
    Mat matSrcResize = new Mat();
    Mat matTrgtResize = new Mat();
    Imgproc.resize(
        matSrc,
        matSrcResize,
        new Size(matSrc.width(), matSrc.height()),
        0,
        0,
        Imgproc.INTER_NEAREST);
    Imgproc.resize(
        matTrgt,
        matTrgtResize,
        new Size(matTrgt.width(), matTrgt.height()),
        0,
        0,
        Imgproc.INTER_LANCZOS4);
    // aHash preprocessing: shrink both images to 8x8 and convert them to grayscale.
    Mat matSrcDst = new Mat();
    Mat matTrgtDst = new Mat();
    Imgproc.resize(matSrcResize, matSrcDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.resize(matTrgtResize, matTrgtDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.cvtColor(matSrcDst, matSrcDst, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(matTrgtDst, matTrgtDst, Imgproc.COLOR_BGR2GRAY);

    int iAvgSrc = 0, iAvgTrgt = 0;
    int[] arrSrc = new int[64];
    int[] arrTrgt = new int[64];
    for (int i = 0; i < 8; i++) {
      byte[] dataSrc = new byte[8];
      matSrcDst.get(i, 0, dataSrc);
      byte[] dataTrgt = new byte[8];
      matTrgtDst.get(i, 0, dataTrgt);

      int tmp = i * 8;
      for (int j = 0; j < 8; j++) {
        int tmpSrc = tmp + j;
        // Mask to 0..255: Java bytes are signed, so values above 127 would otherwise
        // read as negative and corrupt the average. Then quantize to multiples of 4.
        arrSrc[tmpSrc] = (dataSrc[j] & 0xFF) / 4 * 4;
        arrTrgt[tmpSrc] = (dataTrgt[j] & 0xFF) / 4 * 4;
        iAvgSrc += arrSrc[tmpSrc];
        iAvgTrgt += arrTrgt[tmpSrc];
      }
    }

    iAvgSrc /= 64;
    iAvgTrgt /= 64;
    // Each hash bit is 1 if the pixel is at least as bright as that image's mean.
    for (int i = 0; i < 64; i++) {
      arrSrc[i] = (arrSrc[i] >= iAvgSrc) ? 1 : 0;
      arrTrgt[i] = (arrTrgt[i] >= iAvgTrgt) ? 1 : 0;
    }
    // Hamming distance between the two 64-bit hashes, mapped to a similarity in [0, 1].
    int iDiffNum = 0;
    for (int i = 0; i < 64; i++) if (arrSrc[i] != arrTrgt[i]) ++iDiffNum;

    return 1.0 - (double) iDiffNum / 64;
  }
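The return value is the fraction of matching hash bits, so 1.0 means the two 8x8 hashes are identical. A minimal, hypothetical caller might look like the following (the file paths and the 0.9 "similar" cutoff are assumptions, not part of the original example):

  public static void main(String[] args) {
    // Hypothetical image paths; any two readable images will do.
    double similarity = perceptualHash("/tmp/a.jpg", "/tmp/b.jpg");
    System.out.printf("aHash similarity: %.3f%n", similarity);
    // Fewer than ~6 differing bits (similarity above roughly 0.9) is a common rule of
    // thumb for "visually similar", but the threshold is application-specific.
    if (similarity > 0.9) {
      System.out.println("images look similar");
    }
  }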
Example #2
  public static void draw_opticflow(Mat flow, Mat cflowmap, int step, Scalar color) {
    int i = 0, j = 0;
    for (int y = 0; y < cflowmap.rows(); y += step) {
      for (int x = 0; x < cflowmap.cols(); x += step) {

        // For testing: sample the flow at the fixed point (50,50) from the static
        // Sample1View.flow rather than the 'flow' parameter at (x,y).
        double[] fxy = Sample1View.flow.get(50, 50);

        // fxy does change between frames, but once 50 is added to it the value
        // seems to collapse to zero(?).
        i = (int) Math.round(fxy[0]);
        j = (int) (50.00 + fxy[1]);

        Core.putText(
            cflowmap,
            Double.toString(j),
            new Point(10, 100),
            3 /* CV_FONT_HERSHEY_COMPLEX */,
            2,
            new Scalar(255, 0, 0, 255),
            3);

        // Core.line(cflowmap, new Point(x,y), new Point(i,j), new Scalar(0,0,255), 4);
        // Core.circle(cflowmap, new Point(i,j), 2, new Scalar(0,0,255), -1);
        // Core.circle(cflowmap, new Point(x,y), 2, color, -1);
        // System.out.print("fxy"+i+" "+j+"\n");

        // Core.circle(cflowmap, new Point(cflowmap.width(), 50), 5, new Scalar(255,255,255), -1);
        if (i < (cflowmap.width() / 2) && i != 0) {
          Core.circle(cflowmap, new Point(i, j), 2, new Scalar(0, 0, 255), -1);
          Core.putText(
              cflowmap,
              "LEFT",
              new Point(10, 100),
              3 /* CV_FONT_HERSHEY_COMPLEX */,
              2,
              new Scalar(255, 255, 255, 255),
              3);

        } else if (i > (cflowmap.width() / 2) && i != x) {
          Core.circle(cflowmap, new Point(i, j), 2, new Scalar(0, 0, 255), -1);
          Core.putText(
              cflowmap,
              "RIGHT",
              new Point(10, 100),
              3 /* CV_FONT_HERSHEY_COMPLEX */,
              2,
              new Scalar(255, 255, 255, 255),
              3);
        }
      }
    }
  }
Example #3
  /* Checks whether the point lies in the
   * right-hand region of the image.
   */
  private boolean isInR(Point center) {
    // Top-left corner of the region
    Point p0 = new Point();
    p0.x = (mRgba.width() / 4) * 3;
    p0.y = 0;

    // Bottom-right corner of the region
    Point p1 = new Point();
    p1.x = mRgba.width();
    p1.y = mRgba.height();

    return (p0.x <= center.x && center.x <= p1.x) && (p0.y <= center.y && center.y <= p1.y);
  }
Example #4
  public void processFrame(CameraEvents.Frame frameEvent) {
    if (service.getActivityMode() != ActivityEvent.Mode.WARP) return;
    if (mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) {
      synchronized (this) {
        if (!isSetup) setupMatrices();
        if (captureSample) {
          captureSample = false;
          Log.d(TAG, "Warp: Capturing Sample");
          Mat frame = frameEvent.getCameraFrame().getRGB();
          byte[] frameJPEG = frameEvent.getCameraFrame().getJPEG();
          if (sampleBGR == null
              || sampleBGR.height() != frame.height()
              || sampleBGR.width() != frame.width())
            sampleBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
          Imgproc.cvtColor(frame, sampleBGR, Imgproc.COLOR_RGB2BGR);
          useSample = true;
          // TODO: Specialize it for this group/device
          com.dappervision.wearscript.Utils.eventBusPost(
              new SendEvent("warpsample", "", ValueFactory.createRawValue(frameJPEG)));
        }
      }
    }

    if (busy) return;
    synchronized (this) {
      busy = true;
      if (!isSetup) setupMatrices();
      if (mode == Mode.CAM2GLASS) {
        Mat inputBGR;
        Mat frame = frameEvent.getCameraFrame().getRGB();
        if (frameBGR == null
            || frameBGR.height() != frame.height()
            || frameBGR.width() != frame.width())
          frameBGR = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
        Mat hSmallToGlassMat = getHSmallToGlassMat(frame.rows(), frame.cols());
        if (hSmallToGlassMat == null) {
          Log.w(TAG, "Warp: Bad size");
          busy = false;
          return;
        }
        Imgproc.cvtColor(frame, frameBGR, Imgproc.COLOR_RGB2BGR);
        inputBGR = frameBGR;
        Imgproc.warpPerspective(
            inputBGR, frameWarp, hSmallToGlassMat, new Size(frameWarp.width(), frameWarp.height()));
        drawFrame(frameWarp);
      }
      busy = false;
    }
  }
Example #5
  void drawFrame(Mat modified) {
    // Partly from OpenCV CameraBridgeViewBase.java
    if (mCacheBitmap == null) {
      mCacheBitmap =
          Bitmap.createBitmap(modified.width(), modified.height(), Bitmap.Config.ARGB_8888);
    }
    boolean bmpValid = true;
    if (modified != null) {
      try {
        Utils.matToBitmap(modified, mCacheBitmap);
      } catch (Exception e) {
        Log.e(TAG, "Mat type: " + modified);
        Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
        Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
        bmpValid = false;
      }
    }
    if (bmpValid && mCacheBitmap != null) {
      Canvas canvas = view.getHolder().lockCanvas();
      if (canvas != null) {
        canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
        canvas.drawBitmap(
            mCacheBitmap,
            new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
            new Rect(
                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
                (canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
                (canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()),
            null);

        view.getHolder().unlockCanvasAndPost(canvas);
      }
    }
  }
Example #6
  public List<ArtifactDetectedObject> detectObjects(BufferedImage bImage) {
    List<ArtifactDetectedObject> detectedObjectList = new ArrayList<>();
    Mat image = OpenCVUtils.bufferedImageToMat(bImage);
    if (image != null) {
      MatOfRect faceDetections = new MatOfRect();
      double width = image.width();
      double height = image.height();
      for (CascadeClassifierHolder objectClassifier : objectClassifiers) {
        objectClassifier.cascadeClassifier.detectMultiScale(image, faceDetections);

        // detectMultiScale returns pixel rectangles; normalize them to [0, 1] coordinates.
        for (Rect rect : faceDetections.toArray()) {
          ArtifactDetectedObject detectedObject =
              new ArtifactDetectedObject(
                  rect.x / width,
                  rect.y / height,
                  (rect.x + rect.width) / width,
                  (rect.y + rect.height) / height,
                  objectClassifier.conceptIRI,
                  PROCESS);
          detectedObjectList.add(detectedObject);
        }
      }
    }
    return detectedObjectList;
  }
Example #7
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);
    Point center = new Point(mRgba.width() / 2, mRgba.height() / 2);
    double angle = -90; // positive angles are counter-clockwise in OpenCV, so -90 rotates clockwise
    double scale = 1.0;

    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    // Despite its name, mGrayMat here simply receives the rotated color frame.
    Imgproc.warpAffine(mRgba, mGrayMat, mapMatrix, mRgba.size(), Imgproc.INTER_LINEAR);
    return mGrayMat;
  }
Example #8
 @Override
 public Mat process(Mat input) {
   Bitmap resultBitmap = Bitmap.createBitmap(input.width(), input.height(), Bitmap.Config.RGB_565);
   Utils.matToBitmap(input, resultBitmap);
   try {
     writeBitmapToStorage(resultBitmap);
   } catch (IOException e) {
     Log.e(this.getClass().getName(), e.getMessage());
   }
   return input;
 }
Example #9
 public void onEventAsync(WarpHEvent event) {
   double[] h = event.getH();
   synchronized (this) {
     if ((mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) && useSample) {
       if (!isSetup) setupMatrices();
        double[] hSmallToGlass = getHSmallToGlass(sampleBGR.height(), sampleBGR.width());
       if (hSmallToGlass == null) {
         Log.w(TAG, "Warp: Bad size");
         return;
       }
       Log.d(TAG, "Warp: WarpHEvent");
       if (mode == Mode.SAMPLEWARPGLASS) {
         h = HMult(hSmallToGlass, h);
       }
       Mat hMat = HMatFromArray(h);
       Imgproc.warpPerspective(
           sampleBGR, frameWarp, hMat, new Size(frameWarp.width(), frameWarp.height()));
       drawFrame(frameWarp);
     }
   }
 }
Example #10
 public Bitmap convertMatToBitmap(Mat seedsImage) {
   Bitmap bmp = null;
   Mat tmp = new Mat(seedsImage.height(), seedsImage.width(), CvType.CV_8U, new Scalar(4));
   try {
     // Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_RGB2BGRA);
     Imgproc.cvtColor(seedsImage, tmp, Imgproc.COLOR_GRAY2RGBA, 4);
     bmp = Bitmap.createBitmap(tmp.cols(), tmp.rows(), Bitmap.Config.ARGB_8888);
     Utils.matToBitmap(tmp, bmp);
   } catch (CvException e) {
     Log.d("Exception", e.getMessage());
   }
   return bmp;
 }
Example #11
    private void interestingThumbnail(Bitmap image) {
      // image into OpenCV
      Mat imageMat = new Mat();
      bitmapToMat(image, imageMat);

      // find slice of interest
      /*Mat rowSums = new Mat(imageMat.height(), 1, CvType.CV_16U);
      for(int y = (int)(0.1*image.getHeight()); y < image.getHeight(); y++) {
          Mat row = new Mat(imageMat, new Rect(0, 0, imageMat.width(), imageMat.height()));
          rowSums.put(y, 1, (int)Core.sumElems(row).val[0]);
      }

      final int binSize = 32;
      double current = 0;
      int startPos = 0;
      for(int y = 0; y < rowSums.cols() - binSize; y++) {
          Mat binned = new Mat(rowSums, new Rect(y, 0, binSize, 1));
          double average = Core.sumElems(binned).val[0] / binSize;

          if(average > current)
              startPos = y;
          current = average;
      }

      // ensure not out of bounds
      if(startPos > imageMat.height() - binSize) startPos = imageMat.height() - binSize;

      // get slice of interest
      Rect roi = new Rect(0, startPos, imageMat.width(), binSize);
      Mat croppedRef = new Mat(imageMat, roi);
      Mat croppedMat = new Mat(croppedRef.width(), croppedRef.height(), croppedRef.type());
      croppedRef.copyTo(croppedMat);*/

      // image out of OpenCV
      // thumbnail = Bitmap.createBitmap(roi.width, roi.height, image.getConfig());
      thumbnail = Bitmap.createBitmap(imageMat.width(), 64, image.getConfig());
      thumbnail.eraseColor(
          Color.argb(
              255,
              (int) (Math.random() * 255),
              (int) (Math.random() * 255),
              (int) (Math.random() * 255)));
      // matToBitmap(croppedRef, thumbnail);
    }
Example #12
 public void onEvent(WarpDrawEvent event) {
   synchronized (this) {
     if ((mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) && useSample) {
       if (!isSetup) setupMatrices();
        double[] hGlassToSmall = getHGlassToSmall(sampleBGR.height(), sampleBGR.width());
       if (hGlassToSmall == null) {
         Log.w(TAG, "Warp: Bad size");
         return;
       }
        double[] circleCenter = {event.getX(), event.getY(), 1};
        double[] circleCenterSmall = HMultPoint(hGlassToSmall, circleCenter);
       Core.circle(
           sampleBGR,
           new Point(circleCenterSmall[0], circleCenterSmall[1]),
           event.getRadius(),
           new Scalar(event.getR(), event.getG(), event.getB()));
     }
   }
 }
Example #13
  public static Mat[] getMatArray(byte[] data, Camera camera) {
    Camera.Parameters parameters = camera.getParameters();
    int width = parameters.getPreviewSize().width;
    int height = parameters.getPreviewSize().height;

    // NV21 preview data: a full-resolution Y plane followed by interleaved VU rows,
    // which is why the YUV Mat has height + height/2 rows.
    Mat yuv = new Mat(height + height / 2, width, CvType.CV_8UC1);
    Mat rgba = new Mat(height, width, CvType.CV_8UC3);
    yuv.put(0, 0, data);
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV2RGB_NV21);

    Mat rgbaResult = rotateMat(rgba);
    Mat grayResult = new Mat(rgbaResult.height(), rgbaResult.width(), CvType.CV_8UC1);
    // The converted frame is in RGB order, so RGB2GRAY is the matching conversion.
    Imgproc.cvtColor(rgbaResult, grayResult, Imgproc.COLOR_RGB2GRAY);

    yuv.release();
    rgba.release();
    Mat[] result = {grayResult, rgbaResult};
    return result;
  }
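A minimal sketch of how this helper might be wired into a legacy camera preview callback (the callback wiring and the release calls are assumptions, not part of the original example):

  Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
      Mat[] mats = getMatArray(data, camera);
      Mat gray = mats[0]; // rotated grayscale frame
      Mat rgb = mats[1];  // rotated color frame
      // ... run detection on gray, draw results on rgb ...
      gray.release();
      rgb.release();
    }
  };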
Example #14
 public void dibujarRegiones() {
   // **** Draw the rectangle of each region (for debugging) ****
   // L: left
   Point pt1 = new Point(0, 0);
   Point pt2 = new Point(mRgba.width() / 4, mRgba.height());
   Core.rectangle(mRgba, pt1, pt2, WHITE);
   // C: center-top
   Point pt3 = new Point(mRgba.width() / 4, 0);
   Point pt4 = new Point((mRgba.width() / 4) * 3, (mRgba.height() / 4) * 3);
   Core.rectangle(mRgba, pt3, pt4, WHITE);
   // N: center-bottom
   Point pt5 = new Point(mRgba.width() / 4, (mRgba.height() / 4) * 3);
   Point pt6 = new Point((mRgba.width() / 4) * 3, mRgba.height());
   Core.rectangle(mRgba, pt5, pt6, WHITE);
   // R: right
   Point pt7 = new Point((mRgba.width() / 4) * 3, 0);
   Point pt8 = new Point(mRgba.width(), mRgba.height());
   Core.rectangle(mRgba, pt7, pt8, WHITE);
 }
Example #15
  private void seamlessEdges(Mat image) {
    int imageHeight = image.height();
    int imageWidth = image.width();
    double smoothRange = smoothValue * (imageWidth / 2000f);
    double smoothRangeHalf = smoothRange / 2;
    // Cross-fade the outermost left and right columns into each other so that the
    // image's vertical borders blend smoothly (hence "seamless edges").
    double[] pixel1;
    double[] pixel2;
    double[] tempPixel = new double[4];
    tempPixel[3] = 255; // keep alpha fully opaque

    int i, j;
    for (i = 0; i < smoothRangeHalf; i++) {
      for (j = 0; j < imageHeight; j++) {
        pixel1 = image.get(j, i);
        pixel2 = image.get(j, imageWidth - i - 1);
        tempPixel[0] =
            pixel1[0] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[0] * ((smoothRangeHalf - i) / smoothRange);
        tempPixel[1] =
            pixel1[1] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[1] * ((smoothRangeHalf - i) / smoothRange);
        tempPixel[2] =
            pixel1[2] * ((smoothRangeHalf + i) / smoothRange)
                + pixel2[2] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[0] =
            pixel2[0] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[0] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[1] =
            pixel2[1] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[1] * ((smoothRangeHalf - i) / smoothRange);
        pixel2[2] =
            pixel2[2] * ((smoothRangeHalf + i) / smoothRange)
                + pixel1[2] * ((smoothRangeHalf - i) / smoothRange);
        image.put(j, i, tempPixel);
        image.put(j, imageWidth - i - 1, pixel2);
      }
    }
  }
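A minimal way to exercise the blend, assuming a four-channel image as the 4-element pixel writes suggest (the file paths are illustrative, and smoothValue is an instance field assumed to be set elsewhere):

    Mat pano = Imgcodecs.imread("/tmp/pano.png");
    Imgproc.cvtColor(pano, pano, Imgproc.COLOR_BGR2BGRA); // the method writes 4-channel pixels
    seamlessEdges(pano); // cross-fades the outermost left/right columns in place
    Imgcodecs.imwrite("/tmp/pano_blended.png", pano);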
Example #16
  public void run() {
    ArrayList<Geometry.Quad> squares;

    Mat image = new Mat();
    Utils.bitmapToMat(source, image);

    Mat bwimage = new Mat();
    cvtColor(image, bwimage, COLOR_RGB2GRAY);

    Mat blurred = new Mat();
    medianBlur(image, blurred, 9);

    int width = blurred.width();
    int height = blurred.height();
    int depth = blurred.depth();

    // Mat's constructor takes (rows, cols, type); the initial allocation hardly matters
    // here because copyTo() reallocates gray0 to match blurred.
    Mat gray0 = new Mat(height, width, depth);
    blurred.copyTo(gray0);

    squares = new ArrayList<Geometry.Quad>();

    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
      Core.mixChannels(
          Arrays.asList(blurred), Arrays.asList(new Mat[] {gray0}), new MatOfInt(c, 0));

      // try several threshold levels
      int thresholdLevel = 8;
      for (int l = 0; l < thresholdLevel; l++) {
        // use canny instead of 0 threshold level
        // canny helps catch squares with gradient shading
        Mat gray = new Mat();

        if (l == 0) {
          Canny(gray0, gray, 10.0, 20.0, 3, false);
          Mat kernel = new Mat(11, 11, CvType.CV_8UC1, new Scalar(1));
          dilate(gray, gray, kernel);
        } else {
          Mat thresh = new Mat(gray0.rows(), gray0.cols(), gray0.type());
          threshold(gray0, thresh, ((double) l) / thresholdLevel * 255, 128, THRESH_BINARY_INV);
          cvtColor(thresh, gray, COLOR_BGR2GRAY);
        }

        // find contours and store them in a list
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        findContours(gray, contours, new Mat(), RETR_LIST, CHAIN_APPROX_SIMPLE);

        // test contours
        for (MatOfPoint contour : contours) {
          // approximate contour with accuracy proportional to the contour perimeter
          MatOfPoint2f thisContour = new MatOfPoint2f(contour.toArray());
          double arclength = 0.02 * arcLength(thisContour, true);
          MatOfPoint2f approx = new MatOfPoint2f();
          approxPolyDP(thisContour, approx, arclength, true);

          double area = contourArea(approx);
          boolean isConvex = isContourConvex(new MatOfPoint(approx.toArray()));

          if (approx.rows() == 4 && Math.abs(area) > SQUARE_SIZE && isConvex) {
            double maxCosine = 0;

            Point[] approxArray = approx.toArray();
            for (int j = 2; j < 5; j++) {
              double cosine =
                  Math.abs(angle(approxArray[j % 4], approxArray[j - 2], approxArray[j - 1]));
              maxCosine = Math.max(maxCosine, cosine);
            }

            if (maxCosine > THRESHOLD_COS) {
              squares.add(new Geometry.Quad(approxArray));
              Log.d(TAG, "area = " + area);
            }
          }
        }
      }
    }

    result = new Bundle();
    result.putParcelableArrayList("squares", squares);
    Log.d(TAG, "result created");

    finish();
  }
Example #17
  public Mat cutFrame(
      Mat image,
      double resdpi,
      int frameWidthPix,
      int frameHeightPix,
      boolean reverseImage,
      int frameNum,
      double scaleMult,
      boolean rescale,
      boolean correctrotation) {
    noCut = false;
    if (farEdge == null || farEdge.stdDev > worstEdgeStdDevAllowed) {
      System.out.println(
          "WARNING: far film edge for frame "
              + frameNum
              + " has a stdDev of "
              + (farEdge == null ? "null" : Double.toString(farEdge.stdDev))
              + " and will not be used.");

      if (sprocketEdge == null || sprocketEdge.stdDev > worstEdgeStdDevAllowed) {
        System.out.println(
            "WARNING: near film edge for frame "
                + frameNum
                + " has a stdDev of "
                + (sprocketEdge == null ? "null" : Double.toString(sprocketEdge.stdDev))
                + " and will not be used.");
        noCut = true;
        return null;
      } else
        preMapSetupUsingSprocketEdge(
            frameWidthPix, frameHeightPix, scaleMult, reverseImage, rescale, correctrotation);
    } else
      preMapSetupUsingFarEdge(
          frameWidthPix, frameHeightPix, scaleMult, reverseImage, rescale, correctrotation);

    outOfBounds = false;

    CvRaster srcraster = CvRaster.create(image.height(), image.width(), image.type());
    srcraster.loadFrom(image);
    CvRaster dstraster = CvRaster.create(frameHeightPix, frameWidthPix, srcraster.type);

    int srcwidth = srcraster.cols;
    int srcheight = srcraster.rows;

    int dstwidth = dstraster.cols;
    int dstheight = dstraster.rows;

    for (int dstRow = 0; dstRow < dstheight; dstRow++) {
      for (int dstCol = 0; dstCol < dstwidth; dstCol++) {
        Point srclocation = map(dstRow, dstCol, frameWidthPix, frameHeightPix, reverseImage);

        if (leftmostCol > srclocation.x) leftmostCol = srclocation.x;
        if (rightmostCol < srclocation.x) rightmostCol = srclocation.x;
        if (topmostRow > srclocation.y) topmostRow = srclocation.y;
        if (bottommostRow < srclocation.y) bottommostRow = srclocation.y;

        if (srclocation.y < 0
            || srclocation.y >= srcheight
            || srclocation.x < 0
            || srclocation.x >= srcwidth) {
          dstraster.zero(dstRow, dstCol);
          outOfBounds = true;
        } else dstraster.set(dstRow, dstCol, srcraster.get(srclocation.y, srclocation.x));
      }
    }

    frameCut = true;

    return dstraster.toMat();
  }
Example #18
  public static Bitmap matToBitmap(Mat m) {
    Bitmap b = Bitmap.createBitmap(m.width(), m.height(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(m, b);

    return b;
  }
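For reference, the reverse conversion with the same org.opencv.android.Utils class is equally short (a sketch, not part of the original listing):

  public static Mat bitmapToMat(Bitmap b) {
    Mat m = new Mat();
    Utils.bitmapToMat(b, m); // yields an RGBA (CV_8UC4) Mat
    return m;
  }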
Example #19
  public void btn_camera_ok(View view) {
    Log.i(TAG, "btn_camera_ok");
    if (!bPictaken) {
      ToastUtil.showShortToast(getApplicationContext(), "Please take a photo first!");
      return; // do not forget!
    }
    // Doing this asynchronously caused a runtime exception ("method called after release()"),
    // probably because the activity is released before the background thread returns.
    final Mat image = Highgui.imread(filePath);
    int width = image.width();
    int height = image.height();
    if (width > height) { // landscape orientation: rotate the picture to portrait
      Core.flip(image.t(), image, 0); // transpose + vertical flip = rotate 90 degrees counter-clockwise
    }
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY); // gray
    Imgproc.resize(image, image, new Size(CommonUtil.IMAGE_WIDTH, CommonUtil.IMAGE_HEIGHT));
    int total = 0;
    String stotal = CommonUtil.userProps.getProperty("total");
    if (null != stotal) { // there are existing users
      total = Integer.parseInt(stotal);
    }
    if (userid <= 0) { // this user does not exist yet
      userid = total + 1;
      try { // save new data!
        CommonUtil.userProps.setProperty("total", String.valueOf(userid));
        CommonUtil.userProps.setProperty(String.valueOf(userid), name);
        CommonUtil.saveUserProperties(CommonUtil.userProps);
      } catch (Exception e) {
        e.printStackTrace();
      }
      // create a folder for this user
      File userfolder =
          new File(
              CommonUtil.USERFOLDER.getAbsolutePath() + File.separator + String.valueOf(userid));
      if (!userfolder.exists()) {
        userfolder.mkdir();
      }
    }
    filePath =
        CommonUtil.USERFOLDER.getAbsolutePath()
            + File.separator
            + String.valueOf(userid)
            + File.separator
            + System.currentTimeMillis()
            + ".jpg"; // folder (user / userid)
    Highgui.imwrite(filePath, image);
    // save data to facedata.txt
    String data = filePath + ";" + userid + "\n"; // user image file path;user id
    try {
      RandomAccessFile facedataFile =
          new RandomAccessFile(
              CommonUtil.SDFOLDER + File.separator + CommonUtil.FACEDATA_FILENAME, "rw");
      facedataFile.seek(facedataFile.length());
      facedataFile.write(data.getBytes());
      facedataFile.close();
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }
    Log.i(TAG, "image process ok");

    // add this pic to the model data
    new AsyncTask<Void, Void, Boolean>() {

      @Override
      protected Boolean doInBackground(Void... params) {
        xface.addImage(image, userid); // TODO: how to determine whether adding the image succeeded?
        return true;
      }

      @Override
      protected void onPostExecute(Boolean result) {
        if (result) {
          ToastUtil.showShortToast(getApplicationContext(), "Photo saved and model built successfully!");
        } else {
          ToastUtil.showShortToast(getApplicationContext(), "Photo saved, but building the model failed!");
        }
        btn_camera_ok.setEnabled(true);
      }

      @Override
      protected void onPreExecute() {
        ToastUtil.showShortToast(getApplicationContext(), "Saving photo...");
        btn_camera_ok.setEnabled(false); // prevent the user from saving two images at the same time
      }
    }.execute();
  }