Example no. 1
0
  /**
   * Applies automatic gamma correction to {@code img} in place, deriving the gamma
   * exponent from the mean and standard deviation of the image's gray-scale pixels.
   *
   * @param img the image to correct; mutated by {@code applyGamma} and returned
   * @return the same {@code img} instance, gamma-corrected
   */
  public IplImage correctGamma(IplImage img) {
    // Single-channel gray copy used only to compute intensity statistics.
    IplImage gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
    try {
      cvCvtColor(img, gray, CV_RGB2GRAY);
      BufferedImage buffGray = gray.getBufferedImage();

      // Flatten every pixel into one array for the statistics helpers.
      double[] grayArr = new double[gray.width() * gray.height()];
      int counter = 0;
      for (int i = 0; i < gray.width(); i++) {
        for (int j = 0; j < gray.height(); j++) {
          // NOTE(review): getRGB returns a packed ARGB int, not a 0-255 (or 0-1)
          // intensity, so the mean/sd below are over packed values and the
          // `imgMean > 0.5` test is effectively comparing against packed data —
          // confirm this matches the intended gamma heuristic.
          grayArr[counter] = buffGray.getRGB(i, j);
          counter++;
        }
      }

      double imgSd = new StandardDeviation().evaluate(grayArr);
      double imgMean = new Mean().evaluate(grayArr);

      // Gamma > 1 for a "bright" mean, reciprocal otherwise.
      double y;
      if (imgMean > 0.5) {
        y = 1 + (Math.abs(0.5 - imgMean) / imgSd);
      } else {
        y = 1 / (1 + (Math.abs(0.5 - imgMean) / imgSd));
      }

      img.applyGamma(y);
      return img;
    } finally {
      // Release the native scratch image; the original leaked it on every call.
      gray.release();
    }
  }
  public videoPanel() {
    cr = new CameraReader();
    cr.Start();
    this.size = new Dimension(cr.getColorFrame().width() + 20, cr.getColorFrame().height() + 50);

    iv = cr.getColorFrame();
    this.add(new JLabel(new ImageIcon(iv.getBufferedImage())));

    // this.run();
  }
 /**
  * Background loop: keeps grabbing frames from {@code grabber} and publishes the
  * latest one in {@code img} until {@code isShutdown} is set by another thread.
  */
 public void run() {
   isShutdown = false;
   while (!isShutdown) {
     try {
       IplImage image = grabber.grab();
       // grab() may yield null when no frame is ready; keep the previous frame.
       if (image != null) {
         img = image.getBufferedImage();
       }
     } catch (Exception e) {
       e.printStackTrace();
     }
   }
 }
  /**
   * Starts the frame grabber, primes {@code img} with a first frame, and launches
   * the background reader thread ({@link #run()}) that keeps it updated.
   * Returns early (thread not started) if the grabber fails to start.
   */
  public void start() {
    try {
      grabber.start();
      grabber.setFrameRate(125);
      System.out.println(grabber.getFrameRate());
      System.out.println(grabber.getFormat());
      System.out.println(grabber.getPixelFormat());
      System.out.println(grabber.getSampleRate());
    } catch (com.googlecode.javacv.FrameGrabber.Exception e1) {
      e1.printStackTrace();
      return;
    }

    try {
      IplImage image = grabber.grab();
      // grab() may return null before the first frame arrives; the original
      // dereferenced it unconditionally and could throw NullPointerException.
      // run() already guards for this — keep the two paths consistent.
      if (image != null) {
        img = image.getBufferedImage();
      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    webcamReaderThread = new Thread(this, "WebcamReaderThread");
    webcamReaderThread.start();
  }
  /**
   * Runs Haar-cascade face detection on {@code image}, records the largest detected
   * face's bounding box in the static {@code FaceScanner} fields, and optionally draws
   * green rectangles around every detection (when {@code FaceScanner.displayRects}).
   *
   * @param image the frame to scan; mutated in place when rectangles are drawn
   * @return {@code image} if a sufficiently large face (>= 130px in either dimension)
   *     was found, otherwise {@code null}
   * @throws Exception propagated from image conversion
   */
  // NOTE(review): the classifier is re-loaded from CASCADE_FILE and a new
  // CvMemStorage is created on every call, and neither is released here —
  // likely a native-memory leak and a per-frame performance cost; verify
  // against the caller's lifecycle before changing.
  public IplImage DetectFaces(IplImage image) throws Exception {

    // Converts the image to gray scale for detection to work, using the same dimensions as the
    // original.
    IplImage grayImage = IplImage.createFrom(convertColorToGray(image.getBufferedImage()));

    CvMemStorage storage = CvMemStorage.create();

    // Using the cascade file, this creates a classification for what objects to detect. In our case
    // it is the anterior of the face.
    CvHaarClassifierCascade classifier = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));

    // Detect Haar-like objects, depending on the classifier. In this case we use a classifier for
    // detecting the anterior of the face.
    // (scaleFactor 1.1, minNeighbors 1, flags 0; the returned CvSeq is backed by `storage`.)
    CvSeq faces = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 1, 0);

    // Initialize the static variables in FaceScanner for determining the area to crop the largest
    // detected face.
    FaceScanner.height = 0;
    FaceScanner.width = 0;
    FaceScanner.x = 0;
    FaceScanner.y = 0;

    // Loop through all detected faces and save the largest (closest) face.
    // "Largest" is judged by width alone; height/x/y are copied from the same rect.
    for (int i = 0; i < faces.total(); i++) {
      CvRect rect = new CvRect(cvGetSeqElem(faces, i));
      if (FaceScanner.width < rect.width()) {
        FaceScanner.width = rect.width();
        FaceScanner.height = rect.height();
        FaceScanner.x = rect.x();
        FaceScanner.y = rect.y();
      }

      if (FaceScanner.displayRects) {
        /*Uncomment to draw the rectangles around the detected faces.*/
        // if(rect.width() > 130 && rect.height() > 130){
        // Draw a square around the detected face.
        cvRectangle(
            image,
            cvPoint(rect.x(), rect.y()),
            cvPoint(rect.x() + rect.width(), rect.y() + rect.height()),
            CvScalar.GREEN,
            2,
            CV_AA,
            0);
        // }
        /*-----------------------------------------------------------*/
      }
    }

    // Checks that there was a detected face in the image before saving. Also, the detected "face"
    // must be large enough to be considered
    // a detected face. This is to limit the amount of erroneous detections. This saves the full
    // size image with detections drawn on
    // whole image before cropping.
    if (!(FaceScanner.height == 0 && FaceScanner.width == 0)
        && !(FaceScanner.height < 130 && FaceScanner.width < 130)) {
      // Save the image with rectangles.
      // cvSaveImage(filename.replace(".png", "-Rect.png"), image);
    } else {
      return null;
    }

    return image;
  }
 /**
  * Returns a copy of {@code img} shrunk by the given integer factor (integer
  * division of both dimensions).
  */
 public IplImage scale(IplImage img, int scale) {
   int targetWidth = img.width() / scale;
   int targetHeight = img.height() / scale;
   BufferedImage shrunk = resize(img.getBufferedImage(), targetWidth, targetHeight);
   return new ImageConverter().convertRGB(shrunk);
 }
Example no. 7
0
 /**
  * Renders {@code node} onto a native copy of {@code input} and returns the
  * painted result as a {@link BufferedImage}.
  */
 public static BufferedImage paint(BufferedImage input, Block node) {
   IplImage canvas = IplImage.createFrom(input);
   paint(canvas, node);
   BufferedImage painted = canvas.getBufferedImage();
   return painted;
 }