public BlobTracker(int srcWidth, int srcHeight) {
    super(srcWidth, srcHeight);

    blur = new Blur();

    String mapFileName = ElProps.THE_PROPS.getProperty("regionMap", "regionMap.png");
    regionMap = new RegionMap(mapFileName);

    //			grayImage = new BufferedImage(w,h,BufferedImage.TYPE_USHORT_GRAY);
    grayImage = IplImage.create(w, h, IPL_DEPTH_8U, 1);
    //		scaledImage = new BufferedImage(w,h,BufferedImage.TYPE_USHORT_GRAY);

    diffImage = IplImage.create(w, h, IPL_DEPTH_8U, 1);
    blurImage = IplImage.create(w, h, IPL_DEPTH_8U, 1);
    threshImage = IplImage.create(w, h, IPL_DEPTH_8U, 1);

    background = new BackgroundImage(.001, 15);

    blobs = new Blobs(srcWidth, srcHeight, regionMap);
    tracker = new Tracker[regionMap.size()];
    for (int i = 0; i < regionMap.size(); i++) {
      tracker[i] = new Tracker(ElProps.THE_PROPS, regionMap.getRegion(i));
      tracker[i].start();
    }
  }
  /**
   * Receives a frame and uses face-detection algorithms to locate faces.
   *
   * @param originalImage the frame to scan for faces
   * @return list of integer arrays, each holding the x, y, height, and width of a detected face
   */
  public static List<Integer[]> detect(IplImage originalImage) {

    List<Integer[]> facesList = new ArrayList<Integer[]>();

    IplImage grayImage =
        IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1);

    cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);

    CvMemStorage storage = CvMemStorage.create();
    opencv_objdetect.CvHaarClassifierCascade cascade =
        new opencv_objdetect.CvHaarClassifierCascade(cvLoad(CASCADE_FILE));

    CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 1, 0);

    Integer[] coordinates = null;
    for (int i = 0; i < faces.total(); i++) {
      CvRect r = new CvRect(cvGetSeqElem(faces, i));
      coordinates = new Integer[4];
      coordinates[0] = r.x();
      coordinates[1] = r.y();
      coordinates[2] = r.height();
      coordinates[3] = r.width();
      facesList.add(coordinates);
    }
    return facesList;
  }
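  // Hedged usage sketch for detect() above, assuming it is called from within the same class.
  // The image paths and the wrapper method are illustrative assumptions, not from the source.
  public static void detectUsageSketch() {
    IplImage frame = cvLoadImage("group.jpg");
    for (Integer[] face : detect(frame)) {
      // order produced by detect(): x, y, height, width
      int x = face[0], y = face[1], h = face[2], w = face[3];
      // draw a green box around each detected face
      cvRectangle(frame, cvPoint(x, y), cvPoint(x + w, y + h), CvScalar.GREEN, 2, CV_AA, 0);
    }
    cvSaveImage("group-faces.jpg", frame);
  }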
Example #3
  public Histogram(IplImage img) {
    hist = new int[256];

    // calculate absolute values
    for (int i = 0; i < img.height(); i++) {
      for (int j = 0; j < img.width(); j++) {
        int val = (int) cvGetReal2D(img, i, j);
        hist[val]++;
      }
    }
  }
Example #4
  public IplImage correctGamma(IplImage img) {
    // get image intensity
    // img.applyGamma(d);
    IplImage gray = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
    cvCvtColor(img, gray, CV_RGB2GRAY);
    BufferedImage buffGray = gray.getBufferedImage();
    double grayArr[] = new double[gray.width() * gray.height()];
    int counter = 0;
    for (int i = 0; i < gray.width(); i++) {
      for (int j = 0; j < gray.height(); j++) {
        // keep only the intensity byte and normalize to [0, 1] so the mean is comparable to 0.5
        grayArr[counter] = (buffGray.getRGB(i, j) & 0xFF) / 255.0;
        counter++;
      }
    }

    double imgSd = new StandardDeviation().evaluate(grayArr);
    double imgMean = new Mean().evaluate(grayArr);
    double y = 0;
    if (imgMean > 0.5) y = 1 + (Math.abs(0.5 - imgMean) / imgSd);
    else y = 1 / (1 + (Math.abs(0.5 - imgMean) / imgSd));

    img.applyGamma(y);

    return img;
  }
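  // Worked example of the adaptive gamma rule above (values are illustrative, not from the source),
  // using the normalized [0, 1] intensities computed in correctGamma():
  //   imgMean = 0.7, imgSd = 0.2  ->  imgMean > 0.5, so y = 1 + |0.5 - 0.7| / 0.2 = 2.0
  //   imgMean = 0.3, imgSd = 0.2  ->  imgMean <= 0.5, so y = 1 / (1 + |0.5 - 0.3| / 0.2) = 0.5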
Example #5
  /**
   * Creates a new image by using img as the color image
   *
   * @param img an 8U1C or 8U3C image (8bit unsigned and 1 or 3 channels)
   */
  public Image(IplImage img) {
    if (img.nChannels() != 1) {
      // color image
      this.color = img;
      this.gray = IplImage.create(color.cvSize(), IPL_DEPTH_8U, 1);
      cvCvtColor(color, gray, CV_BGR2GRAY);
    } else {
      // grayscale image
      this.color = img;
      this.gray = color;
    }

    this.img = gray.clone();
    this.temp = gray.clone();
  }
Example #6
  public void stop() throws Exception {
    // Free the RGB image
    if (buffer != null) {
      av_free(buffer);
      buffer = null;
    }
    if (pFrameRGB != null) {
      av_free(pFrameRGB);
      pFrameRGB = null;
    }

    // Free the YUV frame
    if (pFrame != null) {
      av_free(pFrame);
      pFrame = null;
    }

    // Close the codec
    if (pCodecCtx != null) {
      avcodec_close(pCodecCtx);
      pCodecCtx = null;
    }

    // Close the video file
    if (pFormatCtx != null && !pFormatCtx.isNull()) {
      av_close_input_file(pFormatCtx);
      pFormatCtx = null;
    }

    if (return_image != null) {
      return_image.release();
      return_image = null;
    }
  }
    @Override
    public void run() {
      try {
        int sw = (int) (w * scale);
        int sh = (int) (h * scale);
        xoffset = (width - sw) / 2;
        yoffset = (height - sh) / 2;
        if (bitmap == null) {
          bitmap = Bitmap.createBitmap(sw, sh, Bitmap.Config.ARGB_8888);
        }
        if (rgbaImage == null) {
          rgbaImage = IplImage.create(sw, sh, IPL_DEPTH_8U, 4);
        }
        if (resizedImage == null) {
          resizedImage = IplImage.create(sw, sh, IPL_DEPTH_8U, 3);
        }

        for (int i = 0; i < 16; i++) {
          int currentFps = fpsCounter.getFPS();
          // System.out.println(currentFps+"/"+fps);
          if (currentFps > 10) {
            Thread.sleep(0, 999999);
            continue;
          }

          if (currentFps < 1) {
            rendering = false;
            return;
          }

          break;
        }

        cvResize(image, resizedImage);
        cvCvtColor(resizedImage, rgbaImage, CV_BGR2RGBA);
        bitmap.copyPixelsFromBuffer(rgbaImage.getByteBuffer());

        draw();

        fpsCounter.addFrameTime();
      } catch (Exception e) {
        Log.e(LOG_TAG, "onImage error " + e.getMessage());
      }

      rendering = false;
    }
 public void run() {
   isShutdown = false;
   while (!isShutdown) {
     try {
       // long start = System.currentTimeMillis();
       IplImage image = grabber.grab();
       if (image != null) {
         img = image.getBufferedImage();
         // System.out.println(System.currentTimeMillis() - start + "ms");
       } else {
         // System.out.println(System.currentTimeMillis() - start + "ms failed");
       }
     } catch (Exception e) {
       e.printStackTrace();
     }
   }
 }
  public videoPanel() {
    cr = new CameraReader();
    cr.Start();
    this.size = new Dimension(cr.getColorFrame().width() + 20, cr.getColorFrame().height() + 50);

    iv = cr.getColorFrame();
    this.add(new JLabel(new ImageIcon(iv.getBufferedImage())));

    // this.run();
  }
    public RenderingThread(IplImage image) {
      this.w = image.cvSize().width();
      this.h = image.cvSize().height();
      if (width / w > height / h) {
        scale = (float) width / w;
      } else {
        scale = (float) height / h;
      }

      this.image = image; // .clone();
      rendering = true;
    }
  public void start() {
    try {
      grabber.start();
      grabber.setFrameRate(125);
      System.out.println(grabber.getFrameRate());
      System.out.println(grabber.getFormat());
      System.out.println(grabber.getPixelFormat());
      System.out.println(grabber.getSampleRate());
    } catch (com.googlecode.javacv.FrameGrabber.Exception e1) {
      e1.printStackTrace();
      return;
    }

    try {
      IplImage image = grabber.grab();
      img = image.getBufferedImage();
    } catch (Exception e) {
      e.printStackTrace();
    }

    webcamReaderThread = new Thread(this, "WebcamReaderThread");
    webcamReaderThread.start();
  }
Example #12
  public IplImage asImage() {
    if (img == null) {
      int max = hist[0];
      for (int i = 1; i < hist.length; i++) {
        max = max >= hist[i] ? max : hist[i];
      }

      int height = 100;
      img = IplImage.create(cvSize(hist.length, height), 8, 1);

      cvSetZero(img);
      for (int x = 0; x < hist.length; x++) {
        int val = (int) Math.round(hist[x] * (height / (double) max));
        cvDrawLine(img, cvPoint(x, height), cvPoint(x, height - val), CvScalar.WHITE, 1, 1, 0);
      }
    }
    return img;
  }
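  // Hedged usage sketch combining the Histogram constructor (Example #3) and asImage() above;
  // the file name and window title are illustrative assumptions, not from the source.
  public static void histogramUsageSketch() {
    IplImage gray = cvLoadImage("photo.jpg", 0); // load as single-channel grayscale
    Histogram hist = new Histogram(gray);
    CanvasFrame view = new CanvasFrame("Histogram");
    view.showImage(hist.asImage());
  }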
Example #13
  public IplImage asTotalImage() {
    if (totalImg == null) {
      int sum = hist[0];
      for (int i = 1; i < hist.length; i++) {
        sum += hist[i];
      }

      int height = 512;
      totalImg = IplImage.create(cvSize(hist.length, height), 8, 1);

      cvSetZero(totalImg);
      for (int x = 0; x < hist.length; x++) {
        int val = (int) Math.round(hist[x] * (height / (double) sum));
        cvDrawLine(totalImg, cvPoint(x, height), cvPoint(x, height - val), CvScalar.WHITE, 1, 1, 0);
      }
    }
    return totalImg;
  }
Example #14
  public IplImage grab() throws Exception {
    if (pFormatCtx == null || pFormatCtx.isNull()) {
      throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
    }
    boolean done = false;
    long pts = 0;
    while (!done) {
      if (av_read_frame(pFormatCtx, packet) < 0) {
        // throw new Exception("Could not read frame");
        return null; // end of file?
      }

      // Is this a packet from the video stream?
      if (packet.stream_index() == videoStream) {
        // Decode video frame
        int len = avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);

        LongPointer opaque = new LongPointer(pFrame.opaque());
        if (packet.dts() != AV_NOPTS_VALUE) {
          pts = packet.dts();
        } else if (!opaque.isNull() && opaque.get() != AV_NOPTS_VALUE) {
          pts = opaque.get();
        } else {
          pts = 0;
        }
        AVRational time_base = pStream.time_base();
        pts = 1000 * pts * time_base.num() / time_base.den();

        // Did we get a video frame?
        if (len > 0 && frameFinished[0] != 0) {
          switch (colorMode) {
            case BGR:
            case GRAY:
              // Deinterlace Picture
              if (deinterlace) {
                avpicture_deinterlace(
                    pFrame, pFrame, pCodecCtx.pix_fmt(), pCodecCtx.width(), pCodecCtx.height());
              }

              // Convert the image from its native format to RGB
              sws_scale(
                  img_convert_ctx,
                  new PointerPointer(pFrame),
                  pFrame.linesize(),
                  0,
                  pCodecCtx.height(),
                  new PointerPointer(pFrameRGB),
                  pFrameRGB.linesize());
              return_image.imageData(buffer);
              return_image.widthStep(pFrameRGB.linesize(0));
              break;
            case RAW:
              assert (pCodecCtx.width() == return_image.width()
                  && pCodecCtx.height() == return_image.height());
              return_image.imageData(pFrame.data(0));
              return_image.widthStep(pFrame.linesize(0));
              break;
            default:
              assert (false);
          }
          return_image.imageSize(return_image.height() * return_image.widthStep());

          done = true;
        }
      }

      // Free the packet that was allocated by av_read_frame
      av_free_packet(packet);
    }

    return_image.timestamp = pts;
    return return_image;
  }
Example #15
 private static int clipX(IplImage img, double x) {
   return (int) Math.round(Math.min(Math.max(x, 0), img.width() - 1));
 }
Example #16
  public void start() throws Exception {
    // Open video file
    AVInputFormat f = null;
    if (format != null && format.length() > 0) {
      f = av_find_input_format(format);
      if (f == null) {
        throw new Exception("Could not find input format \"" + format + "\".");
      }
    }
    AVFormatParameters fp = null;
    if (frameRate > 0 || bpp > 0 || imageWidth > 0 || imageHeight > 0) {
      fp = new AVFormatParameters();
      fp.time_base(av_d2q(1 / frameRate, FFmpegFrameRecorder.DEFAULT_FRAME_RATE_BASE));
      fp.sample_rate(bpp);
      fp.channels(colorMode == ColorMode.BGR ? 3 : 1);
      fp.width(imageWidth);
      fp.height(imageHeight);
    }
    if (av_open_input_file(pFormatCtx, filename, f, 0, fp) != 0) {
      throw new Exception("Could not open file \"" + filename + "\".");
    }

    // Retrieve stream information
    if (av_find_stream_info(pFormatCtx) < 0) {
      throw new Exception("Could not find stream information.");
    }

    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, filename, 0);

    // Find the first video stream
    videoStream = -1;
    int nb_streams = pFormatCtx.nb_streams();
    for (int i = 0; i < nb_streams; i++) {
      pStream = pFormatCtx.streams(i);
      // Get a pointer to the codec context for the video stream
      pCodecCtx = pStream.codec();
      if (pCodecCtx.codec_type() == CODEC_TYPE_VIDEO) {
        videoStream = i;
        break;
      }
    }
    if (videoStream == -1) {
      throw new Exception("Did not find a video stream.");
    }

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx.codec_id());
    if (pCodec == null) {
      throw new Exception("Unsupported codec or codec not found: " + pCodecCtx.codec_id() + ".");
    }

    // Open codec
    if (avcodec_open(pCodecCtx, pCodec) < 0) {
      throw new Exception("Could not open codec.");
    }

    // Allocate video frame
    pFrame = avcodec_alloc_frame();

    // Allocate an AVFrame structure
    pFrameRGB = avcodec_alloc_frame();
    if (pFrameRGB == null) {
      throw new Exception("Could not allocate frame.");
    }

    int width = getImageWidth() > 0 ? getImageWidth() : pCodecCtx.width();
    int height = getImageHeight() > 0 ? getImageHeight() : pCodecCtx.height();

    switch (colorMode) {
      case BGR:
        // Determine required buffer size and allocate buffer
        numBytes = avpicture_get_size(PIX_FMT_BGR24, width, height);
        buffer = new BytePointer(av_malloc(numBytes));

        // Assign appropriate parts of buffer to image planes in pFrameRGB
        // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
        // of AVPicture
        avpicture_fill(pFrameRGB, buffer, PIX_FMT_BGR24, width, height);

        // Convert the image into BGR format that OpenCV uses
        img_convert_ctx =
            sws_getContext(
                pCodecCtx.width(),
                pCodecCtx.height(),
                pCodecCtx.pix_fmt(),
                width,
                height,
                PIX_FMT_BGR24,
                SWS_BILINEAR,
                null,
                null,
                null);
        if (img_convert_ctx == null) {
          throw new Exception("Cannot initialize the conversion context.");
        }

        return_image = IplImage.createHeader(width, height, IPL_DEPTH_8U, 3);
        break;

      case GRAY:
        numBytes = avpicture_get_size(PIX_FMT_GRAY8, width, height);
        buffer = new BytePointer(av_malloc(numBytes));
        avpicture_fill(pFrameRGB, buffer, PIX_FMT_GRAY8, width, height);

        // Convert the image into GRAY format that OpenCV uses
        img_convert_ctx =
            sws_getContext(
                pCodecCtx.width(),
                pCodecCtx.height(),
                pCodecCtx.pix_fmt(),
                width,
                height,
                PIX_FMT_GRAY8,
                SWS_BILINEAR,
                null,
                null,
                null);
        if (img_convert_ctx == null) {
          throw new Exception("Cannot initialize the conversion context.");
        }

        return_image = IplImage.createHeader(width, height, IPL_DEPTH_8U, 1);
        break;

      case RAW:
        numBytes = 0;
        buffer = null;
        img_convert_ctx = null;
        return_image =
            IplImage.createHeader(pCodecCtx.width(), pCodecCtx.height(), IPL_DEPTH_8U, 1);
        break;

      default:
        assert (false);
    }
  }
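  // Hedged usage sketch of the start()/grab()/stop() life cycle implemented above. The concrete
  // grabber class (FFmpegFrameGrabber from the same JavaCV generation) and the input file name
  // are assumptions, not from the source.
  public static void grabberUsageSketch() throws Exception {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4");
    grabber.start(); // opens the file, finds the video stream, sets up the conversion context
    IplImage frame;
    while ((frame = grabber.grab()) != null) {
      // each decoded frame arrives as an IplImage; process or display it here
    }
    grabber.stop(); // frees buffers, frames, codec, and format context
  }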
  public static void captureFrame() {
    // ---------These objects allow us to edit the variables used in the scalar polygon
    // recognition------
    JLabel blueMaxValueLabel = new JLabel("Max Blue Value");
    JTextField blueMaxValueField = new JTextField("255.0", 5);

    JLabel blueMinValueLabel = new JLabel("Min Blue Value");
    JTextField blueMinValueField = new JTextField("100.0", 5);

    JLabel greenMaxValueLabel = new JLabel("Max Green Value");
    JTextField greenMaxValueField = new JTextField("255.0", 5);

    JLabel greenMinValueLabel = new JLabel("Min Green Value");
    JTextField greenMinValueField = new JTextField("215.0", 5);

    JLabel redMaxValueLabel = new JLabel("Max Red Value");
    JTextField redMaxValueField = new JTextField("45.0", 5);

    JLabel redMinValueLabel = new JLabel("Min Red Value");
    JTextField redMinValueField = new JTextField("0.0", 5);

    JLabel yLabel = new JLabel("Y");
    JTextField yValue = new JTextField(5);
    boolean hotZone = false;

    // ---------------------------------End object lists-------------------------------------
    String source = "http://*****:*****@10.12.59.11/mjpg/video.mjpg";
    FrameGrabber grabber = new OpenCVFrameGrabber(source); // new VideoInputFrameGrabber(0);
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ex) {
      Logger.getLogger(CaptureImage.class.getName()).log(Level.SEVERE, null, ex);
    }

    CanvasFrame canvas = new CanvasFrame("WebCam");
    canvas.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
    CanvasFrame before = new CanvasFrame("before");
    before.setDefaultCloseOperation(javax.swing.JFrame.HIDE_ON_CLOSE);
    GridBagLayout gridBag = new GridBagLayout();
    before.setLayout(gridBag);
    GridBagConstraints gbc = new GridBagConstraints();
    gbc.fill = GridBagConstraints.VERTICAL;
    gridBag.setConstraints(before.getComponent(0), gbc);
    gbc.fill = GridBagConstraints.NONE;
    gbc.gridx = 1;
    gbc.gridy = 0;
    before.add(blueMaxValueLabel, gbc);
    gbc.gridx = 2;
    before.add(blueMaxValueField, gbc);
    gbc.gridx = 3;
    before.add(blueMinValueLabel, gbc);
    gbc.gridx = 4;
    before.add(blueMinValueField, gbc);
    // before.add(blueMinValueField);
    gbc.gridx = 1;
    gbc.gridy = 2;
    before.add(greenMaxValueLabel, gbc);
    gbc.gridx = 2;
    before.add(greenMaxValueField, gbc);
    gbc.gridx = 3;
    before.add(greenMinValueLabel, gbc);
    gbc.gridx = 4;
    before.add(greenMinValueField, gbc);
    // before.add(greenMinValueField);
    gbc.gridx = 1;
    gbc.gridy = 3;
    before.add(redMaxValueLabel, gbc);
    gbc.gridx = 2;
    before.add(redMaxValueField, gbc);
    gbc.gridx = 3;
    before.add(redMinValueLabel, gbc);
    gbc.gridx = 4;
    before.add(redMinValueField, gbc);
    before.add(yLabel);
    before.add(yValue);
    // before.add(redMinValueField);

    int numOfScreens = 39;
    //        String loadingPic = "C:\\loadingScreen\\loadingScreen"
    //                + (((int) (Math.random() * numOfScreens)) + 1) + ".bmp";
    int failedGrabs = 0;
    NetworkTable.setTeam(
        1259); // CaptureImage requires setTeam() because it does not use SmartDashboard,
               // which already assigns a team number in the form of a network table
    NetworkTable.setIPAddress("10.12.59.2");
    NetworkTable.setClientMode();
    NetworkTable.getTable("SmartDashboard").putBoolean("TestTable", hotZone);
    while (true) {
      try {
        //                try {
        //                    IplImage splashScreen = new IplImage(cvLoadImage(loadingPic));
        //                    canvas.showImage(splashScreen);
        //                } catch (Exception e) {
        //                }
        grabber.start();
        IplImage img;
        // IplImage hsv;
        // IplImage canny;
        IplImage displayImg;
        IplImage dst;
        PolygonFinder polyFind = new PolygonFinder();

        //                try {
        //                    NetworkTable.getTable("camera").putNumber("distance", 0);
        //                    NetworkTable.getTable("camera").putBoolean("hotZone", false);
        //                } catch (Exception e) {
        //                }

        while (true) {

          while (true) {
            try {
              // System.out.println("grabbing...");
              img = new IplImage(grabber.grab());
              displayImg = new IplImage(cvCreateImage(img.cvSize(), img.depth(), img.nChannels()));
              cvCopy(img, displayImg, null);
              // System.out.println("Frame GRABBED!");
              break;
            } catch (Exception e) {
              failedGrabs++;
              System.out.println(failedGrabs);
              if (failedGrabs > 10) {
                grabber = new OpenCVFrameGrabber(source);
                grabber.start();
                failedGrabs = 0;
              }
              continue;
            }
          }

          //            PolygonFinder polyFind = new PolygonFinder(img);
          //            ArrayList<PolygonStructure> polygons = polyFind.findPolygons(4);
          //            for (int i = 0; i < polygons.size(); i++) {
          //                polygons.get(i).drawShape(img, CvScalar.MAGENTA);
          //                }
          //            canvas.showImage(img);
          //            return;
          if (img != null) {
            // cvSaveImage("originalcapture.jpg", img);
          }
          // IplImage gray = cvCreateImage(cvGetSize(img), img.depth(), 1 );
          // hsv = new IplImage(cvCreateImage(cvGetSize(img), img.depth(), 3));
          dst = new IplImage(cvCreateImage(cvGetSize(img), img.depth(), 1));
          // cvCvtColor(img, gray, CV_RGB2GRAY);

          // cvCvtColor(img, hsv, CV_BGR2HSV);
          //            if(hsv != null) {
          //                cvSaveImage("hsv.jpg", hsv);
          //            }
          // 30 20 0; 70 140 60
          // 50 175 75 //// 100 255 225
          // cvInRangeS(hsv, cvScalar(0, 200, 0, 0), cvScalar(150, 255, 255, 0), dst);
          // cvDrawLine(img, new CvPoint(0, 360), new CvPoint(639, 360), CvScalar.BLACK, 240, 8, 0);
          // cvInRangeS(img, cvScalar(100, 215, 0, 0), cvScalar(255, 255, 45, 0), dst); This is the
          // original
          // Code used to set max and min values for bgr scale in scalars VVV
          cvInRangeS(
              img,
              cvScalar(
                  Double.parseDouble(blueMinValueField.getText()),
                  Double.parseDouble(greenMinValueField.getText()),
                  Double.parseDouble(redMinValueField.getText()),
                  0),
              cvScalar(
                  Double.parseDouble(blueMaxValueField.getText()),
                  Double.parseDouble(greenMaxValueField.getText()),
                  Double.parseDouble(redMaxValueField.getText()),
                  0),
              dst);
          // NEED TO FLIP MAX IN MIN POSITION, MIN IS IN MAX POSITION

          // cvInRangeS(img, cvScalar(0, 0, 0, 0), cvScalar(255, 255, 255, 0), dst);
          // cvDilate( dst, dst, null, 1 );
          cvSmooth(dst, dst, CV_MEDIAN, 1, 1, 0, 0);
          // cvCanny(gray, dst, 50, 100, 3);
          // cvCvtColor(hsv, gray, CV_RGB2GRAY);
          // canvas.showImage(img);
          // before.showImage(dst);
          IplImage newDst =
              new IplImage(cvCloneImage(img)); // cvCreateImage(cvGetSize(img), img.depth(), 3));
          cvCvtColor(dst, newDst, CV_GRAY2BGR);
          // cvConvexHull2(newDst, null, CV_CLOCKWISE, 0);
          before.showImage(newDst);
          polyFind.setImage(newDst);
          ArrayList<PolygonStructure> polygons = new ArrayList<PolygonStructure>();
          int i;
          // before.showImage(newDst);
          for (i = 4; i < 5; i++) {
            for (PolygonStructure ps : polyFind.findPolygons(i)) {
              polygons.add(ps);
            }
          }
          for (int c = 0; c < polygons.size(); c++) {
            for (int d = 0; d < polygons.size(); d++) {
              if (c == d) {
                break;
              }
              if (polygons
                  .get(c)
                  .getPolygon()
                  .contains(polygons.get(d).getPolygon().getBounds2D())) {
                polygons.remove(d);
                if (d < c) {
                  c--;
                }
                d--;
              }
            }
          }
          for (int c = 0; c < polygons.size(); c++) {
            for (int d = 0; d < polygons.size(); d++) {
              if (c == d) {
                break;
              }
              if (polygons
                  .get(c)
                  .getPolygon()
                  .contains(
                      polygons.get(d).getPolygon().getBounds().getCenterX(),
                      polygons.get(d).getPolygon().getBounds().getCenterY())) {
                polygons.remove(d);
                if (d < c) {
                  c--;
                }
                d--;
              }
            }
          }
          //                    if
          // (tempPoly.getVertex(0).x()<vertices[0].x()&&tempPoly.getVertex(1).x()>vertices[0].x()
          //
          // &&tempPoly.getVertex(1).y()<vertices[0].y()&&tempPoly.getVertex(2).y()>vertices[0].y())
          //                    {
          //                        isInside = true;
          //                        break;
          //                    }
          for (i = 0; i < polygons.size(); i++) {
            if (polygons.size() < 2) {
              hotZone = false;
            } else {
              hotZone = true;
            }
            CvScalar polyColor = CvScalar.MAGENTA;
            switch (i) {
              case 0:
                {
                  polyColor = CvScalar.RED;
                  //                            System.out.println("Center X: "
                  //                                    + (320 - ((polygons.get(i).getVertex(3).x()
                  // + polygons.get(i).getVertex(2).x()) / 2))
                  //                                    + "\tCenter Y: "
                  //                                    + (240 - ((polygons.get(i).getVertex(3).y()
                  // + (polygons.get(i).getVertex(2).y())) / 2)));
                  double x =
                      (320
                          - ((polygons.get(i).getVertex(3).x() + polygons.get(i).getVertex(2).x())
                              / 2));
                  double angle =
                      (480
                          - ((polygons.get(i).getVertex(3).y() + (polygons.get(i).getVertex(2).y()))
                              / 2));
                  // double distance = 5182.2043151825 * Math.pow(angle, -1);

                  double distance = 514.7318 * Math.pow(angle - 220, -1.2);
                  // double distance;
                  if (angle < 317) {
                    distance = -0.370786516853933 * angle + 133.977528089888;
                  } else if (angle > 316 && angle < 325) {
                    distance = -0.184697808038669 * angle + 74.9375184489134;
                  } else if (angle > 324 && angle < 362) {
                    distance = -0.140145789191636 * angle + 60.5198480748917;
                  } else if (angle > 360) {
                    distance = -0.0702258215380136 * angle + 35.3150512441271;
                  }

                  for (int c = 0; c < 4; c++) {
                    System.out.println("Vertex " + c + ":" + polygons.get(i).getVertex(c).y());
                  }
                  yValue.setText("" + angle);

                  // hotZone = true;

                  break;
                }
              case 1:
                {
                  // Hot zone
                  polyColor = CvScalar.BLUE;
                  break;
                }
              case 2:
                {
                  polyColor = CvScalar.GREEN;
                  break;
                }
              case 3:
                {
                  polyColor = CvScalar.YELLOW;
                  break;
                }
            }
            // polygons.get(i).drawShape(img, polyColor);
            cvLine(
                displayImg,
                polygons.get(i).getVertices()[3],
                polygons.get(i).getVertices()[0],
                polyColor,
                3,
                CV_AA,
                0);
            cvLine(
                displayImg,
                polygons.get(i).getVertices()[0],
                polygons.get(i).getVertices()[1],
                polyColor,
                3,
                CV_AA,
                0);
            cvDrawCircle(displayImg, polygons.get(i).getVertices()[0], 3, CvScalar.GRAY, 1, 8, 0);
            cvLine(
                displayImg,
                polygons.get(i).getVertices()[1],
                polygons.get(i).getVertices()[2],
                polyColor,
                3,
                CV_AA,
                0);
            cvDrawCircle(
                displayImg, polygons.get(i).getVertices()[1], 3, CvScalar.MAGENTA, 1, 8, 0);
            cvLine(
                displayImg,
                polygons.get(i).getVertices()[2],
                polygons.get(i).getVertices()[3],
                polyColor,
                3,
                CV_AA,
                0);
            cvDrawCircle(displayImg, polygons.get(i).getVertices()[2], 3, CvScalar.BLACK, 1, 8, 0);
            cvDrawCircle(displayImg, polygons.get(i).getVertices()[3], 3, CvScalar.CYAN, 1, 8, 0);
            // System.out.println("Polygon " + i + "\t" + polygons.get(i).getVertices()[0]);
          }

          try {
            // NetworkTable.getTable("camera").beginTransaction();
            //                        NetworkTable.getTable("camera").putNumber("distance",
            // distance);
            //                        NetworkTable.getTable("camera").putNumber("x", x);
            NetworkTable.getTable("camera").putBoolean("hotZone", hotZone);
            // NetworkTable.getTable("camera").endTransaction();

          } catch (Exception e) {
          }

          if (displayImg != null) {
            // cvSaveImage("aftercapture.jpg", dst);
            cvDrawLine(
                displayImg, new CvPoint(300, 240), new CvPoint(340, 240), CvScalar.WHITE, 2, 8, 0);
            cvDrawLine(
                displayImg, new CvPoint(320, 220), new CvPoint(320, 260), CvScalar.WHITE, 2, 8, 0);
            canvas.showImage(displayImg);
          } else {
            // System.out.println("Null Image");
          }

          // cvReleaseImage(gray);
          cvReleaseImage(newDst);
          // cvReleaseImage(img);
          // cvReleaseImage(hsv);
          cvReleaseImage(displayImg);
          cvReleaseImage(dst);
          // Thread.sleep(50);
        }
        // System.out.println("5");
        // grabber.stop();
      } catch (Exception e) {
        e.printStackTrace();
      }
      grabber = new OpenCVFrameGrabber(source);
    }
  }
Example #18
  public void generatePGMFromPic(String srcPath, String file, String destPath) throws Exception {

    String srcFilePath = srcPath + "/" + file;
    System.out.println("Loading image from " + srcFilePath);
    IplImage origImg = cvLoadImage(srcFilePath);

    // convert to grayscale
    IplImage grayImg = IplImage.create(origImg.width(), origImg.height(), IPL_DEPTH_8U, 1);
    cvCvtColor(origImg, grayImg, CV_BGR2GRAY);

    // scale the grayscale (to speed up face detection)
    IplImage smallImg =
        IplImage.create(grayImg.width() / SCALE, grayImg.height() / SCALE, IPL_DEPTH_8U, 1);
    cvResize(grayImg, smallImg, CV_INTER_LINEAR);

    // equalize the small grayscale
    IplImage equImg = IplImage.create(smallImg.width(), smallImg.height(), IPL_DEPTH_8U, 1);
    cvEqualizeHist(smallImg, equImg);

    CvMemStorage storage = CvMemStorage.create();

    CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));
    System.out.println("Detecting faces...");
    CvSeq faces = cvHaarDetectObjects(equImg, cascade, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING);
    cvClearMemStorage(storage);
    int total = faces.total();
    System.out.println("Found " + total + " face(s)");
    for (int i = 0; i < total; i++) {
      CvRect r = new CvRect(cvGetSeqElem(faces, i));
      cvSetImageROI(
          origImg, cvRect(r.x() * SCALE, r.y() * SCALE, r.width() * SCALE, r.height() * SCALE));
      IplImage origface = cvCreateImage(cvSize(r.width() * SCALE, r.height() * SCALE), 8, 3);

      IplImage smallface = cvCreateImage(cvSize(120, 120), 8, 3);
      cvCopy(origImg, origface);
      cvResize(origface, smallface, CV_INTER_LINEAR);
      cvSaveImage(destPath + "/" + file + i + ".pgm", smallface);
      cvResetImageROI(origImg);
    }
  }
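  // Hypothetical call of generatePGMFromPic() above, assuming it is invoked from the same class;
  // the directory layout and file name are illustrative assumptions, not from the source.
  public void generatePGMUsageSketch() throws Exception {
    // detect faces in /data/photos/group.jpg and write 120x120 PGM crops into /data/faces
    generatePGMFromPic("/data/photos", "group.jpg", "/data/faces");
  }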
  public IplImage DetectFaces(IplImage image) throws Exception {

    // Converts the image to gray scale for detection to work, using the same dimensions as the
    // original.
    IplImage grayImage = IplImage.createFrom(convertColorToGray(image.getBufferedImage()));

    CvMemStorage storage = CvMemStorage.create();

    // Using the cascade file, this creates a classification for what objects to detect. In our case
    // it is the anterior of the face.
    CvHaarClassifierCascade classifier = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));

    // Detect Haar-like objects, depending on the classifier. In this case we use a classifier for
    // detecting the anterior of the face.
    CvSeq faces = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 1, 0);

    // Initialize the static variables in FaceScanner for determining the area to crop the largest
    // detected face.
    FaceScanner.height = 0;
    FaceScanner.width = 0;
    FaceScanner.x = 0;
    FaceScanner.y = 0;

    // Loop through all detected faces and save the largest (closest) face.
    for (int i = 0; i < faces.total(); i++) {
      CvRect rect = new CvRect(cvGetSeqElem(faces, i));
      if (FaceScanner.width < rect.width()) {
        FaceScanner.width = rect.width();
        FaceScanner.height = rect.height();
        FaceScanner.x = rect.x();
        FaceScanner.y = rect.y();
      }

      if (FaceScanner.displayRects) {
        /*Uncomment to draw the rectangles around the detected faces.*/
        // if(rect.width() > 130 && rect.height() > 130){
        // Draw a square around the detected face.
        cvRectangle(
            image,
            cvPoint(rect.x(), rect.y()),
            cvPoint(rect.x() + rect.width(), rect.y() + rect.height()),
            CvScalar.GREEN,
            2,
            CV_AA,
            0);
        // }
        /*-----------------------------------------------------------*/
      }
    }

    // Checks that there was a detected face in the image before saving. Also, the detected "face"
    // must be large enough to be considered
    // a detected face. This is to limit the amount of erroneous detections. This saves the full
    // size image with detections drawn on
    // whole image before cropping.
    if (!(FaceScanner.height == 0 && FaceScanner.width == 0)
        && !(FaceScanner.height < 130 && FaceScanner.width < 130)) {
      // Save the image with rectangles.
      // cvSaveImage(filename.replace(".png", "-Rect.png"), image);
    } else {
      return null;
    }

    return image;
  }
Example #20
 private static int clipY(IplImage img, double y) {
   return (int) Math.round(Math.min(Math.max(y, 0), img.height() - 1));
 }
 public IplImage scale(IplImage img, int scale) {
   BufferedImage bi = resize(img.getBufferedImage(), img.width() / scale, img.height() / scale);
   ImageConverter ic = new ImageConverter();
   return ic.convertRGB(bi);
 }
Example #22
 public static IplImage createLeafMask(Block root, Dimension imageSize) {
   IplImage mask = IplImage.create(cvSize(imageSize.width, imageSize.height), 8, 1);
   cvSet(mask, CvScalar.BLACK);
   paintLeafMaskRecursively(mask, root);
   return mask;
 }
Example #23
  public boolean generateIndex(
      BufferedImage img, String filename, int frameno, Client client, FileWriter file) {
    Initializer.initialize();

    String color_name;
    boolean isFace;
    IplImage cvimage = IplImage.createFrom(img);

    long[][] hsv_hist = new long[Initializer.NUM_BINS_HSV_HUE][Initializer.NUM_BINS_HSV_SAT];
    color_name = Histogram.generateHistHSV(cvimage, hsv_hist);
    if (color_name.equals("No")) return false;
    isFace = faceDetection.faceDetect(cvimage);

    long[] corner_hist = new long[Initializer.NUM_BINS_DIST_1D];
    CornerDetector.findCorners(cvimage, corner_hist);

    IplImage gray_img = null;

    CvSize img_sz = null;
    if (cvimage != null) img_sz = cvGetSize(cvimage);
    if (img_sz != null) gray_img = cvCreateImage(img_sz, IPL_DEPTH_8U, 1);
    else System.out.println("couldn't create image");
    cvCvtColor(cvimage, gray_img, CV_BGR2GRAY);

    long[][] apidq_hist =
        new long[Initializer.NUM_BINS_DIST_APIDQ][Initializer.NUM_BINS_ANGLE_APIDQ];
    Histogram.generateHistAPIDQ(gray_img, apidq_hist);

    IplImage img_edge = cvCreateImage(cvGetSize(gray_img), gray_img.depth(), 1);
    long[][] edge_hist = new long[Initializer.NUM_BINS_DIST_2D][Initializer.NUM_BINS_ANGLE_2D];
    EdgeFinder.getegdes(gray_img, img_edge);
    ContourFinder.findContour(img_edge, edge_hist);

    // System.err.println(convertToString(corner_hist, apidq_hist, hsv_hist, edge_hist));

    // System.err.println("------------");

    String vector =
        convertToString(color_name, isFace, corner_hist, apidq_hist, hsv_hist, edge_hist);
    // insertIndexes(vector,filename,frameno, client);
    client.admin().indices().refresh(new RefreshRequest("movie"));

    IndexComparison incmp = new IndexComparison();
    if (incmp.compare_reject(filename, frameno, vector, client) != -1) {
      insertIndexes(vector, filename, frameno, client);
      client.admin().indices().refresh(new RefreshRequest("movie"));
      // System.err.println(filename+"_"+frameno+" ** inserted");
      return true;
    }
    // else System.err.println(filename+"_"+frameno+"-------not inserted");
    client.admin().indices().refresh(new RefreshRequest("movie"));

    return false;

    // insertIndexes(vector,filename,frameno, client);

    // WARNING NOT RELEASED-------------------------------------------------------
    // gray_img..release(); //cvReleaseImage(gray_img);//if(!gray_img.isNull())gray_img.release();
    // img_edge.release(); //cvReleaseImage(img_edge);//if(!img_edge.isNull())img_edge.release();
    // cvimage.release(); //cvReleaseImage(cvimage);//cvimage.release();
  }
Example #24
  /** usage: java HoughLines imageDir\imageName TransformType */
  public static void main(String[] args) {

    String fileName =
        args.length >= 1 ? args[0] : "pic1.png"; // if no params provided, process the default image
    IplImage src = cvLoadImage(fileName, 0);
    IplImage dst;
    IplImage colorDst;
    CvMemStorage storage = cvCreateMemStorage(0);
    CvSeq lines = new CvSeq();

    CanvasFrame source = new CanvasFrame("Source");
    CanvasFrame hough = new CanvasFrame("Hough");
    if (src == null) {
      System.out.println("Couldn't load source image.");
      return;
    }

    dst = cvCreateImage(cvGetSize(src), src.depth(), 1);
    colorDst = cvCreateImage(cvGetSize(src), src.depth(), 3);

    cvCanny(src, dst, 50, 200, 3);
    cvCvtColor(dst, colorDst, CV_GRAY2BGR);

    /*
     * apply the probabilistic hough transform
     * which returns, for each detected line, two points ((x1, y1); (x2, y2))
     * defining the detected segment
     */
    if (args.length == 2 && args[1].contentEquals("probabilistic")) {
      System.out.println("Using the Probabilistic Hough Transform");
      lines = cvHoughLines2(dst, storage, CV_HOUGH_PROBABILISTIC, 1, Math.PI / 180, 40, 50, 10);
      for (int i = 0; i < lines.total(); i++) {
        // from JavaCPP, the equivalent of the C code:
        // CvPoint* line = (CvPoint*)cvGetSeqElem(lines,i);
        // CvPoint first=line[0], second=line[1]
        // is:
        // CvPoint first=line.position(0), second=line.position(1);

        Pointer line = cvGetSeqElem(lines, i);
        CvPoint pt1 = new CvPoint(line).position(0);
        CvPoint pt2 = new CvPoint(line).position(1);

        System.out.println("Line spotted: ");
        System.out.println("\t pt1: " + pt1);
        System.out.println("\t pt2: " + pt2);
        cvLine(colorDst, pt1, pt2, CV_RGB(255, 0, 0), 3, CV_AA, 0); // draw the segment on the image
      }
    }
    /*
     * Apply the multiscale hough transform which returns for each line two float parameters (rho, theta)
     * rho: distance from the origin of the image to the line
     * theta: angle between the x-axis and the normal line of the detected line
     */
    else if (args.length == 2 && args[1].contentEquals("multiscale")) {
      System.out.println("Using the multiscale Hough Transform"); //
      lines = cvHoughLines2(dst, storage, CV_HOUGH_MULTI_SCALE, 1, Math.PI / 180, 40, 1, 1);
      for (int i = 0; i < lines.total(); i++) {
        CvPoint2D32f point = new CvPoint2D32f(cvGetSeqElem(lines, i));

        float rho = point.x();
        float theta = point.y();

        double a = Math.cos((double) theta), b = Math.sin((double) theta);
        double x0 = a * rho, y0 = b * rho;
        CvPoint
            pt1 =
                new CvPoint((int) Math.round(x0 + 1000 * (-b)), (int) Math.round(y0 + 1000 * (a))),
            pt2 =
                new CvPoint((int) Math.round(x0 - 1000 * (-b)), (int) Math.round(y0 - 1000 * (a)));
        System.out.println("Line spoted: ");
        System.out.println("\t rho= " + rho);
        System.out.println("\t theta= " + theta);
        cvLine(colorDst, pt1, pt2, CV_RGB(255, 0, 0), 3, CV_AA, 0);
      }
    }
    /*
     * Default: apply the standard hough transform. Outputs: same as the multiscale output.
     */
    else {
      System.out.println("Using the Standard Hough Transform");
      lines = cvHoughLines2(dst, storage, CV_HOUGH_STANDARD, 1, Math.PI / 180, 90, 0, 0);
      for (int i = 0; i < lines.total(); i++) {
        CvPoint2D32f point = new CvPoint2D32f(cvGetSeqElem(lines, i));

        float rho = point.x();
        float theta = point.y();

        double a = Math.cos((double) theta), b = Math.sin((double) theta);
        double x0 = a * rho, y0 = b * rho;
        CvPoint
            pt1 =
                new CvPoint((int) Math.round(x0 + 1000 * (-b)), (int) Math.round(y0 + 1000 * (a))),
            pt2 =
                new CvPoint((int) Math.round(x0 - 1000 * (-b)), (int) Math.round(y0 - 1000 * (a)));
        System.out.println("Line spotted: ");
        System.out.println("\t rho= " + rho);
        System.out.println("\t theta= " + theta);
        cvLine(colorDst, pt1, pt2, CV_RGB(255, 0, 0), 3, CV_AA, 0);
      }
    }
    source.showImage(src);
    hough.showImage(colorDst);

    source.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    hough.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
  }
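  // Example invocations of the HoughLines demo above (the class name comes from its usage
  // comment; pic1.png is the built-in default image):
  //   java HoughLines pic1.png probabilistic   -> probabilistic transform, draws detected segments
  //   java HoughLines pic1.png multiscale      -> multiscale transform, prints rho/theta per line
  //   java HoughLines pic1.png                 -> standard transform (default branch)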
  public static void main(String[] args)
      throws IOException, InterruptedException, IM4JavaException,
          PdfException { // is - is the inputstream of the pdf file
    System.out.println("inside grader");

    // required debugging code
    Mongo m = new Mongo();
    DB db = m.getDB("ecomm_database");
    DBCollection coll = db.getCollection("testschemas");
    ObjectMapper mapper = new ObjectMapper();

    //		String message = "4fda1af52f910cc6200000d3"; //test id, that i will have in the real version
    String message = "500bb8811a316fda2400003b"; // id of second test
    DBObject TestObject =
        coll.findOne(new BasicDBObject("_id", new ObjectId(message))); // the actual mongo query
    System.out.println("Test Object = " + TestObject);
    JsonNode rootNode = mapper.readValue(TestObject.toString().getBytes("UTF-8"), JsonNode.class);
    JsonNode TestAnswerSheet = rootNode.get("TestAnswerSheet"); // TestAnswerSheet
    JsonNode Questions = rootNode.get("Questions");
    System.out.println("size of Questions = " + Questions.size());
    int numofquestions = Questions.size();
    System.out.println("size of answers = " + TestAnswerSheet.size());
    int numofstudents =
        rootNode.get("NumberOfStudents").getIntValue(); // grab the number of students
    System.out.println("Numer of students = " + numofstudents);

    //	    FillScore(Questions);

    //        for(int x = 0; x < Answers.size(); x++){
    //
    //		   	int IDS = Answers.get(x).get("IDS").getIntValue(); //grab the question
    //		   	String QID = new String(Answers.get(x).get("IDS").getTextValue()); //grab the question
    //		   	System.out.println("IDS = " + QID );
    //
    //        }//end of grade results

    //		JFrame frame = new JFrame(); //window popup //for debuggin
    // reading in file
    //		File PDF_file = new File("/Users/angellopozo/Documents/TestImages/PDF_CRICLEV2.pdf");

    /*
     *
     * 					Start of real code
     *
     */

    //		//workign with jpedal, will read from inputstream
    //	      PdfDecoder decode_pdf = new PdfDecoder(true);
    //	      try{
    ////	      decode_pdf.openPdfFileFromInputStream(is,true); //file
    //	      decode_pdf.openPdfFile("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Test.pdf");  ///DEUG LINE
    ////	      BufferedImage img = decode_pdf.getPageAsImage(1);
    ////	      decode_pdf.closePdfFile();
    ////	      File fileToSave = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/src/main/java/RPC/jpedalRPCTEST1.jpg");
    ////		  ImageIO.write(img, "jpg", fileToSave);
    ////		  JFrame frame = new JFrame("jpedal buffered image");
    ////			Panel panel = new Panel();
    ////			frame.getContentPane().add(new JLabel(new ImageIcon(img)));
    ////			frame.pack();
    //////			frame.setLocationRelativeTo(null);
    ////			frame.setVisible(true);
    //	      PdfFileInformation fileinfo = decode_pdf.getFileInformationData();
    //	      String[] Fnames = fileinfo.getFieldValues();
    //	      for(int i = 0 ; i < Fnames.length; i++){
    //	    	  System.out.println("fname info = " + Fnames[i]);
    //	      }
    //	      System.out.println("xml data = " + fileinfo.getFileXMLMetaData());
    //	      System.out.println("name of the input stream file = " + decode_pdf.getFileName());
    //	      }
    //	      catch(PdfException e) {
    //			    e.printStackTrace();//return back and do the rpc to the user ... use return and check
    // returns?
    //
    //	      }

    //		File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_TestMongo_Graded.pdf"); //to large, need to do
    // some scaling
    //		File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Test_Inputs.pdf"); //working
    //		File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Grade_Random.pdf");
    //		File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_TestMongo_Graded_Vsmaller.pdf");
    File PDF_file =
        new File(
            "/Users/angellopozo/Dropbox/My Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Random_withScore_testnum2_Grade_LARGE.pdf");
    //	    File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Random_withScore_testnum2_Grade_LARGE_MISTAKES_doubles.pdf");
    //	    File PDF_file = new File("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Random_withScore_testnum2_Grade_LARGE_MISTAKES_noreply.pdf");

    // just testing. I get a bufferedImageLuminanceSource.java.39 -> grabbing image file dimensions.
    //		PdfDecoder decode_pdf = new PdfDecoder(true);
    //		decode_pdf.openPdfFile("/Users/angellopozo/Dropbox/My
    // Code/java/MainRabbitMongo/Resources/CreatedPDF_Mongo_Grade_Random.pdf");
    //		int numpages = decode_pdf.getPageCount();

    PDDocument doc = PDDocument.load(PDF_file); // used to get page numbers
    int numpages = doc.getNumberOfPages(); // get page numbers for for loop
    int[] CorrectlyAnswered = new int[Questions.size()]; // number of correct answers
    int[] IncorrectlyAnswered =
        new int[Questions.size()]; // number of incorrectly answered responses
    byStudent bystudent =
        new byStudent(
            numofquestions,
            numofstudents); // create grading instance //Initialize with number of students
    byQuestion byquestion = new byQuestion(numofquestions, numofstudents);
    System.out.println("result size = " + CorrectlyAnswered.length);
    // need to fill the score array in byquestions
    for (int i = 0; i < Questions.size(); i++) {
      //			System.out.println("Score for this question = " +
      // Questions.get(i).get("Score").getDoubleValue());
      byquestion.ScoreDefault[i] = Questions.get(i).get("Score").getDoubleValue();
    } // end of filling score array in byquestion

    //		int numpages = decode_pdf.getPageCount(); //get page numbers for for loop
    System.out.println(
        "number of pages = "
            + numpages); // check that the number of pages is reasonable; we don't want this to be
    // too large (call the DB and return if it is)
    System.out.println("____________________________________");
    //		   JFrame frame = new JFrame(); //window popup
    //		ArrayList Results = new ArrayList(); //Array of the answer locations
    //		ArrayList WA = new ArrayList(); //array of wrong answers that were selected by the students
    //		ArrayList SR = new ArrayList(); //holding accumulated data below. selected answers array
    int numoffails = 0;
    int Aindex = 0;
    //		int Qindex = 0;
    int[][] Selections = new int[2][Questions.size()]; // student , question
    int[][] SelectionTotal = new int[Questions.size()][4]; // question, answer selected
    for (int i = 0; i < numpages; i++) { // for every page

      //		    	File PDF_file = new File("/Users/angellopozo/Documents/TestImages/PDF_CRICLEV2.pdf");
      // convert page to PDF
      BufferedImage PDF_img = ConvertPageToImage(PDF_file, i);
      //		    	 BufferedImage PDF_img = decode_pdf.getPageAsImage(i);

      // START creating luminance source
      LuminanceSource lumSource = new BufferedImageLuminanceSource(PDF_img);
      BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(lumSource));

      Reader reader = new QRCodeReader(); // create qr reader
      GenericMultipleBarcodeReader multireader = new GenericMultipleBarcodeReader(reader);

      Hashtable<DecodeHintType, Object> hints = new Hashtable<DecodeHintType, Object>();
      hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);

      TreeMap<String, Rectangle2D> sortedBarcodeResults = new TreeMap<String, Rectangle2D>();
      Result results[] = null;
      try {
        results = multireader.decodeMultiple(bitmap, hints);
      } catch (ReaderException re) {
        return;
      } // end of try
      // END creating luminance source

      // go through each found QR Code and draw a box around it
      BufferedImage outimage = PDF_img; // copy of the pdf image
      Graphics2D g2 = outimage.createGraphics();
      g2.setColor(Color.green);
      g2.setStroke(new BasicStroke(3));
      // draw boxes around the found qrcodes
      int index = 0; // debug line to save images
      for (Result result : results) {
        System.out.println("barcode result: " + result.getText());
        double x1 = result.getResultPoints()[0].getX(); // top left
        double y1 = result.getResultPoints()[0].getY(); // top left
        double x2 = result.getResultPoints()[1].getX(); // top right
        double y2 = result.getResultPoints()[1].getY(); // top right
        double x3 = result.getResultPoints()[2].getX(); // bottom left
        double y3 = result.getResultPoints()[2].getY(); // bottom left
        // double x4 = result.getResultPoints()[3].getX(); //bottom right (bottom right square
        // location..some qr have it)
        //  double y4 = result.getResultPoints()[3].getY(); //bottom right (bottom right square
        // location..some qr have it)
        Rectangle2D rectbox = new Rectangle2D.Double(x2, y2, (x3 - x2), (y1 - y2));
        // Double buffer = 10.0;//highly dependent on the size of the qrcode
        // Rectangle2D rectbox = new Rectangle2D.Double(x2-buffer, y2-buffer, (x3-x2)+2*buffer,
        // (y1-y2)+2*buffer);
        //						    System.out.println("barcode location: " + x1 +" "+ y1 +" "+ x2 +" "+ y2 + " " +
        // x3 +" "+ y3);
        // System.out.println("barcode location: " + x3 +" "+ y3+" "+ x4+" "+ y4+"\n");// +" "+
        // (x2-x1) +" "+ (y2-y1) +"\n");
        sortedBarcodeResults.put(
            result.getText(), rectbox); // (qrdecoded string , rectangle box in pixels)

        g2.draw(rectbox); // draw box around qrcode

        Rectangle2D bubblebox =
            new Rectangle2D.Double(
                x2 + (x3 - x2) + 15, y2 - 20, 45, (y1 - y2) + 55); // box around bubbles
        g2.draw(bubblebox); // area that the bubbles exist in the image

        BufferedImage subBubble =
            PDF_img.getSubimage(
                (int) (x2 + (x3 - x2) + 15),
                (int) (y2 - 20),
                45,
                (int) ((y1 - y2) + 55)); // box around bubbles
        IplImage ipl_subBubble = IplImage.createFrom(subBubble); // convert subimage into iplimage
        IplImage ipl_subBubble_large =
            cvCreateImage(
                cvSize(ipl_subBubble.width() * 4, ipl_subBubble.height() * 4),
                ipl_subBubble.depth(),
                ipl_subBubble.nChannels());
        cvResize(ipl_subBubble, ipl_subBubble_large, CV_INTER_CUBIC); // enlarge image
        IplImage ipl_subBubble_gray =
            cvCreateImage(
                cvSize(ipl_subBubble_large.width(), ipl_subBubble_large.height()),
                IPL_DEPTH_8U,
                1); // create black and white version of page
        // IplImage ipl_subBubble_gray = ipl_subBubble_large.clone();

        if (ipl_subBubble_large.nChannels() > 1) {
          cvCvtColor(ipl_subBubble_large, ipl_subBubble_gray, CV_RGB2GRAY);
        } else {
          //  	IplImage ipl_subBubble_gray = ipl_subBubble_large.clone();
        }

        cvThreshold(ipl_subBubble_gray, ipl_subBubble_gray, 100, 255, CV_THRESH_OTSU);
        cvSmooth(ipl_subBubble_gray, ipl_subBubble_gray, CV_GAUSSIAN, 9, 9, 2, 2);
        CvMemStorage circles = CvMemStorage.create();

        // show bubbles, check this if no grading is working
        //							CanvasFrame smoothed = new CanvasFrame("gray image");
        //							smoothed.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
        //							smoothed.showImage(ipl_subBubble_gray);

        CvSeq seq =
            cvHoughCircles(
                ipl_subBubble_gray,
                circles,
                CV_HOUGH_GRADIENT,
                1,
                50,
                80,
                20,
                32,
                (int) (ipl_subBubble_gray.height() / (7)));

        Integer[][] FilledBubbles =
            new Integer[4][4]; // array holds the # of pixels seen and the y dimension of the subimage
        //							Vector<CvPoint> centers = new Vector<CvPoint>(4);//the 4 can be seq.total()
        for (int j = 0; j < seq.total(); j++) { // draw a circle around each circle found
          CvPoint3D32f xyr = new CvPoint3D32f(cvGetSeqElem(seq, j));
          CvPoint center = new CvPoint(Math.round(xyr.x()), Math.round(xyr.y()));
          int radius = Math.round(xyr.z());
          cvCircle(ipl_subBubble_large, center, 3, CvScalar.GREEN, -1, 8, 0); // center of circle
          cvCircle(ipl_subBubble_large, center, radius, CvScalar.BLUE, 3, 8, 0); // outer circle
          FilledBubbles[j][0] =
              FindBubbleSelected(center, radius, ipl_subBubble_gray); // bubble selected area
          //						        FilledBubbles[j][0] = 1; //here to get rid of dimensions error
          FilledBubbles[j][1] = Math.round(center.x());
          FilledBubbles[j][2] = Math.round(center.y());
          FilledBubbles[j][3] = Math.round(radius);
          // System.out.println("Filled bubble Count = "+ FilledBubbles[j]);
        } // end of look for circles for

        //							//the algorithm may not find circles //was trying to fix an old error, solved it by
        // fixing the size of the image in the pdf-to-image conversion
        //							int anynull = anynulls(FilledBubbles);
        ////							System.out.println("anynull = "+ anynull);
        //							if(anynull == 1){
        //								numoffails++;
        //								continue; //this question, not all circles were found.
        //							}//end of null check //this means not all 4 circles were found

        // System.out.println("filled bubbles size = " + FilledBubbles[0].length);
        // System.out.println("filled bubbles size = " + FilledBubbles.length);
        FilledBubbles =
            SortbyYdimention(
                FilledBubbles); // note: check for nulls here, since a null entry would break the sort

        // print out the area of each bubble
        // for (Integer[] tp : FilledBubbles) {
        //   System.out.println("Filled bubble Count = " + tp[0] + " loc = " + tp[1]);
        // }

        int[] selectResult =
            ReturnIndexOfmax(FilledBubbles); // maxIndex = the answer submitted by the student
        int maxIndex = selectResult[0];
        int isfound = 1;
        int ismulti = 0;
        // selectResult[1] = number of filled bubbles; selectResult[2] == 1 means no selection was made
        if (selectResult[1] > 1 || selectResult[2] == 1) {
          System.out.println("more than one bubble was selected");
          // Aindex++; // index into the answer array; would need incrementing to keep the data consistent
          // index++; // (0 - number of questions); would need incrementing to keep the data consistent
          // numoffails++; // student selected too many inputs
          isfound = 0;
          ismulti = 1;
          // continue;
        } // end of selectResult check

        /* GRADE THE RESULTS */
        // TestObject = Mongo query result, Aindex = index of the question being graded

        String QID =
            TestAnswerSheet.get(Aindex).get("IDS").getTextValue(); // grab the question ID
        int CorrectAnswerloc =
            TestAnswerSheet.get(Aindex).get("Answer").getIntValue(); // correct answer location

        System.out.println("Correct answer location = " + CorrectAnswerloc);
        System.out.println("IDS = " + QID + " QI = " + Aindex);

        int iscorrect = 0;
        if (ismulti == 1) { // if multiple selected
          iscorrect = 0;
        } else { // if only one input for a question is found
          iscorrect = checkcorrectness(CorrectAnswerloc, maxIndex);
        }

        // record what was found for this question in the student's answer-sheet document
        BasicDBObject newvals = new BasicDBObject();
        String Answersnum = "TestAnswerSheet." + Integer.toString(Aindex);
        newvals.put(Answersnum + ".found", isfound);
        newvals.put(Answersnum + ".multiselect", ismulti);
        // newvals.put(Answersnum + ".correct", iscorrect);
        // newvals.put(Answersnum + ".selected", maxIndex);
        BasicDBObject posop = new BasicDBObject("$set", newvals);
        System.out.println("set query = " + posop.toString());
        coll.update(new BasicDBObject("_id", new ObjectId(message)), posop);

        // System.out.println("first character = " + QID.charAt(0));
        // System.out.println("last character = " + QID.charAt(2));

        char stud = QID.charAt(0); // first character of QID identifies the student
        char Q = QID.charAt(2); // third character of QID identifies the question
        System.out.println("Student num = " + stud);
        System.out.println(
            "Q num = " + Character.getNumericValue(Q - 1)); // QID numbering starts at 1, hence the -1

        // Aggregate information to create the Test Results array
        int Qint =
            Aindex % numofquestions; // zero-based question number on the test; should correspond to char Q
        // System.out.println("Score for this question = " + Questions.get(Qint).get("Score").getDoubleValue());
        if (iscorrect == 1) {
          System.out.println("mod result = " + Qint);
          System.out.println("Question = " + Qint + " is correct = " + iscorrect);
          CorrectlyAnswered[Qint] =
              CorrectlyAnswered[Qint] + 1; // byquestion.IncrementCorrectlyAnswered(Qint);
          byquestion.IncrementCorrectlyAnswered(Qint);
          bystudent.IncrementCorrectlyAnswered(Character.getNumericValue(stud));
          byquestion.InsertScore(Character.getNumericValue(stud), Qint);
        } else if (iscorrect == 0) { // a wrong answer was selected, or multiple selections were made
          System.out.println("mod result = " + Qint);
          System.out.println("Question = " + Qint + " is Incorrect = " + iscorrect);
          IncorrectlyAnswered[Qint] =
              IncorrectlyAnswered[Qint] + 1; // byquestion.IncrementIncorrectlyAnswered(Qint);
          byquestion.IncrementIncorrectlyAnswered(Qint);
          bystudent.IncrementIncorrectlyAnswered(Character.getNumericValue(stud));
        }

        byquestion.IncrementSelectedAnswer(
            maxIndex, Qint); // increment the number of times a selection was made

        Selections[Character.getNumericValue(stud)][Qint] = maxIndex;
        SelectionTotal[Qint][maxIndex] =
            SelectionTotal[Qint][maxIndex]
                + 1; // byquestion.IncrementSelectedWrongAnwer(Qint, maxIndex);
        bystudent.IncrementRepliedTo(Character.getNumericValue(stud));

        Aindex++; // index for looping through the answer array
        /* END GRADE THE RESULTS */

        // draw a red circle around the selected bubble
        CvPoint selectedCenter =
            new CvPoint(
                FilledBubbles[maxIndex][1].intValue(), FilledBubbles[maxIndex][2].intValue());
        cvCircle(
            ipl_subBubble_large,
            selectedCenter,
            FilledBubbles[maxIndex][3].intValue(),
            CvScalar.RED,
            3,
            8,
            0);

        // save sub-images so the results can be inspected while debugging
        // String subimagename = new String("subimage_" + i + "_" + index + ".jpg");
        index++; // (0 - number of questions)
        // cvSaveImage(subimagename, ipl_subBubble_large);
        // display the annotated bubbles in a window named after the question
        // String que = new String("_for_" + result.getText());
        // final CanvasFrame canvas = new CanvasFrame("Bubbles_Found" + que);
        // // request closing of the application when the image window is closed
        // canvas.setDefaultCloseOperation(javax.swing.JFrame.EXIT_ON_CLOSE);
        // // show image on window
        // canvas.showImage(ipl_subBubble_large);

        System.out.println("____________________________________");
      } // end of for results loop
      // end drawing boxes around each QR CODE

      //					//START code to display in JFRAME
      //					if(i == 0){
      //			       frame.getContentPane().setLayout(new FlowLayout());
      //			       frame.getContentPane().add(new JLabel(new ImageIcon(outimage)));
      //			       frame.pack();
      //			       frame.setVisible(true);
      //					}
      //					else {
      //
      //						frame.getContentPane().add(new JLabel(new ImageIcon(outimage)));
      //				        frame.pack();
      //				        frame.setVisible(true);
      //
      //					}
      //					//END code to display in JFRAME

    } // end of for loop of pages

    // output how well the students performed on the test
    for (int i = 0; i < numofstudents; i++) {
      System.out.println(
          "student " + i + " answered correctly: " + bystudent.CorrectlyAnswered[i] + " questions");
      System.out.println(
          "student " + i + " answered incorrectly: " + bystudent.IncorrectlyAnswered[i] + " questions");
      System.out.println("student " + i + " answered: " + bystudent.RepliedTo[i] + " questions");
    }

    // results by student and question
    for (int i = 0; i < Selections.length; i++) {
      for (int j = 0; j < Selections[0].length; j++) {
        System.out.println("Student (" + i + "," + j + ") selected = " + Selections[i][j]);
      }
    }

    // results by question and reply
    for (int i = 0; i < SelectionTotal.length; i++) {
      System.out.println(
          "Selection below = "
              + byquestion.SelectedWrongAnswer_0[i]
              + " "
              + byquestion.SelectedWrongAnswer_1[i]
              + " "
              + byquestion.SelectedWrongAnswer_2[i]
              + " "
              + byquestion.SelectedCorrectAnswer[i]
              + " ");
      System.out.println(
          "correctly answered = " + byquestion.CorrectlyAnswered[i] + " " + CorrectlyAnswered[i]);
      for (int j = 0; j < SelectionTotal[0].length; j++) {
        System.out.println("Question (" + i + "," + j + ") selected = " + SelectionTotal[i][j]);
      }
    } // end of SelectionTotal for loop

    byquestion.ComputePercentCorrectlyAnswered();
    byquestion.ComputePercentIncorrectlyAnswered();
    byquestion.ComputePercentCorrectSTD();
    byquestion.ComputeMeanScoreByQuestion(); // average score for any question by question
    //		byquestion.ComputeMeanScoreByStudent(); //average score for any one question by student
    byquestion.ComputeMeanbyQuestionSTD();
    bystudent.ComputeTotalScores(
        byquestion.Scoresbystudent); // compute the total scores for any student
    bystudent.ComputeMeanTotalScore(byquestion.Scoresbystudent);
    byTest bytest = new byTest(numofquestions, numofstudents, bystudent);
    bytest.ComputeMeanScoreTest();
    bytest.ComputeMeanScoreSTD();
    bytest.ComputePercentCorrecltyAnswered();
    bytest.ComputePercentIncorrecltyAnswered();

    // create Test Results by question
    ArrayList<BasicDBObject> TestResultbyQuestion =
        new ArrayList<BasicDBObject>(); // Array of the answer locations
    for (int j = 0; j < byquestion.CorrectlyAnswered.length; j++) {
      BasicDBObject ByQuestionVals = new BasicDBObject();
      ByQuestionVals.put("SelectedWrongAnswer_0", byquestion.SelectedWrongAnswer_0[j]);
      ByQuestionVals.put("SelectedWrongAnswer_1", byquestion.SelectedWrongAnswer_1[j]);
      ByQuestionVals.put("SelectedWrongAnswer_2", byquestion.SelectedWrongAnswer_2[j]);
      ByQuestionVals.put("SelectedCorrectAnswer", byquestion.SelectedCorrectAnswer[j]);
      ByQuestionVals.put("CorrectlyAnswered", byquestion.CorrectlyAnswered[j]);
      ByQuestionVals.put("IncorrectlyAnswered", byquestion.IncorrectlyAnswered[j]);
      ByQuestionVals.put("PercentCorrect", byquestion.PercentCorrectlyAnswered[j]);
      ByQuestionVals.put("PercentIncorrect", byquestion.PercentIncorrectlyAnswered[j]);
      ByQuestionVals.put("STD", byquestion.STD[j]);
      ByQuestionVals.put("Mean", byquestion.ScoreMean[j]); // means score for this question
      ByQuestionVals.put("_id", new ObjectId());
      TestResultbyQuestion.add(ByQuestionVals); // add the per-question values to the results list
      //			System.out.println("Question " + j + " numcorrect = " + CorrectlyAnswered[j]);
    }

    // create Test Results by test
    BasicDBObject ByTestVals = new BasicDBObject();
    ByTestVals.put("Mean", bytest.ScoreMean);
    ByTestVals.put("STD", bytest.ScoreSTD);
    ByTestVals.put("PercentCorrect", bytest.PercentCorrectlyAnswered);
    ByTestVals.put("PercentInorrect", bytest.PercentIncorrectlyAnswered);
    ByTestVals.put("_id", new ObjectId());

    // record that the test has been graded, and when
    BasicDBObject TestGradedVals = new BasicDBObject();
    TestGradedVals.put("WasGraded", 1);
    Date now = new Date();
    TestGradedVals.put("GradeOn", now);
    TestGradedVals.put("_id", new ObjectId());

    // create Test Results by student
    ArrayList<BasicDBObject> TestResultbyStudent =
        new ArrayList<BasicDBObject>(); // array of the results by student
    for (int j = 0; j < bystudent.CorrectlyAnswered.length; j++) {
      BasicDBObject ByStudentVals = new BasicDBObject();
      ByStudentVals.put("CorrectlyAnswered", bystudent.CorrectlyAnswered[j]);
      ByStudentVals.put("IncorrectlyAnswered", bystudent.IncorrectlyAnswered[j]);
      ByStudentVals.put("RepliedTo", bystudent.RepliedTo[j]);
      ByStudentVals.put("ScoreTotal", bystudent.ScoreTotal[j]);
      // ByStudentVals.put("ScoreMean", bystudent.ScoreMean[j]); // still wrong, unless the intent is the mean score for a single question
      ByStudentVals.put("_id", new ObjectId());
      TestResultbyStudent.add(ByStudentVals); // add the per-student values to the results list
      //			System.out.println("Question " + j + " numcorrect = " + CorrectlyAnswered[j]);
    }

    // v1
    BasicDBObject TRbyQuestions = new BasicDBObject("TRbyQuestions", TestResultbyQuestion);
    BasicDBObject set = new BasicDBObject("$set", TRbyQuestions);
    //		System.out.println("Test result query = " + TRbyQuestions);
    coll.update(new BasicDBObject("_id", new ObjectId(message)), set);

    BasicDBObject TRbyTest = new BasicDBObject("TRbyTest", ByTestVals);
    BasicDBObject settest = new BasicDBObject("$set", TRbyTest);
    coll.update(new BasicDBObject("_id", new ObjectId(message)), settest);

    BasicDBObject TestGradedobject = new BasicDBObject("TestGraded", TestGradedVals);
    BasicDBObject settestgraded = new BasicDBObject("$set", TestGradedobject);
    coll.update(new BasicDBObject("_id", new ObjectId(message)), settestgraded);

    BasicDBObject TRbyStudent = new BasicDBObject("TRbyStudents", TestResultbyStudent);
    BasicDBObject set1 = new BasicDBObject("$set", TRbyStudent);
    coll.update(new BasicDBObject("_id", new ObjectId(message)), set1);

    // v2: alternative approach
    // DBObject TestObject2 = coll.findOne(new BasicDBObject("_id", new ObjectId(message))); // the actual Mongo query
    // TestObject2.put("CorrectlyAnswered", TestResultsarray);
    // coll.save(TestObject2);

    System.out.println("Failed to grade " + numoffails + " questions");
    doc.close();
  } // end of Grader
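
  /*
   * Hedged sketch (not part of the original Grader): the circle-detection step above,
   * pulled out into a minimal, self-contained helper so the cvHoughCircles parameters
   * can be tuned in isolation. The file name, minimum radius, and method name are
   * illustrative assumptions; the static imports are assumed to come from the JavaCV
   * opencv_core, opencv_imgproc, and opencv_highgui wrappers already used above.
   */
  public static void detectBubblesSketch(String fileName) {
    // load the page directly as a single-channel grayscale image (placeholder file name)
    IplImage gray = cvLoadImage(fileName, CV_LOAD_IMAGE_GRAYSCALE);
    if (gray == null) {
      return; // nothing to do if the image could not be read
    }
    cvThreshold(gray, gray, 100, 255, CV_THRESH_OTSU); // binarize; Otsu picks the level
    cvSmooth(gray, gray, CV_GAUSSIAN, 9, 9, 2, 2); // blur so the Hough transform sees smooth edges
    CvMemStorage storage = CvMemStorage.create();
    CvSeq found =
        cvHoughCircles(
            gray,
            storage,
            CV_HOUGH_GRADIENT,
            1, // dp: accumulator resolution = image resolution
            50, // minimum distance between detected centers
            80, // Canny high threshold
            20, // accumulator threshold for center detection
            10, // minimum radius (assumed)
            gray.height() / 7); // maximum radius, as in the Grader above
    for (int j = 0; j < found.total(); j++) {
      CvPoint3D32f xyr = new CvPoint3D32f(cvGetSeqElem(found, j));
      System.out.println("circle at (" + xyr.x() + ", " + xyr.y() + ") radius = " + xyr.z());
    }
  }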
示例#26
 private void initRGB() {
   this.red = gray.clone();
   this.green = gray.clone();
   this.blue = gray.clone();
   cvSplit(color, blue, green, red, null);
 }
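
 /*
  * Hedged sketch (not part of the original class): the inverse of initRGB() above.
  * It assumes the same fields (color, red, green, blue) and recombines the three
  * planes into the BGR color image with cvMerge from opencv_core.
  */
 private void mergeRGB() {
   // cvSplit above fills the planes in blue, green, red order because the color
   // image is BGR; cvMerge takes them back in the same channel order.
   cvMerge(blue, green, red, null, color);
 }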
示例#27
 public int getWidth() {
   return img.width();
 }
示例#28
 public int getHeight() {
   return img.height();
 }
示例#29
  public boolean record(IplImage image, int pixelFormat) throws Exception {
    if (video_st == null) {
      throw new Exception(
          "No video output stream (Is imageWidth > 0 && imageHeight > 0 and has start() been called?)");
    }
    int ret;

    if (image == null) {
      /* no more frame to compress. The codec has a latency of a few
      frames if using B frames, so we get the last frames by
      passing the same picture again */
    } else {
      int width = image.width();
      int height = image.height();
      int step = image.widthStep();
      BytePointer data = image.imageData();

      if (pixelFormat == AV_PIX_FMT_NONE) {
        int depth = image.depth();
        int channels = image.nChannels();
        if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 3) {
          pixelFormat = AV_PIX_FMT_BGR24;
        } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 1) {
          pixelFormat = AV_PIX_FMT_GRAY8;
        } else if ((depth == IPL_DEPTH_16U || depth == IPL_DEPTH_16S) && channels == 1) {
          pixelFormat = AV_HAVE_BIGENDIAN() ? AV_PIX_FMT_GRAY16BE : AV_PIX_FMT_GRAY16LE;
        } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 4) {
          pixelFormat = AV_PIX_FMT_RGBA;
        } else if ((depth == IPL_DEPTH_8U || depth == IPL_DEPTH_8S) && channels == 2) {
          pixelFormat = AV_PIX_FMT_NV21; // Android's camera capture format
          step = width;
        } else {
          throw new Exception(
              "Could not guess pixel format of image: depth=" + depth + ", channels=" + channels);
        }
      }

      if (video_c.pix_fmt() != pixelFormat
          || video_c.width() != width
          || video_c.height() != height) {
        /* convert to the codec pixel format if needed */
        img_convert_ctx =
            sws_getCachedContext(
                img_convert_ctx,
                width,
                height,
                pixelFormat,
                video_c.width(),
                video_c.height(),
                video_c.pix_fmt(),
                SWS_BILINEAR,
                null,
                null,
                null);
        if (img_convert_ctx == null) {
          throw new Exception(
              "sws_getCachedContext() error: Cannot initialize the conversion context.");
        }
        avpicture_fill(tmp_picture, data, pixelFormat, width, height);
        avpicture_fill(picture, picture_buf, video_c.pix_fmt(), video_c.width(), video_c.height());
        tmp_picture.linesize(0, step);
        sws_scale(
            img_convert_ctx,
            new PointerPointer(tmp_picture),
            tmp_picture.linesize(),
            0,
            height,
            new PointerPointer(picture),
            picture.linesize());
      } else {
        avpicture_fill(picture, data, pixelFormat, width, height);
        picture.linesize(0, step);
      }
    }

    if ((oformat.flags() & AVFMT_RAWPICTURE) != 0) {
      if (image == null) {
        return false;
      }
      /* raw video case. The API may change slightly in the future for that? */
      av_init_packet(video_pkt);
      video_pkt.flags(video_pkt.flags() | AV_PKT_FLAG_KEY);
      video_pkt.stream_index(video_st.index());
      video_pkt.data(new BytePointer(picture));
      video_pkt.size(Loader.sizeof(AVPicture.class));
    } else {
      /* encode the image */
      av_init_packet(video_pkt);
      video_pkt.data(video_outbuf);
      video_pkt.size(video_outbuf_size);
      picture.quality(video_c.global_quality());
      if ((ret =
              avcodec_encode_video2(
                  video_c, video_pkt, image == null ? null : picture, got_video_packet))
          < 0) {
        throw new Exception(
            "avcodec_encode_video2() error " + ret + ": Could not encode video packet.");
      }
      picture.pts(picture.pts() + 1); // magic required by libx264

      /* if zero size, it means the image was buffered */
      if (got_video_packet[0] != 0) {
        if (video_pkt.pts() != AV_NOPTS_VALUE) {
          video_pkt.pts(av_rescale_q(video_pkt.pts(), video_c.time_base(), video_st.time_base()));
        }
        if (video_pkt.dts() != AV_NOPTS_VALUE) {
          video_pkt.dts(av_rescale_q(video_pkt.dts(), video_c.time_base(), video_st.time_base()));
        }
        video_pkt.stream_index(video_st.index());
      } else {
        return false;
      }
    }

    synchronized (oc) {
      /* write the compressed frame in the media file */
      if (interleaved && audio_st != null) {
        if ((ret = av_interleaved_write_frame(oc, video_pkt)) < 0) {
          throw new Exception(
              "av_interleaved_write_frame() error "
                  + ret
                  + " while writing interleaved video frame.");
        }
      } else {
        if ((ret = av_write_frame(oc, video_pkt)) < 0) {
          throw new Exception("av_write_frame() error " + ret + " while writing video frame.");
        }
      }
    }
    return picture.key_frame() != 0;
  }
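
  /*
   * Hedged usage sketch (not part of the recorder class): transcoding a file frame by
   * frame with the older IplImage-based JavaCV API that this record() method belongs to.
   * The file names are placeholders, and exact method names and signatures may differ
   * slightly between JavaCV versions.
   */
  public static void transcodeSketch() throws Exception {
    FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("input.mp4"); // placeholder input
    grabber.start();
    FFmpegFrameRecorder recorder =
        new FFmpegFrameRecorder("output.mp4", grabber.getImageWidth(), grabber.getImageHeight());
    recorder.setFrameRate(grabber.getFrameRate());
    recorder.start();
    IplImage frame;
    while ((frame = grabber.grab()) != null) {
      // with no explicit pixel format, record() takes the AV_PIX_FMT_NONE path above
      // and guesses the format from the image depth and channel count
      recorder.record(frame);
    }
    recorder.stop();
    grabber.stop();
  }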
示例#30
 public static BufferedImage paint(BufferedImage input, Block node) {
   IplImage canvas = IplImage.createFrom(input);
   paint(canvas, node);
   return canvas.getBufferedImage();
 }
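
 /*
  * Hedged sketch (not from the original class): the BufferedImage <-> IplImage round
  * trip that paint() relies on, shown in isolation. Since the Block type and the
  * paint(IplImage, Block) overload are not shown here, drawing a simple rectangle
  * with cvRectangle stands in for the actual painting step.
  */
 public static BufferedImage drawBoxSketch(BufferedImage input) {
   IplImage canvas = IplImage.createFrom(input); // wrap the BufferedImage pixels in an IplImage
   cvRectangle(
       canvas,
       cvPoint(10, 10),
       cvPoint(canvas.width() - 10, canvas.height() - 10),
       CvScalar.RED,
       2, // thickness
       8, // line type
       0); // shift
   return canvas.getBufferedImage(); // copy the result back out as a BufferedImage
 }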