Code example #1
File: KitBotXII.java  Project: garychen6/robotics610
 /** This function is called once each time the robot enters operator control */
 public void teleopInit() {
   ColorImage image;
   try {
     image = camera.getImage();
     image.write("unedited.jpg");
     BinaryImage bImage = image.thresholdRGB(160, 255, 160, 255, 160, 255);
     bImage.write("whitemask.jpg");
     MonoImage mImage = image.getLuminancePlane();
     mImage.write("luminancePlane.jpg");
     image.free();
     bImage.free();
   } catch (NIVisionException e) {
     System.out.println("Error retrieving image: NIVisionException");
     e.printStackTrace();
   } catch (AxisCameraException e) {
     System.out.println("Error retrieving image: AxisCameraException");
     e.printStackTrace();
   }
 }
Code example #2
 public boolean processImage() {
   boolean debugWriteImages = true;
   boolean success = cam.freshImage();
   if (success) {
     try {
       System.out.println("In Try loop");
       ColorImage im = cam.getImage();
       System.out.println("Got image");
       if (debugWriteImages) {
         im.write("image1.jpg");
         System.out.println("Wrote color image");
       }
       BinaryImage thresholdIm =
           im.thresholdRGB(redLow, redHigh, greenLow, greenHigh, blueLow, blueHigh);
       if (debugWriteImages) {
         thresholdIm.write("image2.jpg");
          System.out.println("Wrote Threshold Image");
       }
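        // Filter by the bounding-box criteria first, then by the inertia criteria; each stage prints its particle count.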
       BinaryImage filteredBoxIm = thresholdIm.particleFilter(boxCriteria);
       ParticleAnalysisReport[] xparticles = filteredBoxIm.getOrderedParticleAnalysisReports();
       System.out.println(xparticles.length + " particles at " + Timer.getFPGATimestamp());
       BinaryImage filteredInertiaIm = filteredBoxIm.particleFilter(inertiaCriteria);
       ParticleAnalysisReport[] particles = filteredInertiaIm.getOrderedParticleAnalysisReports();
       System.out.println(particles.length + " particles at " + Timer.getFPGATimestamp());
       // Loop through targets, find highest one.
       // Targets aren't found yet.
       highTarget = Target.NullTarget;
       target1 = Target.NullTarget;
       target2 = Target.NullTarget;
       target3 = Target.NullTarget;
       target4 = Target.NullTarget;
       System.out.println("Targets created");
       double minY = IMAGE_HEIGHT; // Minimum y <-> higher in image.
       for (int i = 0; i < particles.length; i++) {
         Target t = new Target(i, particles[i]);
         if (t.ratio > ratioMin && t.ratio < ratioMax) {
           addTarget(t);
            if (t.centerY <= minY) {
              minY = t.centerY; // track the smallest y seen so far (smaller y = higher in the image)
              highTarget = t;
            }
         }
         System.out.println(
             "Target "
                 + i
                 + ": ("
                 + t.centerX
                 + ","
                 + t.centerY
                 + ") Distance: "
                 + getDistance(t));
       }
       System.out.println("Best target: " + highTarget.index);
       System.out.println("Distance to the target: " + getDistance(highTarget));
       if (debugWriteImages) {
         filteredBoxIm.write("image3.jpg");
         filteredInertiaIm.write("image4.jpg");
         System.out.println("Wrote Images");
       }
       // Free memory from images.
       im.free();
       thresholdIm.free();
       filteredBoxIm.free();
       filteredInertiaIm.free();
      } catch (AxisCameraException ex) {
        System.out.println("AxisCameraException caught: " + ex.getMessage());
        ex.printStackTrace();
      } catch (NIVisionException ex) {
        System.out.println("NIVisionException caught: " + ex.getMessage());
        ex.printStackTrace();
     }
   }
   return success;
 }
Code example #3
  public DetectedPoint[] getTargetCoordinates(TrackingCriteria criteria) {
    // Most important bit of this code...
    final long thisAlgorithmBecomingSkynetCost = 99999999;
    ColorImage colorImage = null;
    BinaryImage binaryImage = null;
    BinaryImage resultImage = null;
    DetectedPoint[] results = null;

    try {
      if (!USE_CAMERA) {
        colorImage = new RGBImage("inputImage.jpg");
      } else {
        do {
          // Free any stale frame before grabbing another so skipped frames are not leaked.
          if (colorImage != null) colorImage.free();
          colorImage = imageTrackingCamera.getImage();
        } while (!imageTrackingCamera.freshImage());
      }

      int hueLow = criteria.getMinimumHue();
      int hueHigh = criteria.getMaximumHue();
      int saturationLow = criteria.getMinimumSaturation();
      int saturationHigh = criteria.getMaximumSaturation();
      int valueLow = criteria.getMinimumValue();
      int valueHigh = criteria.getMaximumValue();

      // Attempt to isolate the colours of the LED ring
      binaryImage =
          colorImage.thresholdHSV(
              hueLow, hueHigh, saturationLow, saturationHigh, valueLow, valueHigh);
      // Fill in any detected "particles" to make analysis easier
      // See:
      // http://zone.ni.com/reference/en-XX/help/372916L-01/nivisionconcepts/advanced_morphology_operations/
      // convexHull() returns a new image rather than modifying in place, so keep its result.
      BinaryImage hullImage = binaryImage.convexHull(true);
      binaryImage.free();
      binaryImage = hullImage;
      resultImage = binaryImage.removeSmallObjects(true, 3);

      ParticleAnalysisReport[] reports = resultImage.getOrderedParticleAnalysisReports();
      results = new DetectedPoint[reports.length];
      int pointIndex = 0;
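      // Keep only particles whose aspect ratio and area are within the criteria's tolerances.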
      for (int i = 0; i < reports.length; i++) {
        ParticleAnalysisReport report = reports[i];
        double aspectRatio = report.boundingRectWidth / (double) report.boundingRectHeight; // avoid integer division
        double area = report.particleArea;
        double aspectError = (aspectRatio - criteria.getAspectRatio()) / criteria.getAspectRatio();
        double areaError = (area - criteria.getParticleArea()) / criteria.getParticleArea();
        aspectError = Math.abs(aspectError);
        areaError = Math.abs(areaError);
        if (aspectError < criteria.getAspectTolerance()
            && areaError < criteria.getAreaTolerance()) {
          results[pointIndex] =
              new DetectedPoint(report.center_mass_x_normalized, report.center_mass_y_normalized);
          pointIndex++;
        }
      }

      log(pointIndex + " points accepted out of " + results.length + " candidate particles");
      // Remove the empty slots in the array
      if (pointIndex < results.length) {
        DetectedPoint[] compressedPoints = new DetectedPoint[pointIndex];
        int x = 0;
        for (int i = 0; i < results.length; i++) {
          if (results[i] != null) {
            compressedPoints[x] = results[i];
            x++;
          }
        }
        results = compressedPoints;
      }
    } catch (AxisCameraException ex) {
      log("Unable to grab images from the image tracking camera");
      ex.printStackTrace();
    } catch (NIVisionException ex) {
      log("Encountered a NIVisionException while trying to acquire coordinates");
      ex.printStackTrace();
    } finally {
      try {
        log("We're actually freeing things!");
        // For debugging purposes
        // colorImage.write("colorImage.jpg");
        // binaryImage.write("binaryImage.jpg");
        // resultImage.write("resultImage.jpg");

        if (colorImage != null) colorImage.free();
        if (binaryImage != null) binaryImage.free();
        if (resultImage != null) resultImage.free();
        colorImage = null;
        binaryImage = null;
        resultImage = null;
      } catch (NIVisionException ex) {
        // Really? Throw an exception while freeing memory?
        log("Encountered an exception while freeing memory... Really NI? Really?");
        ex.printStackTrace();
      }
    }
    return results;
  }
Code example #4
  public void autonomous() {

    try {
      /**
       * Do the image capture with the camera and apply the algorithm described above. This sample
       * will either get images from the camera or from an image file stored in the top level
       * directory in the flash memory on the cRIO. The file name in this case is "testImage.jpg"
       */
      ColorImage image = cam.getImage(); // comment if using stored images
      // ColorImage image;                           // next 2 lines read image from flash on cRIO
      //   image = new RGBImage("/testImage.jpg");		// get the sample image from the cRIO flash
      BinaryImage thresholdImage =
          image.thresholdHSV(60, 100, 90, 255, 20, 255); // keep only green objects (hue 60-100)
      // thresholdImage.write("/threshold.bmp");
      BinaryImage convexHullImage = thresholdImage.convexHull(false); // fill in occluded rectangles
      // convexHullImage.write("/convexHull.bmp");
      BinaryImage filteredImage = convexHullImage.particleFilter(cc); // filter out small particles
      // filteredImage.write("/filteredImage.bmp");

      ParticleAnalysisReport[] par = filteredImage.getOrderedParticleAnalysisReports();
      double[] target = new double[] {image.getWidth() / 2.0, image.getHeight()};

      // Free the images as soon as the particle reports have been extracted, so the
      // early returns below cannot leak NIVision memory.
      filteredImage.free();
      convexHullImage.free();
      thresholdImage.free();
      image.free();

      double minDist = -1;
      double[] minMid = new double[0];

      // calculate closest rectangle
      if (par.length == 0) {
        done = true;
        return;
      }
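      // Find the particle whose bounding-box center is closest to the target point (bottom center of the image).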
      for (int i = 0; i < par.length; i++) {
        ParticleAnalysisReport p = par[i];
        double[] mid =
            new double[] {
              p.boundingRectLeft + p.boundingRectWidth / 2.0,
              p.boundingRectTop + p.boundingRectHeight / 2.0
            };
        double dist =
            Math.sqrt(
                (mid[0] - target[0]) * (mid[0] - target[0])
                    + (mid[1] - target[1]) * (mid[1] - target[1]));
        if (minDist == -1 || minDist > dist) {
          minDist = dist;
          minMid = mid;
        }
      }

      if (minDist != -1) {
        if (minMid[1] > target[1] + RobotMap.errorMargin) moveDown();
        else if (minMid[1] < target[1] - RobotMap.errorMargin) moveUp();
        else stopMove();
        if (minMid[0] > target[0] + RobotMap.errorMargin) turnRight();
        else if (minMid[0] < target[0] - RobotMap.errorMargin) turnLeft();
        else turnStop();
        if (stopped1 && stopped2) {
          done = true;
          return;
        }
      } else {
        done = true;
        return;
      }

      // (the AxisCameraException catch below is needed because cam.getImage() is called)
    } catch (NIVisionException ex) {
      ex.printStackTrace();
    } catch (AxisCameraException ace) {
      ace.printStackTrace();
    }
  }
Code example #5
  public TargetScores[] visionAnalyze() {
    // TargetScores[] tScores_Out;// = new TargetScores[1][];
    TargetScores[] tScores = null;
    try {
      /**
       * Do the image capture with the camera and apply the algorithm described above. This sample
       * will either get images from the camera or from an image file stored in the top level
       * directory in the flash memory on the cRIO. The file name in this case is "testImage.jpg"
       */
      ColorImage image = camera.getImage(); // comment if using stored images
      // ColorImage image;                           // next 2 lines read image from flash on cRIO
      // image = new RGBImage("/testImage.jpg");		// get the sample image from the cRIO flash
      BinaryImage thresholdImage =
          image.thresholdHSV(60, 100, 90, 255, 20, 255); // keep only green objects (hue 60-100)
      // thresholdImage.write("/threshold.bmp");
      BinaryImage convexHullImage = thresholdImage.convexHull(false); // fill in occluded rectangles
      // convexHullImage.write("/convexHull.bmp");
      BinaryImage filteredImage = convexHullImage.particleFilter(cc); // filter out small particles
      // filteredImage.write("/filteredImage.bmp");

      // iterate through each particle and score to see if it is a target
      Scores scores[] = new Scores[filteredImage.getNumberParticles()];
      tScores = new TargetScores[filteredImage.getNumberParticles()];
      for (int i = 0; i < scores.length; i++) {

        ParticleAnalysisReport report = filteredImage.getParticleAnalysisReport(i);
        scores[i] = new Scores();
        tScores[i] = new TargetScores();

        scores[i].rectangularity = scoreRectangularity(report);
        scores[i].aspectRatioOuter = scoreAspectRatio(filteredImage, report, i, true);
        scores[i].aspectRatioInner = scoreAspectRatio(filteredImage, report, i, false);
        scores[i].xEdge = scoreXEdge(thresholdImage, report);
        scores[i].yEdge = scoreYEdge(thresholdImage, report);

        if (scoreCompare(scores[i], false)) {
          tScores[i].goal = TargetScores.high;
        } else if (scoreCompare(scores[i], true)) {
          tScores[i].goal = TargetScores.med;
        }
        tScores[i].yVal = report.center_mass_y;
        tScores[i].xVal = report.center_mass_x;
        tScores[i].yValAim = axisToAim('y', tScores[i].yVal);
        /*
        if (scoreCompare(scores[i], false)) {
          System.out.println("particle: " + i + "is a High Goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
          System.out.println("Distance: " + computeDistance(thresholdImage, report, i, false));
        } else if (scoreCompare(scores[i], true)) {
          System.out.println("particle: " + i + "is a Middle Goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
          System.out.println("Distance: " + computeDistance(thresholdImage, report, i, true));
        } else {
          System.out.println("particle: " + i + "is not a goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
        }
        System.out.println("rect: " + scores[i].rectangularity + "ARinner: " + scores[i].aspectRatioInner);
        System.out.println("ARouter: " + scores[i].aspectRatioOuter + "xEdge: " + scores[i].xEdge + "yEdge: " + scores[i].yEdge);
        */

      }

      /**
       * all images in Java must be freed after they are used since they are allocated out of C data
       * structures. Not calling free() will cause the memory to accumulate over each pass of this
       * loop.
       */
      filteredImage.free();
      convexHullImage.free();
      thresholdImage.free();
      image.free();

      // tScores_Out = new TargetScores[tScores.length];
      // tScores_Out = (TargetScores[])tScores.clone();

      // myScores = scores;

    } catch (AxisCameraException ex) { // this is needed if the camera.getImage() is called
      ex.printStackTrace();
    } catch (NIVisionException ex) {
      ex.printStackTrace();
    }
    if (tScores == null) {
      tScores = new TargetScores[1];
      tScores[0] = new NoScores(); // index 0; index 1 would be out of bounds in a length-1 array
    }
    return tScores; // tScores_Out;
  }