Example #1
 public void run() {
   try {
     cam.getBrightness();
     ColorImage image = cam.getImage();
   } catch (Exception e) {
     System.out.println(e.getClass());
   }
 }
Example #2
 /** This function is called once when operator control starts */
 public void teleopInit() {
   ColorImage image;
   try {
     image = camera.getImage();
     image.write("unedited.jpg");
     BinaryImage bImage = image.thresholdRGB(160, 255, 160, 255, 160, 255);
     bImage.write("whitemask.jpg");
     MonoImage mImage = image.getLuminancePlane();
     mImage.write("luminancePlane.jpg");
     image.free();
     bImage.free();
     mImage.free(); // the MonoImage also wraps a C allocation and needs freeing
   } catch (NIVisionException e) {
     System.out.println("Error retrieving image: NIVisionException");
     e.printStackTrace();
   } catch (AxisCameraException e) {
     System.out.println("Error retrieving image: AxisCameraException");
     e.printStackTrace();
   }
 }
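Because teleopInit() above frees its images only on the success path, the ColorImage, BinaryImage, and MonoImage leak whenever getImage(), write(), or thresholdRGB() throws. A minimal sketch of the same pipeline with the cleanup moved into a finally block (same WPILib image classes and camera field as the example; the method name is illustrative, not the original author's code):
 public void teleopInitWithCleanup() {
   ColorImage image = null;
   BinaryImage bImage = null;
   MonoImage mImage = null;
   try {
     image = camera.getImage();
     image.write("unedited.jpg");
     bImage = image.thresholdRGB(160, 255, 160, 255, 160, 255);
     bImage.write("whitemask.jpg");
     mImage = image.getLuminancePlane();
     mImage.write("luminancePlane.jpg");
   } catch (NIVisionException e) {
     System.out.println("Error retrieving image: NIVisionException");
     e.printStackTrace();
   } catch (AxisCameraException e) {
     System.out.println("Error retrieving image: AxisCameraException");
     e.printStackTrace();
   } finally {
     // The image wrappers own C allocations, so release them on every path.
     try {
       if (image != null) image.free();
       if (bImage != null) bImage.free();
       if (mImage != null) mImage.free();
     } catch (NIVisionException e) {
       e.printStackTrace();
     }
   }
 }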
 public boolean processImage() {
   boolean debugWriteImages = true;
   boolean success = cam.freshImage();
   if (success) {
     try {
       System.out.println("In Try loop");
       ColorImage im = cam.getImage();
       System.out.println("Got image");
       if (debugWriteImages) {
         im.write("image1.jpg");
         System.out.println("Wrote color image");
       }
       BinaryImage thresholdIm =
           im.thresholdRGB(redLow, redHigh, greenLow, greenHigh, blueLow, blueHigh);
       if (debugWriteImages) {
         thresholdIm.write("image2.jpg");
         System.err.println("Wrote Threshold Image");
       }
       BinaryImage filteredBoxIm = thresholdIm.particleFilter(boxCriteria);
       ParticleAnalysisReport[] xparticles = filteredBoxIm.getOrderedParticleAnalysisReports();
       System.out.println(xparticles.length + " particles at " + Timer.getFPGATimestamp());
       BinaryImage filteredInertiaIm = filteredBoxIm.particleFilter(inertiaCriteria);
       ParticleAnalysisReport[] particles = filteredInertiaIm.getOrderedParticleAnalysisReports();
       System.out.println(particles.length + " particles at " + Timer.getFPGATimestamp());
       // Loop through targets, find highest one.
       // Targets aren't found yet.
       highTarget = Target.NullTarget;
       target1 = Target.NullTarget;
       target2 = Target.NullTarget;
       target3 = Target.NullTarget;
       target4 = Target.NullTarget;
       System.out.println("Targets created");
       double minY = IMAGE_HEIGHT; // Minimum y <-> higher in image.
       for (int i = 0; i < particles.length; i++) {
         Target t = new Target(i, particles[i]);
         if (t.ratio > ratioMin && t.ratio < ratioMax) {
           addTarget(t);
          if (t.centerY <= minY) {
            minY = t.centerY; // track the smallest y seen so far (highest in the image)
            highTarget = t;
          }
         }
         System.out.println(
             "Target "
                 + i
                 + ": ("
                 + t.centerX
                 + ","
                 + t.centerY
                 + ") Distance: "
                 + getDistance(t));
       }
       System.out.println("Best target: " + highTarget.index);
       System.out.println("Distance to the target: " + getDistance(highTarget));
       if (debugWriteImages) {
         filteredBoxIm.write("image3.jpg");
         filteredInertiaIm.write("image4.jpg");
         System.out.println("Wrote Images");
       }
       // Free memory from images.
       im.free();
       thresholdIm.free();
       filteredBoxIm.free();
       filteredInertiaIm.free();
     } catch (AxisCameraException ex) {
       System.out.println("Axis Camera Exception Gotten" + ex.getMessage());
       ex.printStackTrace();
     } catch (NIVisionException ex) {
       System.out.println("NIVision Exception Gotten - " + ex.getMessage());
       ex.printStackTrace();
     }
   }
   return success;
 }
Example #4
  /**
   * Analyzes an image taken by the Axis Camera to determine where the goal is and which goal the
   * robot is facing.
   *
   * @return Array of doubles: the type of goal (2.0 high, 1.0 middle, 0.0 no goal), the
   *     x-centered-normalized value, the y-centered-normalized value, and the distance. If the
   *     distance is negative, no goal was found.
   */
  public double[] analyze() {
    double[] data = new double[4];
    // [goal type, x-centered, y-centered, distance]

    try {
      ColorImage image = camera.getImage(); // Get image from camera
      BinaryImage thresholdImage;
      thresholdImage =
          image.thresholdHSV(120, 120, 44, 80, 98, 100); // "Look" for objects in this HSV range
      BinaryImage convexHullImage = thresholdImage.convexHull(false); // Fill in occluded rectangles
      BinaryImage filteredImage =
          convexHullImage.particleFilter(collection); // Find filled in rectangles

      Scores scores[] = new Scores[filteredImage.getNumberParticles()];

      for (int i = 0; i < scores.length; i++) {
        ParticleAnalysisReport report =
            filteredImage.getParticleAnalysisReport(i); // Get the report for each particle found
        scores[i] = new Scores();

        scores[i].rectangularity = scoreRectangularity(report);
        scores[i].aspectRatioOuter = scoreAspectRatio(filteredImage, report, i, true);
        scores[i].aspectRatioInner = scoreAspectRatio(filteredImage, report, i, false);
        scores[i].xEdge = scoreXEdge(thresholdImage, report);
        scores[i].yEdge = scoreYEdge(thresholdImage, report);

        if (scoreCompare(scores[i], false)) {
          double dist = computeDistance(thresholdImage, report, i, false);
          System.out.println(
              "particle: "
                  + i
                  + " is a High Goal  centerX: "
                  + report.center_mass_x_normalized
                  + "centerY: "
                  + report.center_mass_y_normalized);
          System.out.println("Distance: " + dist);
          data[0] = 2.0;
          data[1] = report.center_mass_x_normalized;
          data[2] = report.center_mass_y_normalized;
          data[3] = dist;
        } else if (scoreCompare(scores[i], true)) {
          double dist = computeDistance(thresholdImage, report, i, true);
          System.out.println(
              "particle: "
                  + i
                  + " is a Middle Goal  centerX: "
                  + report.center_mass_x_normalized
                  + "centerY: "
                  + report.center_mass_y_normalized);
          System.out.println("Distance: " + dist);
          data[0] = 1.0;
          data[1] = report.center_mass_x_normalized;
          data[2] = report.center_mass_y_normalized;
          data[3] = dist;
        } else {
          System.out.println(
              "particle: "
                  + i
                  + " is not a goal  centerX: "
                  + report.center_mass_x_normalized
                  + "centerY: "
                  + report.center_mass_y_normalized);
          data[0] = 0.0;
          data[1] = report.center_mass_x_normalized;
          data[2] = report.center_mass_y_normalized;
          data[3] = -1.0;
        }
        System.out.println(
            "rect: " + scores[i].rectangularity + "ARinner: " + scores[i].aspectRatioInner);
        System.out.println(
            "ARouter: "
                + scores[i].aspectRatioOuter
                + "xEdge: "
                + scores[i].xEdge
                + "yEdge: "
                + scores[i].yEdge);
      }

      /* MUST USE FREE FUNCTION: all images are currently allocated in C structures */
      filteredImage.free();
      convexHullImage.free();
      thresholdImage.free();
      image.free();
    } catch (AxisCameraException e) {
      data[0] = 0.0;
      data[1] = 0.0;
      data[2] = 0.0;
      data[3] = -1.0;
      SmartDashboard.putString("ERROR: ", "Camera malfunction!");
    } catch (NIVisionException ex) {
      data[0] = 0.0;
      data[1] = 0.0;
      data[2] = 0.0;
      data[3] = -1.0;
      SmartDashboard.putString("ERROR: ", "NIVision Exception!");
    }

    return data;
  }
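For reference, a minimal caller-side sketch of how the array returned by analyze() might be consumed, following the index layout documented above; the visionAnalyzer field and the aiming helpers are hypothetical names, not part of the original code:
  public void aimFromVision() {
    double[] data = visionAnalyzer.analyze(); // hypothetical field holding the class above
    // Index layout per the Javadoc: [goal type, x-centered, y-centered, distance]
    if (data[3] < 0) {
      SmartDashboard.putString("Vision", "No goal found");
    } else if (data[0] == 2.0) {
      aimAtHighGoal(data[1], data[2], data[3]); // hypothetical helper
    } else if (data[0] == 1.0) {
      aimAtMiddleGoal(data[1], data[2], data[3]); // hypothetical helper
    }
  }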
  public DetectedPoint[] getTargetCoordinates(TrackingCriteria criteria) {
    // Most important bit of this code...
    final long thisAlgorithmBecomingSkynetCost = 99999999;
    ColorImage colorImage = null;
    BinaryImage binaryImage = null;
    BinaryImage resultImage = null;
    DetectedPoint[] results = null;

    try {
      if (!USE_CAMERA) {
        colorImage = new RGBImage("inputImage.jpg");
      } else {
        do {
          colorImage = imageTrackingCamera.getImage();
        } while (!imageTrackingCamera.freshImage());
      }

      int hueLow = criteria.getMinimumHue();
      int hueHigh = criteria.getMaximumHue();
      int saturationLow = criteria.getMinimumSaturation();
      int saturationHigh = criteria.getMaximumSaturation();
      int valueLow = criteria.getMinimumValue();
      int valueHigh = criteria.getMaximumValue();

      // Attempt to isolate the colours of the LED ring
      binaryImage =
          colorImage.thresholdHSV(
              hueLow, hueHigh, saturationLow, saturationHigh, valueLow, valueHigh);
      // Fill in any detected "particles" to make analysis easier
      // See:
      // http://zone.ni.com/reference/en-XX/help/372916L-01/nivisionconcepts/advanced_morphology_operations/
      // convexHull() returns a new image rather than filling in the particles in place,
      // so capture the result; otherwise the hull step has no effect on the analysis below.
      BinaryImage hullImage = binaryImage.convexHull(true);
      resultImage = hullImage.removeSmallObjects(true, 3);
      hullImage.free(); // the intermediate hull image is no longer needed once filtered

      ParticleAnalysisReport[] reports = resultImage.getOrderedParticleAnalysisReports();
      results = new DetectedPoint[reports.length];
      int pointIndex = 0;
      for (int i = 0; i < reports.length; i++) {
        ParticleAnalysisReport report = reports[i];
        double aspectRatio = (double) report.boundingRectWidth / report.boundingRectHeight; // avoid integer truncation
        double area = report.particleArea;
        double aspectError = (aspectRatio - criteria.getAspectRatio()) / criteria.getAspectRatio();
        double areaError = (area - criteria.getParticleArea()) / criteria.getParticleArea();
        aspectError = Math.abs(aspectError);
        areaError = Math.abs(areaError);
        if (aspectError < criteria.getAspectTolerance()
            && areaError < criteria.getAreaTolerance()) {
          results[pointIndex] =
              new DetectedPoint(report.center_mass_x_normalized, report.center_mass_y_normalized);
          pointIndex++;
        }
      }

      log(pointIndex + " point Index, " + results.length + " results.length");
      // Remove the empty slots in the array
      if (pointIndex < results.length) {
        DetectedPoint[] compressedPoints = new DetectedPoint[pointIndex];
        int x = 0;
        for (int i = 0; i < results.length; i++) {
          if (results[i] != null) {
            compressedPoints[x] = results[i];
            x++;
          }
        }
        results = compressedPoints;
      }
    } catch (AxisCameraException ex) {
      log("Unable to grab images from the image tracking camera");
      ex.printStackTrace();
    } catch (NIVisionException ex) {
      log("Encountered a NIVisionException while trying to acquire coordinates");
      ex.printStackTrace();
    } finally {
      try {
        log("We're actually freeing things!");
        // For debugging purposes
        // colorImage.write("colorImage.jpg");
        // binaryImage.write("binaryImage.jpg");
        // resultImage.write("resultImage.jpg");

        if (colorImage != null) colorImage.free();
        if (binaryImage != null) binaryImage.free();
        if (resultImage != null) resultImage.free();
        colorImage = null;
        binaryImage = null;
        resultImage = null;
      } catch (NIVisionException ex) {
        // Really? Throw an exception while freeing memory?
        log("Encountered an exception while freeing memory... Really NI? Really?");
        ex.printStackTrace();
      }
    }
    return results;
  }
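The finally block above has to wrap every free() call in its own try/catch because free() is declared to throw NIVisionException. A small helper keeps that noise out of the acquisition code; this is a sketch assuming ColorImage and BinaryImage share the edu.wpi.first.wpilibj.image.Image base class (if they do not in your WPILib version, per-type overloads work the same way):
  /** Frees an image wrapper if it is non-null, logging rather than propagating NIVisionException. */
  private static void freeQuietly(Image img) {
    if (img == null) {
      return;
    }
    try {
      img.free();
    } catch (NIVisionException ex) {
      // Nothing useful can be done if releasing the underlying C allocation fails.
      ex.printStackTrace();
    }
  }

  // Usage in the finally block of getTargetCoordinates():
  //   freeQuietly(colorImage);
  //   freeQuietly(binaryImage);
  //   freeQuietly(resultImage);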
Example #6
  public TargetScores[] visionAnalyze() {
    // TargetScores[] tScores_Out;// = new TargetScores[1][];
    TargetScores[] tScores = null;
    try {
      /**
       * Do the image capture with the camera and apply the algorithm described above. This sample
       * will either get images from the camera or from an image file stored in the top level
       * directory in the flash memory on the cRIO. The file name in this case is "testImage.jpg"
       */
      ColorImage image = camera.getImage(); // comment if using stored images
      // ColorImage image;                           // next 2 lines read image from flash on cRIO
      // image = new RGBImage("/testImage.jpg");		// get the sample image from the cRIO flash
      BinaryImage thresholdImage =
          image.thresholdHSV(60, 100, 90, 255, 20, 255); // keep only green objects
      // thresholdImage.write("/threshold.bmp");
      BinaryImage convexHullImage = thresholdImage.convexHull(false); // fill in occluded rectangles
      // convexHullImage.write("/convexHull.bmp");
      BinaryImage filteredImage = convexHullImage.particleFilter(cc); // filter out small particles
      // filteredImage.write("/filteredImage.bmp");

      // iterate through each particle and score to see if it is a target
      Scores scores[] = new Scores[filteredImage.getNumberParticles()];
      tScores = new TargetScores[filteredImage.getNumberParticles()];
      for (int i = 0; i < scores.length; i++) {

        ParticleAnalysisReport report = filteredImage.getParticleAnalysisReport(i);
        scores[i] = new Scores();
        tScores[i] = new TargetScores();

        scores[i].rectangularity = scoreRectangularity(report);
        scores[i].aspectRatioOuter = scoreAspectRatio(filteredImage, report, i, true);
        scores[i].aspectRatioInner = scoreAspectRatio(filteredImage, report, i, false);
        scores[i].xEdge = scoreXEdge(thresholdImage, report);
        scores[i].yEdge = scoreYEdge(thresholdImage, report);

        if (scoreCompare(scores[i], false)) {
          tScores[i].goal = TargetScores.high;
        } else if (scoreCompare(scores[i], true)) {
          tScores[i].goal = TargetScores.med;
        }
        tScores[i].yVal = report.center_mass_y;
        tScores[i].xVal = report.center_mass_x;
        tScores[i].yValAim = axisToAim('y', tScores[i].yVal);
        /*
        if (scoreCompare(scores[i], false)) {
          System.out.println("particle: " + i + "is a High Goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
          System.out.println("Distance: " + computeDistance(thresholdImage, report, i, false));
        } else if (scoreCompare(scores[i], true)) {
          System.out.println("particle: " + i + "is a Middle Goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
          System.out.println("Distance: " + computeDistance(thresholdImage, report, i, true));
        } else {
          System.out.println("particle: " + i + "is not a goal  centerX: " + report.center_mass_x_normalized + "centerY: " + report.center_mass_y_normalized);
        }
        System.out.println("rect: " + scores[i].rectangularity + "ARinner: " + scores[i].aspectRatioInner);
        System.out.println("ARouter: " + scores[i].aspectRatioOuter + "xEdge: " + scores[i].xEdge + "yEdge: " + scores[i].yEdge);
        */

      }

      /**
       * all images in Java must be freed after they are used since they are allocated out of C data
       * structures. Not calling free() will cause the memory to accumulate over each pass of this
       * loop.
       */
      filteredImage.free();
      convexHullImage.free();
      thresholdImage.free();
      image.free();

      // tScores_Out = new TargetScores[tScores.length];
      // tScores_Out = (TargetScores[])tScores.clone();

      // myScores = scores;

    } catch (AxisCameraException ex) { // this is needed if the camera.getImage() is called
      ex.printStackTrace();
    } catch (NIVisionException ex) {
      ex.printStackTrace();
    }
    if (tScores == null) {
      // An exception occurred before any particles were scored; return a one-element sentinel.
      tScores = new TargetScores[1];
      tScores[0] = new NoScores(); // index 0, not 1: the array has length 1
    }
    return tScores; // tScores_Out;
  }
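A short caller-side sketch of how the NoScores sentinel returned above could be told apart from real results; the vision field and processTarget() are hypothetical names used only for illustration:
  public void handleVisionResults() {
    TargetScores[] scores = vision.visionAnalyze(); // hypothetical field holding the class above
    if (scores.length == 1 && scores[0] instanceof NoScores) {
      // Image capture or analysis threw before any particles were scored.
      System.out.println("No vision targets available");
    } else {
      for (int i = 0; i < scores.length; i++) {
        processTarget(scores[i]); // hypothetical consumer of one scored particle
      }
    }
  }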