Example No. 1
  public void init(int N, boolean isFundamental) {
    // define the camera's motion
    worldToCamera = new Se3_F64();
    worldToCamera
        .getR()
        .set(ConvertRotation3D_F64.eulerToMatrix(EulerType.XYZ, 0.05, -0.03, 0.02, null));
    worldToCamera.getT().set(0.1, -0.1, 0.01);

    // randomly generate points in space
    worldPts = GeoTestingOps.randomPoints_F64(-1, 1, -1, 1, 2, 3, N, rand);

    // transform points into second camera's reference frame
    pairs = new ArrayList<AssociatedPair>();
    currentObs = new ArrayList<Point2D_F64>();

    for (Point3D_F64 p1 : worldPts) {
      Point3D_F64 p2 = SePointOps_F64.transform(worldToCamera, p1, null);

      AssociatedPair pair = new AssociatedPair();
      pair.p1.set(p1.x / p1.z, p1.y / p1.z);
      pair.p2.set(p2.x / p2.z, p2.y / p2.z);
      pairs.add(pair);

      if (isFundamental) {
        GeometryMath_F64.mult(K, pair.p1, pair.p1);
        GeometryMath_F64.mult(K, pair.p2, pair.p2);
      }

      currentObs.add(pair.p2);
    }
  }
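The only difference between the essential-matrix and fundamental-matrix setups is the isFundamental branch inside the loop: the essential matrix relates normalized image coordinates, while the fundamental matrix relates pixel coordinates, so each observation is pushed through the calibration matrix K when isFundamental is true. A minimal sketch of that conversion for a single point (the intrinsic values below are invented for illustration):

// Hedged sketch, not part of the fixture above: the isFundamental branch is just the
// normalized-to-pixel conversion x_pixel = K * x_norm. The intrinsics here are made up.
DenseMatrix64F K = new DenseMatrix64F(3, 3, true,
    500, 0, 320,
    0, 500, 240,
    0, 0, 1);
Point2D_F64 norm = new Point2D_F64(0.1, -0.05);  // normalized image coordinate
Point2D_F64 pixel = new Point2D_F64();
GeometryMath_F64.mult(K, norm, pixel);           // multiplies (x, y, 1) by K and divides by w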
  /** Checks the estimated transform when the two point sets match perfectly and again with a little bit of noise. */
  @Test
  public void test3D() {
    for (double noise = 0; noise <= 0.01; noise += 0.01) {
      // can only correct small changes
      DenseMatrix64F R = RotationMatrixGenerator.eulerXYZ(0.01, -0.002, 0.03, null);
      Vector3D_F64 T = new Vector3D_F64(0.02, 0.03, 0.01);

      Se3_F64 tran = new Se3_F64(R, T);

      List<Point3D_F64> srcPts = UtilPoint3D_F64.random(-10, 10, 30, rand);
      List<Point3D_F64> srcOrig = UtilPoint3D_F64.copy(srcPts);
      List<Point3D_F64> modelPts = new ArrayList<Point3D_F64>();
      for (Point3D_F64 p : srcPts) {
        modelPts.add(SePointOps_F64.transform(tran, p, null));
      }

      // add noise
      UtilPoint3D_F64.noiseNormal(modelPts, noise, rand);

      PointModel<Point3D_F64> model = new PointModel<Point3D_F64>(modelPts);
      StoppingCondition stop = new StoppingCondition(10, 0.1 * noise / srcPts.size() + 1e-8);
      IterativeClosestPoint<Se3_F64, Point3D_F64> alg =
          new IterativeClosestPoint<Se3_F64, Point3D_F64>(stop, new MotionSe3PointSVD_F64());
      alg.setModel(model);

      alg.process(srcPts);

      Se3_F64 foundTran = alg.getPointsToModel();

      checkTransform(srcOrig, modelPts, foundTran, noise * 10 + 1e-8);
    }
  }
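The 3D checkTransform helper called above is not included in this snippet. A hypothetical sketch of what it presumably does, mirroring the 2D version shown further down on this page:

  // Hypothetical helper, assumed to mirror the 2D checkTransform shown later on this page:
  // apply the estimated transform to the original points and compare against the model.
  public static void checkTransform(
      List<Point3D_F64> pts, List<Point3D_F64> model, Se3_F64 pointsToModel, double tol) {
    Point3D_F64 found = new Point3D_F64();
    for (int i = 0; i < pts.size(); i++) {
      SePointOps_F64.transform(pointsToModel, pts.get(i), found);
      assertEquals(0, found.distance(model.get(i)), tol);
    }
  }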
Example No. 3
  /**
   * Renders a 3D point in the left and right camera views given the stereo parameters. Lens
   * distortion is taken into account.
   *
   * @param param Stereo parameters
   * @param X Point location in 3D space, in the left camera's reference frame
   * @param left (Output) Location in pixels in the left camera
   * @param right (Output) Location in pixels in the right camera
   */
  public static void renderPointPixel(
      StereoParameters param, Point3D_F64 X, Point2D_F64 left, Point2D_F64 right) {
    // compute the location of X in the right camera's reference frame
    Point3D_F64 rightX = new Point3D_F64();
    SePointOps_F64.transform(param.getRightToLeft().invert(null), X, rightX);

    // location of object in normalized image coordinates
    Point2D_F64 normLeft = new Point2D_F64(X.x / X.z, X.y / X.z);
    Point2D_F64 normRight = new Point2D_F64(rightX.x / rightX.z, rightX.y / rightX.z);

    // convert into pixel coordinates
    Point2D_F64 pixelLeft =
        PerspectiveOps.convertNormToPixel(param.left, normLeft.x, normLeft.y, null);
    Point2D_F64 pixelRight =
        PerspectiveOps.convertNormToPixel(param.right, normRight.x, normRight.y, null);

    // take lens distortion into account
    Point2Transform2_F32 distLeft =
        LensDistortionOps.transformPoint(param.left).distort_F32(true, true);
    Point2Transform2_F32 distRight =
        LensDistortionOps.transformPoint(param.right).distort_F32(true, true);

    Point2D_F32 lensLeft = new Point2D_F32();
    Point2D_F32 lensRight = new Point2D_F32();

    distLeft.compute((float) pixelLeft.x, (float) pixelLeft.y, lensLeft);
    distRight.compute((float) pixelRight.x, (float) pixelRight.y, lensRight);

    // output solution
    left.set(lensLeft.x, lensLeft.y);
    right.set(lensRight.x, lensRight.y);
  }
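A brief usage sketch (not part of the original source): left and right are output parameters, so the caller supplies empty points. The StereoParameters instance and the 3D point below are assumed.

// Hedged usage sketch; 'param' is assumed to be an already-calibrated StereoParameters
// and the 3D point is made up, expressed in the left camera's reference frame.
Point3D_F64 X = new Point3D_F64(0.2, -0.1, 3.0);
Point2D_F64 left = new Point2D_F64();
Point2D_F64 right = new Point2D_F64();
renderPointPixel(param, X, left, right);
// left and right now hold the distorted pixel locations in each image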
    @Override
    public boolean process(GrayF32 input) {

      Se3_F64 t2c = targetToCamera.get(count++);

      set = new CalibrationObservation();

      for (int i = 0; i < layout.size(); i++) {
        Point2D_F64 p2 = layout.get(i);
        // location of calibration point on the target
        Point3D_F64 p3 = new Point3D_F64(p2.x, p2.y, 0);

        Point3D_F64 a = SePointOps_F64.transform(t2c, p3, null);

        Point2D_F64 pixel = new Point2D_F64();
        normToPixel.compute(a.x / a.z, a.y / a.z, pixel);

        if (pixel.x < 0
            || pixel.x >= intrinsic.width - 1
            || pixel.y < 0
            || pixel.y >= input.height - 1)
          throw new RuntimeException("Adjust test setup, bad observation");

        set.add(pixel, i);
      }

      return true;
    }
  /** Applies the transform to every point in 'pts' and checks that it matches the corresponding point in 'model'. */
  public static void checkTransform(
      List<Point2D_F64> pts, List<Point2D_F64> model, Se2_F64 modelToPts, double tol) {
    Point2D_F64 foundPt = new Point2D_F64();
    for (int i = 0; i < pts.size(); i++) {

      Point2D_F64 p = pts.get(i);
      SePointOps_F64.transform(modelToPts, p, foundPt);
      GeometryUnitTest.assertEquals(model.get(i), foundPt, tol);
    }
  }
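A hedged sketch of how this helper can be exercised: generate model points, map them through the inverse of a known transform, and confirm the forward transform recovers the model. The transform, point count, and seed below are arbitrary.

// Hedged sketch with made-up values; not part of the original test code.
Random rand = new Random(234);
Se2_F64 modelToPts = new Se2_F64(0.5, -0.25, 0.1);   // tx, ty, yaw
Se2_F64 inv = modelToPts.invert(null);

List<Point2D_F64> model = UtilPoint2D_F64.random(-2, 2, 20, rand);
List<Point2D_F64> pts = new ArrayList<Point2D_F64>();
for (Point2D_F64 p : model) {
  pts.add(SePointOps_F64.transform(inv, p, null));   // model points mapped back into 'pts'
}

checkTransform(pts, model, modelToPts, GrlConstants.DOUBLE_TEST_TOL);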
  private void checkPlaneToWorld(PlaneNormal3D_F64 planeN) {

    planeN.getN().normalize();
    PlaneGeneral3D_F64 planeG = UtilPlane3D_F64.convert(planeN, null);

    List<Point2D_F64> points2D = UtilPoint2D_F64.random(-2, 2, 100, rand);

    Se3_F64 planeToWorld = UtilPlane3D_F64.planeToWorld(planeG, null);
    Point3D_F64 p3 = new Point3D_F64();
    Point3D_F64 l3 = new Point3D_F64();
    Point3D_F64 k3 = new Point3D_F64();
    for (Point2D_F64 p : points2D) {
      p3.set(p.x, p.y, 0);
      SePointOps_F64.transform(planeToWorld, p3, l3);

      // the transform should round-trip back to the original 2D point
      SePointOps_F64.transformReverse(planeToWorld, l3, k3);
      assertEquals(0, k3.distance(p3), GrlConstants.DOUBLE_TEST_TOL);

      // the transformed point should lie on the plane
      assertEquals(0, UtilPlane3D_F64.evaluate(planeG, l3), GrlConstants.DOUBLE_TEST_TOL);
    }
  }
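A hedged sketch of driving this check with a couple of hand-picked planes; it is assumed that PlaneNormal3D_F64 offers the (x, y, z, nx, ny, nz) convenience constructor, i.e. a point on the plane followed by its normal.

// Hedged sketch with invented planes; the constructor is assumed to take a point on the
// plane (x, y, z) followed by the plane's normal (nx, ny, nz).
checkPlaneToWorld(new PlaneNormal3D_F64(0, 0, 0, 0, 0, 1));         // the z = 0 plane
checkPlaneToWorld(new PlaneNormal3D_F64(1, -2, 0.5, 0.2, -0.4, 1)); // a tilted, offset plane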
  /** Computes the residuals (predicted minus observed normalized image coordinates) for every observation in each view. */
  public void process(CalibratedPoseAndPoint model, double[] output) {
    int outputIndex = 0;

    for (int view = 0; view < model.getNumViews(); view++) {
      Se3_F64 worldToCamera = model.getWorldToCamera(view);

      FastQueue<PointIndexObservation> observedPts = observations.get(view).getPoints();

      for (int i = 0; i < observedPts.size; i++) {
        PointIndexObservation o = observedPts.data[i];

        Point3D_F64 worldPt = model.getPoint(o.pointIndex);

        // put the point into the camera's reference frame
        SePointOps_F64.transform(worldToCamera, worldPt, cameraPt);

        // residual = predicted normalized image coordinate minus the observation
        output[outputIndex++] = cameraPt.x / cameraPt.z - o.obs.x;
        output[outputIndex++] = cameraPt.y / cameraPt.z - o.obs.y;
      }
    }
  }
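A hedged note on sizing the output array: two residuals (x and y) are written per observation, summed over all views, so the caller would allocate something like the following, assuming observations is the list iterated above.

// Hedged sketch: two residuals are written for every observation in every view.
int numResiduals = 0;
for (int view = 0; view < model.getNumViews(); view++) {
  numResiduals += observations.get(view).getPoints().size * 2;
}
double[] output = new double[numResiduals];
process(model, output);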