@Override
  protected boolean step() {
    // Line search along the current search direction for a minimum
    Vector xold = this.result.getInput();

    this.result =
        this.lineMinimizer.minimizeAlongDirection(
            this.lineFunction, this.result.getOutput(), this.gradient);

    Vector xnew = this.result.getInput();
    double fnew = this.result.getOutput();

    this.lineFunction.setVectorOffset(xnew);

    // Let's cache some values for speed
    Vector gradientOld = this.gradient;

    // See if the gradient at "xnew" has already been computed.
    // NOTE: There may still be an inefficiency here.  For example, the
    // gradient for "xnew" could have been computed on an evaluation before
    // the most recent one, but catching that case would require a
    // tremendous amount of bookkeeping and memory.
    if ((this.lineFunction.getLastGradient() != null)
        && (this.lineFunction.getLastGradient().getInput().equals(xnew))) {
      this.gradient = this.lineFunction.getLastGradient().getOutput();
    } else {
      this.gradient = this.data.differentiate(xnew);
    }

    // Cache the vectors and dot products needed for the BFGS update;
    // the gamma/delta notation follows the Wikipedia article
    Vector gamma = this.gradient.minus(gradientOld);
    Vector delta = xnew.minus(xold);

    // If we've converged on zero slope, then we're done!
    if (MinimizationStoppingCriterion.convergence(
        xnew, fnew, this.gradient, delta, this.getTolerance())) {
      return false;
    }

    // Call the particular quasi-Newton update rule (e.g., BFGS or DFP)
    this.updateHessianInverse(this.hessianInverse, delta, gamma);

    // The next search direction is the Newton-like descent direction -H*g
    this.lineFunction.setDirection(this.hessianInverse.times(this.gradient).scale(-1.0));

    return true;
  }
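
  // For illustration, a minimal sketch of what a BFGS-style updateHessianInverse
  // could look like in the gamma/delta notation referenced above.  This is an
  // assumption for exposition, not the Foundry's actual implementation; the
  // concrete quasi-Newton subclasses supply their own update rules.
  protected boolean updateHessianInverseSketch(
      Matrix hessianInverse, Vector delta, Vector gamma) {
    final double dg = delta.dotProduct(gamma); // delta' * gamma
    if (dg <= 0.0) {
      return false; // curvature condition violated, so skip the update
    }
    final Vector hg = hessianInverse.times(gamma); // H * gamma
    final double ghg = gamma.dotProduct(hg); // gamma' * H * gamma
    // H += ((dg + ghg) / dg^2) * (delta * delta') - (hg * delta' + delta * hg') / dg
    Matrix update = delta.outerProduct(delta).scale((dg + ghg) / (dg * dg));
    update.minusEquals(hg.outerProduct(delta).scale(1.0 / dg));
    update.minusEquals(delta.outerProduct(hg).scale(1.0 / dg));
    hessianInverse.plusEquals(update);
    return true;
  }
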
  @Override
  public void measure(MultivariateGaussian belief, Vector observation) {

    final Matrix C = this.model.getC();

    // Figure out what the model says the observation should be
    final Vector xpred = belief.getMean();
    final Vector ypred = C.times(xpred);

    // Update step: compute the difference (innovation) between the observation
    // and what the model predicts, then compute the Kalman gain, which
    // essentially indicates how much to believe the observation versus
    // how much to believe the model
    final Vector innovation = observation.minus(ypred);
    this.computeMeasurementBelief(belief, innovation, C);

    // NOTE: the covariance was already updated inside computeMeasurementBelief

  }
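
  // A hypothetical sketch of the measurement update performed by
  // computeMeasurementBelief, assuming the textbook Kalman equations and a
  // getMeasurementCovariance() accessor for the noise covariance R.
  // The names here are illustrative, not necessarily the actual Foundry API.
  private void computeMeasurementBeliefSketch(
      MultivariateGaussian belief, Vector innovation, Matrix C) {
    final Matrix P = belief.getCovariance();
    final Matrix R = this.model.getMeasurementCovariance(); // assumed accessor
    // Innovation covariance: S = C * P * C' + R
    final Matrix S = C.times(P).times(C.transpose()).plus(R);
    // Kalman gain: K = P * C' * inv(S); a large R yields a small K,
    // meaning the model is trusted more than the observation
    final Matrix K = P.times(C.transpose()).times(S.inverse());
    // Blend the model's prediction with the observation
    belief.setMean(belief.getMean().plus(K.times(innovation)));
    // Posterior covariance: P = (I - K * C) * P
    final Matrix eye =
        MatrixFactory.getDefault().createIdentity(P.getNumRows(), P.getNumColumns());
    belief.setCovariance(eye.minus(K.times(C)).times(P));
  }
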
  /**
   * Test of learn method, of class gov.sandia.cognition.learning.pca.PrincipalComponentsAnalysis.
   *
   * <p>The example data is based on: http://www.kernel-machines.org/code/kpca_toy.m
   */
  public void testPCALearn() {
    System.out.println("PCA.learn");

    int num = random.nextInt(100) + 10;
    ArrayList<Vector> data = new ArrayList<Vector>(num);
    final double r1 = random.nextDouble();
    final double r2 = r1 / random.nextDouble(); // so that r2 > r1
    for (int i = 0; i < num; i++) {
      data.add(VectorFactory.getDefault().createUniformRandom(INPUT_DIM, r1, r2, random));
    }

    Vector mean = MultivariateStatisticsUtil.computeMean(data);

    // Build the mean-centered data matrix; its SVD provides reference components
    DenseMatrix X = DenseMatrixFactoryMTJ.INSTANCE.createMatrix(INPUT_DIM, num);
    for (int n = 0; n < num; n++) {
      X.setColumn(n, data.get(n).minus(mean));
    }

    // Keep a clone of the data to verify that learn() does not modify its input
    final ArrayList<Vector> dataCopy = ObjectUtil.cloneSmartElementsAsArrayList(data);

    long startsvd = System.currentTimeMillis();
    SingularValueDecomposition svd = SingularValueDecompositionMTJ.create(X);
    long stopsvd = System.currentTimeMillis();

    long start = System.currentTimeMillis();
    PrincipalComponentsAnalysis instance = this.createPCAInstance();
    PrincipalComponentsAnalysisFunction f = instance.learn(data);
    long stop = System.currentTimeMillis();

    // The learner must not have modified its input data
    assertEquals(dataCopy, data);

    System.out.println("Uhat:\n" + f.getDimensionReducer().getDiscriminant().transpose());
    System.out.println("U:\n" + svd.getU());

    System.out.println("Time taken: SVD = " + (stopsvd - startsvd) + ", PCA = " + (stop - start));

    // Make sure the PCA algorithm subtracted off the sample mean; the
    // assertEquals only fires (to produce a readable failure) when the
    // tolerance check fails
    if (!mean.equals(f.getMean(), 1e-5)) {
      assertEquals(mean, f.getMean());
    }

    assertEquals(OUTPUT_DIM, instance.getNumComponents());
    assertEquals(instance.getNumComponents(), f.getOutputDimensionality());
    assertEquals(INPUT_DIM, f.getInputDimensionality());

    // The learned components should be orthonormal and should match the
    // corresponding left-singular vectors of the centered data matrix (up to sign)
    double absnorm = 0.0;
    int nc = instance.getNumComponents() * INPUT_DIM;
    for (int i = 0; i < instance.getNumComponents(); i++) {
      Vector uihat = f.getDimensionReducer().getDiscriminant().getRow(i);
      for (int j = 0; j < i; j++) {
        Vector ujhat = f.getDimensionReducer().getDiscriminant().getRow(j);
        assertEquals(
            "Dot product between " + i + " and " + j + " is too large!",
            0.0,
            uihat.dotProduct(ujhat),
            1e-2);
      }
      assertEquals(1.0, uihat.norm2(), 1e-5);
      // Singular vectors are only determined up to sign, so compare against +/- ui
      Vector ui = svd.getU().getColumn(i);
      absnorm += Math.min(ui.minus(uihat).norm2(), ui.minus(uihat.scale(-1)).norm2());
    }
    // Average the sign-invariant column error over all matrix entries
    absnorm /= nc;

    System.out.println("Average U column error: " + absnorm);
    assertEquals(0.0, absnorm, 1e-1);
  }
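
  // For reference, a sketch of how PCA components relate to the SVD computed in
  // the test above: mean-center the data, take the SVD, and keep the first
  // numComponents left-singular vectors as rows of the discriminant.  This is
  // illustrative only; createPCAInstance() may return a learner that uses a
  // different algorithm, and the MultivariateDiscriminant constructor usage
  // here is an assumption.
  private PrincipalComponentsAnalysisFunction learnByThinSvdSketch(
      ArrayList<Vector> data, int numComponents) {
    final int dim = data.get(0).getDimensionality();
    Vector mean = MultivariateStatisticsUtil.computeMean(data);
    // Mean-centered data matrix, one sample per column
    DenseMatrix X = DenseMatrixFactoryMTJ.INSTANCE.createMatrix(dim, data.size());
    for (int n = 0; n < data.size(); n++) {
      X.setColumn(n, data.get(n).minus(mean));
    }
    SingularValueDecomposition svd = SingularValueDecompositionMTJ.create(X);
    // Rows of Uhat are the top numComponents left-singular vectors: unit length
    // and mutually orthogonal, exactly the properties asserted in the test
    Matrix Uhat = svd.getU().getSubMatrix(0, dim - 1, 0, numComponents - 1).transpose();
    return new PrincipalComponentsAnalysisFunction(mean, new MultivariateDiscriminant(Uhat));
  }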