Code Example #1
 /**
  * Calculates beta by GLS.
  *
  * <pre>
  *  b = (X' Omega^-1 X)^-1 X' Omega^-1 y
  * </pre>
  *
  * @return beta
  */
 @Override
 protected RealVector calculateBeta() {
   RealMatrix OI = getOmegaInverse();
   RealMatrix XT = X.transpose();
   RealMatrix XTOIX = XT.multiply(OI).multiply(X);
   RealMatrix inverse = new LUDecompositionImpl(XTOIX).getSolver().getInverse();
   return inverse.multiply(XT).multiply(OI).operate(Y);
 }
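
Since the body above is a direct transcription of b = (X' Omega^-1 X)^-1 X' Omega^-1 y, the same computation can be reproduced standalone with the Commons Math 2.x linear-algebra classes the snippet already relies on. This is only an illustrative sketch: the class name GlsBetaSketch and all data values are invented, and Omega is taken to be a simple diagonal error covariance.

import org.apache.commons.math.linear.Array2DRowRealMatrix;
import org.apache.commons.math.linear.ArrayRealVector;
import org.apache.commons.math.linear.LUDecompositionImpl;
import org.apache.commons.math.linear.RealMatrix;
import org.apache.commons.math.linear.RealVector;

public class GlsBetaSketch {
  public static void main(String[] args) {
    // Design matrix X (first column of ones = intercept), response y,
    // and error covariance Omega -- illustrative values only.
    RealMatrix X = new Array2DRowRealMatrix(new double[][] {
        {1, 0}, {1, 1}, {1, 2}, {1, 3}});
    RealVector y = new ArrayRealVector(new double[] {0.1, 0.9, 2.2, 2.9});
    RealMatrix omega = new Array2DRowRealMatrix(new double[][] {
        {1.0, 0.0, 0.0, 0.0},
        {0.0, 2.0, 0.0, 0.0},
        {0.0, 0.0, 1.5, 0.0},
        {0.0, 0.0, 0.0, 1.0}});

    // Omega^-1 and X'
    RealMatrix OI = new LUDecompositionImpl(omega).getSolver().getInverse();
    RealMatrix XT = X.transpose();

    // b = (X' Omega^-1 X)^-1 X' Omega^-1 y
    RealMatrix XTOIX = XT.multiply(OI).multiply(X);
    RealMatrix inverse = new LUDecompositionImpl(XTOIX).getSolver().getInverse();
    RealVector beta = inverse.multiply(XT).multiply(OI).operate(y);

    System.out.println(beta);
  }
}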
Code Example #2
 /**
  * Calculates the variance-covariance matrix of the regression parameters.
  *
  * <p>Var(b) = (X<sup>T</sup>X)<sup>-1</sup>
  *
  * <p>Uses QR decomposition to reduce (X<sup>T</sup>X)<sup>-1</sup> to
  * (R<sup>T</sup>R)<sup>-1</sup>, with only the top p rows of R included, where p = the length of
  * the beta vector.
  *
  * @return The beta variance-covariance matrix
  */
 @Override
 protected RealMatrix calculateBetaVariance() {
   int p = X.getColumnDimension();
   RealMatrix Raug = qr.getR().getSubMatrix(0, p - 1, 0, p - 1);
   RealMatrix Rinv = new LUDecompositionImpl(Raug).getSolver().getInverse();
   return Rinv.multiply(Rinv.transpose());
 }
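
As a cross-check of the reduction described in the javadoc, (X^T X)^-1 computed directly should match R^-1 (R^-1)^T built from the top p x p block of R. The sketch below compares the two routes, assuming the Commons Math 2.x linear-algebra API; the class name BetaVarianceSketch and the design matrix values are invented for illustration.

import org.apache.commons.math.linear.Array2DRowRealMatrix;
import org.apache.commons.math.linear.LUDecompositionImpl;
import org.apache.commons.math.linear.QRDecompositionImpl;
import org.apache.commons.math.linear.RealMatrix;

public class BetaVarianceSketch {
  public static void main(String[] args) {
    // Illustrative design matrix with an intercept column.
    RealMatrix X = new Array2DRowRealMatrix(new double[][] {
        {1, 0}, {1, 1}, {1, 2}, {1, 3}});
    int p = X.getColumnDimension();

    // Direct route: (X'X)^-1
    RealMatrix direct = new LUDecompositionImpl(
        X.transpose().multiply(X)).getSolver().getInverse();

    // QR route: take the top p x p block of R, then R^-1 (R^-1)'
    QRDecompositionImpl qr = new QRDecompositionImpl(X);
    RealMatrix Raug = qr.getR().getSubMatrix(0, p - 1, 0, p - 1);
    RealMatrix Rinv = new LUDecompositionImpl(Raug).getSolver().getInverse();
    RealMatrix viaQr = Rinv.multiply(Rinv.transpose());

    // Both routes should agree up to rounding error.
    System.out.println(direct.subtract(viaQr));
  }
}

The QR route avoids forming X^T X explicitly, which is the better-conditioned choice when the columns of X are nearly collinear.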
Code Example #3
  /**
   * Compute the "hat" matrix.
   *
   * <p>The hat matrix is defined in terms of the design matrix X by
   * X(X<sup>T</sup>X)<sup>-1</sup>X<sup>T</sup>
   *
   * <p>The implementation here uses the QR decomposition to compute the hat matrix as Q
   * I<sub>p</sub>Q<sup>T</sup> where I<sub>p</sub> is the p-dimensional identity matrix augmented
   * by 0's. This computational formula is from "The Hat Matrix in Regression and ANOVA", David C.
   * Hoaglin and Roy E. Welsch, <i>The American Statistician</i>, Vol. 32, No. 1 (Feb., 1978), pp.
   * 17-22.
   *
   * @return the hat matrix
   */
  public RealMatrix calculateHat() {
    // Create augmented identity matrix
    RealMatrix Q = qr.getQ();
    final int p = qr.getR().getColumnDimension();
    final int n = Q.getColumnDimension();
    Array2DRowRealMatrix augI = new Array2DRowRealMatrix(n, n);
    double[][] augIData = augI.getDataRef();
    for (int i = 0; i < n; i++) {
      for (int j = 0; j < n; j++) {
        if (i == j && i < p) {
          augIData[i][j] = 1d;
        } else {
          augIData[i][j] = 0d;
        }
      }
    }

    // Compute and return Hat matrix
    return Q.multiply(augI).multiply(Q.transpose());
  }
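
The Hoaglin and Welsch formula can likewise be checked against the textbook definition H = X(X^T X)^-1 X^T. A minimal sketch under the same assumptions as above (Commons Math 2.x linear algebra, invented data, hypothetical class name HatMatrixSketch); the diagonal entries of either matrix are the leverages.

import org.apache.commons.math.linear.Array2DRowRealMatrix;
import org.apache.commons.math.linear.LUDecompositionImpl;
import org.apache.commons.math.linear.QRDecompositionImpl;
import org.apache.commons.math.linear.RealMatrix;

public class HatMatrixSketch {
  public static void main(String[] args) {
    // Illustrative design matrix (n = 4 rows, p = 2 columns).
    RealMatrix X = new Array2DRowRealMatrix(new double[][] {
        {1, 0}, {1, 1}, {1, 2}, {1, 3}});
    int n = X.getRowDimension();
    int p = X.getColumnDimension();

    // Textbook definition: H = X (X'X)^-1 X'
    RealMatrix xtxInv = new LUDecompositionImpl(
        X.transpose().multiply(X)).getSolver().getInverse();
    RealMatrix hDirect = X.multiply(xtxInv).multiply(X.transpose());

    // QR route from the javadoc: H = Q [I_p 0; 0 0] Q'
    QRDecompositionImpl qr = new QRDecompositionImpl(X);
    RealMatrix Q = qr.getQ();
    RealMatrix augI = new Array2DRowRealMatrix(n, n);
    for (int i = 0; i < p; i++) {
      augI.setEntry(i, i, 1d);
    }
    RealMatrix hViaQr = Q.multiply(augI).multiply(Q.transpose());

    // The diagonals (leverages) from the two routes agree up to rounding.
    for (int i = 0; i < n; i++) {
      System.out.println(hDirect.getEntry(i, i) + " vs " + hViaQr.getEntry(i, i));
    }
  }
}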
Code Example #4
 /**
  * Calculates the residuals of multiple linear regression in matrix notation.
  *
  * <pre>
  * u = y - X * b
  * </pre>
  *
  * @return The residuals [n,1] matrix
  */
 protected RealMatrix calculateResiduals() {
   RealMatrix b = calculateBeta();
   return Y.subtract(X.multiply(b));
 }
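
The residual formula u = y - X * b is straightforward to reproduce outside the regression class. A minimal sketch with made-up data and a hypothetical class name (ResidualsSketch), solving the OLS normal equations with the LU solver instead of forming the inverse explicitly:

import org.apache.commons.math.linear.Array2DRowRealMatrix;
import org.apache.commons.math.linear.ArrayRealVector;
import org.apache.commons.math.linear.LUDecompositionImpl;
import org.apache.commons.math.linear.RealMatrix;
import org.apache.commons.math.linear.RealVector;

public class ResidualsSketch {
  public static void main(String[] args) {
    // Illustrative data: X with an intercept column, observed responses y.
    RealMatrix X = new Array2DRowRealMatrix(new double[][] {
        {1, 0}, {1, 1}, {1, 2}, {1, 3}});
    RealVector y = new ArrayRealVector(new double[] {0.1, 0.9, 2.2, 2.9});

    // OLS fit: b solves (X'X) b = X'y.
    RealVector b = new LUDecompositionImpl(X.transpose().multiply(X))
        .getSolver().solve(X.transpose().operate(y));

    // u = y - X * b
    RealVector u = y.subtract(X.operate(b));
    System.out.println(u);
  }
}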
Code Example #5
 /** {@inheritDoc} */
 public double[] estimateResiduals() {
   RealMatrix b = calculateBeta();
   RealMatrix e = Y.subtract(X.multiply(b));
   return e.getColumn(0);
 }
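
Assuming these snippets come from the Commons Math 2.x regression package, as the LUDecompositionImpl calls suggest, the public method above is normally reached through OLSMultipleLinearRegression. A hedged usage sketch with invented data follows; whether the x matrix must include an explicit column of ones for the intercept depends on the 2.x version in use.

import java.util.Arrays;

import org.apache.commons.math.stat.regression.OLSMultipleLinearRegression;

public class EstimateResidualsUsage {
  public static void main(String[] args) {
    // Illustrative sample data only.
    double[] y = {0.1, 0.9, 2.2, 2.9};
    double[][] x = {{0}, {1}, {2}, {3}};

    OLSMultipleLinearRegression ols = new OLSMultipleLinearRegression();
    ols.newSampleData(y, x);

    double[] beta = ols.estimateRegressionParameters();
    double[] residuals = ols.estimateResiduals();

    System.out.println(Arrays.toString(beta));
    System.out.println(Arrays.toString(residuals));
  }
}

As the snippet shows, estimateResiduals() recomputes beta on each call rather than reusing a cached estimate.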