Example 1
  private float derivEfAlpha(int i) {
    float res = 0.0f, xIn, yIn, xA, yA, tmp;

    for (int j = 0; j < k; j++) {
      xIn = (float) inputFeatPts.getEntry(j, 0);
      yIn = (float) inputFeatPts.getEntry(j, 1);
      xA = (float) averageFeatPts.getEntry(j, 0);
      yA = (float) averageFeatPts.getEntry(j, 1);

      tmp = 0.0f;
      for (int a = 0; a < 60; a++) {
        // tmp += alpha[a] * (subFSV[a][j][0] + subFSV[a][j][2]);
        tmp +=
            alpha[a]
                * (s[(landmarks83Index[j] - 1) * 3][a] + s[(landmarks83Index[j] - 1) * 3 + 2][a]);
      }
      res +=
          -2.0f
              * ((xIn + yIn) - (xA + yA) - tmp)
              * alpha[i]
              // * (subFSV[i][j][0] + subFSV[i][j][2]);
              * (s[(landmarks83Index[j] - 1) * 3][i] + s[(landmarks83Index[j] - 1) * 3 + 2][i]);
    }
    return res;
  }
Example 2
  @Test
  public void testInverseIdentity() {
    double tol = 0.00001;
    Map<Key, Value> input = new TreeMap<>(TestUtil.COMPARE_KEY_TO_COLQ);
    input.put(new Key("1", "", "1"), new Value("1".getBytes()));
    //    input.put(new Key("1", "", "2"), new Value("1".getBytes()));
    //    input.put(new Key("2", "", "1"), new Value("1".getBytes()));
    input.put(new Key("2", "", "2"), new Value("1".getBytes()));

    RealMatrix matrix = MemMatrixUtil.buildMatrix(input.entrySet().iterator(), 2);
    Assert.assertEquals(2, matrix.getRowDimension());
    Assert.assertEquals(2, matrix.getColumnDimension());
    Assert.assertEquals(1, matrix.getEntry(0, 0), tol);
    Assert.assertEquals(0, matrix.getEntry(0, 1), tol);
    Assert.assertEquals(0, matrix.getEntry(1, 0), tol);
    Assert.assertEquals(1, matrix.getEntry(1, 1), tol);

    matrix = MemMatrixUtil.doInverse(matrix, -1);
    Assert.assertEquals(2, matrix.getRowDimension());
    Assert.assertEquals(2, matrix.getColumnDimension());
    Assert.assertEquals(1, matrix.getEntry(0, 0), tol);
    Assert.assertEquals(0, matrix.getEntry(0, 1), tol);
    Assert.assertEquals(0, matrix.getEntry(1, 0), tol);
    Assert.assertEquals(1, matrix.getEntry(1, 1), tol);

    SortedMap<Key, Value> back =
        MemMatrixUtil.matrixToMap(new TreeMap<Key, Value>(TestUtil.COMPARE_KEY_TO_COLQ), matrix);
    TestUtil.assertEqualDoubleMap(input, back);
    //    Assert.assertEquals(1, Double.parseDouble(new String(back.get(new Key("1", "", "1")).get())), tol);
    //    Assert.assertEquals(1, Double.parseDouble(new String(back.get(new Key("2", "", "2")).get())), tol);
  }
Example 3
  @Test
  public void testInverse2x2() {
    double tol = 0.001;
    Map<Key, Value> input = new TreeMap<>(TestUtil.COMPARE_KEY_TO_COLQ);
    input.put(new Key("1", "", "1"), new Value("4".getBytes()));
    input.put(new Key("1", "", "2"), new Value("3".getBytes()));
    input.put(new Key("2", "", "1"), new Value("1".getBytes()));
    input.put(new Key("2", "", "2"), new Value("1".getBytes()));
    Map<Key, Value> expect = new TreeMap<>(TestUtil.COMPARE_KEY_TO_COLQ);
    expect.put(new Key("1", "", "1"), new Value("1 ".getBytes()));
    expect.put(new Key("1", "", "2"), new Value("-3".getBytes()));
    expect.put(new Key("2", "", "1"), new Value("-1".getBytes()));
    expect.put(new Key("2", "", "2"), new Value("4 ".getBytes()));

    RealMatrix matrix = MemMatrixUtil.buildMatrix(input.entrySet().iterator(), 2);
    Assert.assertEquals(2, matrix.getRowDimension());
    Assert.assertEquals(2, matrix.getColumnDimension());
    Assert.assertEquals(4, matrix.getEntry(0, 0), tol);
    Assert.assertEquals(3, matrix.getEntry(0, 1), tol);
    Assert.assertEquals(1, matrix.getEntry(1, 0), tol);
    Assert.assertEquals(1, matrix.getEntry(1, 1), tol);

    matrix = MemMatrixUtil.doInverse(matrix, -1);
    Assert.assertEquals(2, matrix.getRowDimension());
    Assert.assertEquals(2, matrix.getColumnDimension());
    Assert.assertEquals(1, matrix.getEntry(0, 0), tol);
    Assert.assertEquals(-3, matrix.getEntry(0, 1), tol);
    Assert.assertEquals(-1, matrix.getEntry(1, 0), tol);
    Assert.assertEquals(4, matrix.getEntry(1, 1), tol);

    SortedMap<Key, Value> back =
        MemMatrixUtil.matrixToMap(new TreeMap<Key, Value>(TestUtil.COMPARE_KEY_TO_COLQ), matrix);
    TestUtil.assertEqualDoubleMap(expect, back);
  }
 public double[][] residuals() {
   double[][] resid = new double[nItems][nItems];
   for (int i = 0; i < SIGMA.getRowDimension(); i++) {
     for (int j = 0; j < SIGMA.getColumnDimension(); j++) {
       resid[i][j] = varcov.getEntry(i, j) - SIGMA.getEntry(i, j);
     }
   }
   return resid;
 }
Example 5
 /////////////////////////////////// Equation 18 ////////////////////////////////////////////////
 private float computeEf() {
   float res = 0.0f;
   for (int j = 0; j < k; j++) {
     res +=
         pow(inputFeatPts.getEntry(j, 0) - modelFeatPts.getEntry(j, 0), 2)
             + pow(inputFeatPts.getEntry(j, 1) - modelFeatPts.getEntry(j, 1), 2);
   }
   return res;
 }
 public double[][] squaredResiduals() {
   double[][] resid = new double[nItems][nItems];
   double temp = 0.0;
   for (int i = 0; i < SIGMA.getRowDimension(); i++) {
     for (int j = 0; j < SIGMA.getColumnDimension(); j++) {
       temp = varcov.getEntry(i, j) - SIGMA.getEntry(i, j);
       resid[i][j] = temp * temp;
     }
   }
   return resid;
 }
 public double meanSquaredResidual() {
   double ni = nItems;
   double temp = 0.0, sum = 0.0;
   for (int i = 0; i < SIGMA.getRowDimension(); i++) {
     for (int j = 0; j < SIGMA.getColumnDimension(); j++) {
       temp = varcov.getEntry(i, j) - SIGMA.getEntry(i, j);
       sum += temp * temp;
     }
   }
   return sum / (ni * ni);
 }
    /**
     * Gradient
     *
     * @param x a <code>double[]</code> input vector
     * @return the gradient as a <code>double[]</code>
     */
    @Override
    public double[] gradient(double[] x) {
      double[] sqrtPsi = new double[nVariables];
      double[] invSqrtPsi = new double[nVariables];
      for (int i = 0; i < nVariables; i++) {
        x[i] = Math.max(0.005, x[i]); // ensure that no parameters are negative
        sqrtPsi[i] = Math.sqrt(x[i]);
        invSqrtPsi[i] = 1.0 / Math.sqrt(x[i]);
      }
      DiagonalMatrix diagPsi = new DiagonalMatrix(x);
      DiagonalMatrix diagSqtPsi = new DiagonalMatrix(sqrtPsi);
      DiagonalMatrix SC = new DiagonalMatrix(invSqrtPsi);

      RealMatrix Sstar = SC.multiply(R2).multiply(SC);

      EigenDecomposition E = new EigenDecomposition(Sstar);
      RealMatrix L = E.getV().getSubMatrix(0, nVariables - 1, 0, nFactors - 1);
      double[] ev = new double[nFactors];
      for (int i = 0; i < nFactors; i++) {
        ev[i] = Math.sqrt(Math.max(E.getRealEigenvalue(i) - 1, 0));
      }
      DiagonalMatrix M = new DiagonalMatrix(ev);
      RealMatrix LOAD = L.multiply(M);

      RealMatrix LL = diagSqtPsi.multiply(LOAD);
      RealMatrix G = LL.multiply(LL.transpose()).add(diagPsi).subtract(R2);

      double[] gradient = new double[nVariables];
      for (int i = 0; i < nVariables; i++) {
        gradient[i] = G.getEntry(i, i) / (x[i] * x[i]);
      }
      return gradient;
    }
  /**
   * Calculates {@code P(D_n < d)} using method described in [1] and doubles (see above).
   *
   * @param d statistic
   * @return the two-sided probability of {@code P(D_n < d)}
   * @throws MathArithmeticException if algorithm fails to convert {@code h} to a {@link
   *     org.apache.commons.math3.fraction.BigFraction} in expressing {@code d} as {@code (k - h) /
   *     m} for integer {@code k, m} and {@code 0 <= h < 1}.
   */
  private double roundedK(double d) throws MathArithmeticException {

    final int k = (int) FastMath.ceil(n * d);
    final FieldMatrix<BigFraction> HBigFraction = this.createH(d);
    final int m = HBigFraction.getRowDimension();

    /*
     * Here the rounding part comes into play: use
     * RealMatrix instead of FieldMatrix<BigFraction>
     */
    final RealMatrix H = new Array2DRowRealMatrix(m, m);

    for (int i = 0; i < m; ++i) {
      for (int j = 0; j < m; ++j) {
        H.setEntry(i, j, HBigFraction.getEntry(i, j).doubleValue());
      }
    }

    final RealMatrix Hpower = H.power(n);

    double pFrac = Hpower.getEntry(k - 1, k - 1);

    for (int i = 1; i <= n; ++i) {
      pFrac *= (double) i / (double) n;
    }

    return pFrac;
  }
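As a worked equation for the computation above (this matches the Marsaglia–Tsang–Wang formulation that the Javadoc's reference [1] appears to describe; the detail m = 2k - 1 comes from that paper and is not visible in this snippet):

P(D_n < d) = \frac{n!}{n^n}\,\left[H^n\right]_{k,k}, \qquad k = \lceil n d \rceil

where H is the m \times m matrix returned by createH(d). The final loop over i multiplies the (k, k) entry of H^n by n!/n^n one factor i/n at a time.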
Example 10
 /**
  * Derives a correlation matrix from a covariance matrix.
  *
  * <p>Uses the formula <br>
  * <code>r(X,Y) = cov(X,Y)/s(X)s(Y)</code> where <code>r(&middot;,&middot;)</code> is the
  * correlation coefficient and <code>s(&middot;)</code> means standard deviation.
  *
  * @param covarianceMatrix the covariance matrix
  * @return correlation matrix
  */
 public RealMatrix covarianceToCorrelation(RealMatrix covarianceMatrix) {
   int nVars = covarianceMatrix.getColumnDimension();
   RealMatrix outMatrix = new BlockRealMatrix(nVars, nVars);
   for (int i = 0; i < nVars; i++) {
     double sigma = FastMath.sqrt(covarianceMatrix.getEntry(i, i));
     outMatrix.setEntry(i, i, 1d);
     for (int j = 0; j < i; j++) {
       double entry =
           covarianceMatrix.getEntry(i, j)
               / (sigma * FastMath.sqrt(covarianceMatrix.getEntry(j, j)));
       outMatrix.setEntry(i, j, entry);
       outMatrix.setEntry(j, i, entry);
     }
   }
   return outMatrix;
 }
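A minimal usage sketch for the method above, assuming it is the covarianceToCorrelation method of Apache Commons Math's PearsonsCorrelation (which its Javadoc matches); the 2x2 values are made up for illustration:

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation;

public class CovToCorrDemo {
  public static void main(String[] args) {
    // var(X) = 4, var(Y) = 9, cov(X, Y) = 3  =>  r = 3 / (2 * 3) = 0.5
    RealMatrix cov = new Array2DRowRealMatrix(new double[][] {{4, 3}, {3, 9}});
    RealMatrix corr = new PearsonsCorrelation().covarianceToCorrelation(cov);
    System.out.println(corr.getEntry(0, 1)); // prints 0.5
  }
}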
 public double sumMatrix(RealMatrix matrix) {
   double sum = 0.0;
   for (int i = 0; i < matrix.getRowDimension(); i++) {
     for (int j = 0; j < matrix.getColumnDimension(); j++) {
       sum += matrix.getEntry(i, j);
     }
   }
   return sum;
 }
Example 12
  // Computes the state value (building condition) of the interpolation point
  public double calculateIntValue() {
    double value = 0;

    for (int i = 0; i < numOfPoints; i++) {
      value = value + weightVector.getEntry(i, 0) * points.get(i).data;
    }

    return value;
  }
    public double valueAt(double[] param) {
      double[] sdInv = new double[nVariables];

      for (int i = 0; i < nVariables; i++) {
        R.setEntry(i, i, 1.0 - param[i]);
        sdInv[i] = 1.0 / Sinv.getEntry(i, i);
      }

      DiagonalMatrix diagSdInv = new DiagonalMatrix(sdInv);

      EigenDecomposition eigen = new EigenDecomposition(R);
      RealMatrix eigenVectors = eigen.getV().getSubMatrix(0, nVariables - 1, 0, nFactors - 1);

      double[] ev = new double[nFactors];
      for (int i = 0; i < nFactors; i++) {
        ev[i] = Math.sqrt(eigen.getRealEigenvalue(i));
      }
      // Use the Apache version of DiagonalMatrix when upgrading to version 3.2
      DiagonalMatrix evMatrix = new DiagonalMatrix(ev);
      RealMatrix LAMBDA = eigenVectors.multiply(evMatrix);
      RealMatrix SIGMA = (LAMBDA.multiply(LAMBDA.transpose()));

      double value = 0.0;
      RealMatrix DIF = R.subtract(SIGMA);
      for (int i = 0; i < DIF.getRowDimension(); i++) {
        for (int j = 0; j < DIF.getColumnDimension(); j++) {
          value = DIF.getEntry(i, j);
          DIF.setEntry(i, j, Math.pow(value, 2));
        }
      }

      RealMatrix RESID = diagSdInv.multiply(DIF).multiply(diagSdInv);

      double sum = 0.0;
      for (int i = 0; i < RESID.getRowDimension(); i++) {
        for (int j = 0; j < RESID.getColumnDimension(); j++) {
          sum += RESID.getEntry(i, j);
        }
      }
      return sum;
    }
Example 14
 /**
  * Returns a matrix of standard errors associated with the estimates in the correlation matrix.
  * <br>
  * <code>getCorrelationStandardErrors().getEntry(i,j)</code> is the standard error associated with
  * <code>getCorrelationMatrix.getEntry(i,j)</code>
  *
  * <p>The formula used to compute the standard error is <br>
  * <code>SE<sub>r</sub> = ((1 - r<sup>2</sup>) / (n - 2))<sup>1/2</sup></code> where <code>r
  * </code> is the estimated correlation coefficient and <code>n</code> is the number of
  * observations in the source dataset.
  *
  * <p>To use this method, one of the constructors that supply an input matrix must have been used
  * to create this instance.
  *
  * @return matrix of correlation standard errors
  * @throws NullPointerException if this instance was created with no data
  */
 public RealMatrix getCorrelationStandardErrors() {
   int nVars = correlationMatrix.getColumnDimension();
   double[][] out = new double[nVars][nVars];
   for (int i = 0; i < nVars; i++) {
     for (int j = 0; j < nVars; j++) {
       double r = correlationMatrix.getEntry(i, j);
       out[i][j] = FastMath.sqrt((1 - r * r) / (nObs - 2));
     }
   }
   return new BlockRealMatrix(out);
 }
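A quick numeric check of the formula above, with hypothetical values r = 0.8 and n = 27 observations:

SE_r = \sqrt{\frac{1 - 0.8^2}{27 - 2}} = \sqrt{0.0144} = 0.12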
 public double sumSquaredElements(RealMatrix matrix) {
   double sum = 0.0;
   double v = 0.0;
   for (int i = 0; i < matrix.getRowDimension(); i++) {
     for (int j = 0; j < matrix.getColumnDimension(); j++) {
       v = matrix.getEntry(i, j);
       sum += (v * v);
     }
   }
   return sum;
 }
Example 16
 @Test
 public void testTransposeTimesSelf() {
   Map<Integer, float[]> a = new HashMap<>();
   a.put(-1, new float[] {1.3f, -2.0f, 3.0f});
   a.put(1, new float[] {2.0f, 0.0f, 5.0f});
   a.put(3, new float[] {0.0f, -1.5f, 5.5f});
   RealMatrix ata = VectorMath.transposeTimesSelf(a.values());
   RealMatrix expected =
       new Array2DRowRealMatrix(
           new double[][] {
             {5.69, -2.6, 13.9},
             {-2.6, 6.25, -14.25},
             {13.9, -14.25, 64.25}
           });
   for (int row = 0; row < 3; row++) {
     for (int col = 0; col < 3; col++) {
       assertEquals(expected.getEntry(row, col), ata.getEntry(row, col), FLOAT_EPSILON);
     }
   }
 }
  // Makes and scales the matrices V, D, and VT (to avoid ugly decimals)
  private void makeVDVT(EigenDecomp ed) {
    V = ed.getV();
    D = ed.getD();
    VT = ed.getVT();
    double ref = 0;

    for (int i = 0; i < V.getRowDimension(); i++) {
      ref = 0;
      for (int j = 0; j < V.getColumnDimension(); j++) {
        if (V.getEntry(j, i) != 0 && ref == 0) {
          ref = V.getEntry(j, i);
        }
        if (ref != 0) {
          V.setEntry(j, i, V.getEntry(j, i) / Math.abs(ref));
        }
      }
    }

    for (int i = 0; i < VT.getRowDimension(); i++) {
      ref = 0;
      for (int j = 0; j < VT.getColumnDimension(); j++) {
        if (VT.getEntry(j, i) != 0 && ref == 0) {
          ref = VT.getEntry(j, i);
        }
        if (ref != 0) {
          VT.setEntry(j, i, VT.getEntry(j, i) / Math.abs(ref));
        }
      }
    }
  }
 @Override
 public void train(List<Instance> instances) {
   // ------------------------ initialize rows and columns ---------------------
   int rows = instances.size();
   int columns = 0;
   // get max columns
   for (Instance i : instances) {
     int localColumns = Collections.max(i.getFeatureVector().getFeatureMap().keySet());
     if (localColumns > columns) columns = localColumns;
   }
   // ------------------------ initialize alpha vector -----------------------
   alpha = new ArrayRealVector(rows, 0);
   // ------------------------ initialize base X and Y for use --------------------------
   double[][] X = new double[rows][columns];
   double[] Y = new double[rows];
   for (int i = 0; i < rows; i++) {
     Y[i] = ((ClassificationLabel) instances.get(i).getLabel()).getLabelValue();
     for (int j = 0; j < columns; j++) {
       X[i][j] = instances.get(i).getFeatureVector().get(j + 1);
     }
   }
   // ---------------------- gram matrix -------------------
   matrixX = new Array2DRowRealMatrix(X);
   RealMatrix gram = new Array2DRowRealMatrix(rows, rows);
   for (int i = 0; i < rows; i++) {
     for (int j = 0; j < rows; j++) {
       gram.setEntry(i, j, kernelFunction(matrixX.getRowVector(i), matrixX.getRowVector(j)));
     }
   }
   // ---------------------- gradient ascent --------------------------
   Sigmoid g = new Sigmoid(); // helper function
   System.out.println("Training start...");
   System.out.println(
       "Learning rate: " + _learning_rate + " Training times: " + _training_iterations);
   for (int idx = 0; idx < _training_iterations; idx++) {
     System.out.println("Training iteration: " + (idx + 1));
     for (int k = 0; k < rows; k++) {
       double gradient_ascent = 0.0;
       RealVector alpha_gram = gram.operate(alpha);
       for (int i = 0; i < rows; i++) {
         double lambda = alpha_gram.getEntry(i);
         double kernel = gram.getEntry(i, k);
         gradient_ascent =
             gradient_ascent
                 + Y[i] * g.value(-lambda) * kernel
                 + (1 - Y[i]) * g.value(lambda) * (-kernel);
       }
       alpha.setEntry(k, alpha.getEntry(k) + _learning_rate * gradient_ascent);
     }
   }
   System.out.println("Training done!");
 }
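The Gram-matrix step above relies on kernelFunction(...), which is not part of this snippet. A minimal sketch of one plausible choice, a Gaussian (RBF) kernel; the method name matches the call site, but the kernel type and the gamma value are assumptions for illustration only:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class KernelSketch {
  // Gaussian (RBF) kernel: k(u, v) = exp(-gamma * ||u - v||^2)
  static double kernelFunction(RealVector u, RealVector v) {
    double gamma = 0.5; // assumed hyperparameter
    double dist = u.getDistance(v);
    return Math.exp(-gamma * dist * dist);
  }

  public static void main(String[] args) {
    RealVector a = new ArrayRealVector(new double[] {1.0, 2.0});
    RealVector b = new ArrayRealVector(new double[] {2.0, 0.0});
    System.out.println(kernelFunction(a, b)); // exp(-0.5 * 5) ~= 0.0821
  }
}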
 private RealMatrix normalizeData(RealMatrix matrix, UserProfileEigenModel model) {
   RealMatrix normalizedData =
       new Array2DRowRealMatrix(matrix.getRowDimension(), matrix.getColumnDimension());
   if (LOG.isDebugEnabled()) LOG.debug("model statistics size: " + model.statistics().length);
   for (int i = 0; i < matrix.getRowDimension(); i++) {
     for (int j = 0; j < matrix.getColumnDimension(); j++) {
       double value =
           (matrix.getEntry(i, j) - model.statistics()[j].getMean())
               / model.statistics()[j].getStddev();
       normalizedData.setEntry(i, j, value);
     }
   }
   return normalizedData;
 }
Example 20
 private static double[] calculateColumnInverseMeans(RealMatrix matrix) {
   return IntStream.range(0, matrix.getColumnDimension())
       .mapToDouble(
           i ->
               1.0
                   / IntStream.range(0, matrix.getRowDimension())
                       .mapToDouble(j -> matrix.getEntry(j, i))
                       .average()
                       .orElseThrow(
                           () ->
                               new IllegalArgumentException(
                                   "cannot calculate a average for column " + i)))
       .toArray();
 }
  public double[] getStartValues() {
    double[] start = new double[nVariables];

    if (nFactors == 1) {
      for (int i = 0; i < nVariables; i++) {
        start[i] = 0.5;
      }
    } else {
      for (int i = 0; i < nVariables; i++) {
        start[i] = Math.min(1.0 / Sinv.getEntry(i, i), 1.0);
      }
    }

    return start;
  }
Example 22
 private RealMatrix removeZeroColumns(RealMatrix base, List<Integer> zeroColumns) {
   int adjustedDim = base.getRowDimension() - zeroColumns.size();
   if (adjustedDim == 0) return base;
   RealMatrix adjusted = new Array2DRowRealMatrix(adjustedDim, adjustedDim);
   int i = 0, j = 0;
   for (int basei = 0; basei < base.getRowDimension(); basei++) {
     if (zeroColumns.contains(basei)) continue;
     for (int basej = 0; basej < base.getColumnDimension(); basej++) {
       if (zeroColumns.contains(basej)) continue;
       adjusted.setEntry(i, j++, base.getEntry(basei, basej));
     }
     i++;
     j = 0;
   }
   return adjusted;
 }
Example 23
 /**
  * Returns a matrix of p-values associated with the (two-sided) null hypothesis that the
  * corresponding correlation coefficient is zero.
  *
  * <p><code>getCorrelationPValues().getEntry(i,j)</code> is the probability that a random variable
  * distributed as <code>t<sub>n-2</sub></code> takes a value with absolute value greater than or
  * equal to <br>
  * <code>|r|((n - 2) / (1 - r<sup>2</sup>))<sup>1/2</sup></code>
  *
  * <p>The values in the matrix are sometimes referred to as the <i>significance</i> of the
  * corresponding correlation coefficients.
  *
  * <p>To use this method, one of the constructors that supply an input matrix must have been used
  * to create this instance.
  *
  * @return matrix of p-values
  * @throws org.apache.commons.math3.exception.MaxCountExceededException if an error occurs
  *     estimating probabilities
  * @throws NullPointerException if this instance was created with no data
  */
 public RealMatrix getCorrelationPValues() {
   TDistribution tDistribution = new TDistribution(nObs - 2);
   int nVars = correlationMatrix.getColumnDimension();
   double[][] out = new double[nVars][nVars];
   for (int i = 0; i < nVars; i++) {
     for (int j = 0; j < nVars; j++) {
       if (i == j) {
         out[i][j] = 0d;
       } else {
         double r = correlationMatrix.getEntry(i, j);
         double t = FastMath.abs(r * FastMath.sqrt((nObs - 2) / (1 - r * r)));
         out[i][j] = 2 * tDistribution.cumulativeProbability(-t);
       }
     }
   }
   return new BlockRealMatrix(out);
 }
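A standalone sketch of the same per-coefficient p-value computation, using Commons Math's TDistribution directly; r, nObs, and the class name are illustrative assumptions:

import org.apache.commons.math3.distribution.TDistribution;
import org.apache.commons.math3.util.FastMath;

public class CorrelationPValueDemo {
  public static void main(String[] args) {
    double r = 0.5; // assumed correlation coefficient
    int nObs = 12;  // assumed number of observations
    double t = FastMath.abs(r * FastMath.sqrt((nObs - 2) / (1 - r * r)));
    double p = 2 * new TDistribution(nObs - 2).cumulativeProbability(-t);
    System.out.println(p); // two-sided p-value
  }
}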
  private static RealMatrix T6(double t) {
    double theta, thetaN, magThetaD;

    // Earth-Sun vector in GSE
    Coordinate GSE_ES = new Coordinate("GSE", 1, 0, 0, t);
    // Convert GSE --> GEI
    Coordinate GEI = toGEI(GSE_ES);
    RealMatrix GEI_ES = CoordinateToMatrix(GEI);

    thetaN =
        GEI_ES.getEntry(0, 0)
            * (-0.032 + GEI_ES.getEntry(1, 0) * -0.112 + GEI_ES.getEntry(2, 0) * -0.048);
    Vector3D ThetaD =
        new Vector3D(
            -0.424 * GEI_ES.getEntry(2, 0) - 0.897 * GEI_ES.getEntry(1, 0),
            0.897 * GEI_ES.getEntry(0, 0) - 0.1217 * GEI_ES.getEntry(2, 0),
            0.1217 * GEI_ES.getEntry(1, 0) - -0.424 * GEI_ES.getEntry(0, 0));
    magThetaD = ThetaD.getNorm();
    theta = Math.asin(thetaN / magThetaD);

    RealMatrix T6 = RotationMatrix(Math.toDegrees(theta), new Vector3D(1, 0, 0));
    return T6.transpose(); // unknown why transpose is needed to match previous results
  }
  @Override
  public List<MLCallbackResult> detect(
      final String user,
      final String algorithm,
      UserActivityAggModel userActivity,
      UserProfileEigenModel aModel) {
    RealMatrix inputData = userActivity.matrix();
    LOG.warn(
        "EigenBasedAnomalyDetection predictAnomaly called with dimension: "
            + inputData.getRowDimension()
            + "x"
            + inputData.getColumnDimension());

    if (aModel == null) {
      LOG.warn(
          "nothing to do as the input model does not have required values, returning from evaluating this algorithm..");
      return null;
    }

    List<MLCallbackResult> mlCallbackResults = new ArrayList<MLCallbackResult>();
    RealMatrix normalizedMat = normalizeData(inputData, aModel);

    UserCommandStatistics[] listStats = aModel.statistics();
    int colWithHighVariant = 0;

    for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
      if (!listStats[j].isLowVariant()) {
        colWithHighVariant++;
      }
    }

    final Map<String, String> context =
        new HashMap<String, String>() {
          {
            put(UserProfileConstants.USER_TAG, user);
            put(UserProfileConstants.ALGORITHM_TAG, algorithm);
          }
        };

    Map<Integer, String> lineNoWithVariantBasedAnomalyDetection = new HashMap<Integer, String>();
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
      MLCallbackResult aResult = new MLCallbackResult();
      aResult.setContext(context);

      for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
        // LOG.info("mean for j=" + j + " is:" + listStats[j].getMean());
        // LOG.info("stddev for j=" + j + " is:" + listStats[j].getStddev());
        if (listStats[j].isLowVariant()) {
          // LOG.info(listOfCmds[j] + " is low variant");
          if (normalizedMat.getEntry(i, j) > listStats[j].getMean()) {
            lineNoWithVariantBasedAnomalyDetection.put(i, "lowVariantAnomaly");
            aResult.setAnomaly(true);
            aResult.setTimestamp(userActivity.timestamp());
            aResult.setFeature(listStats[j].getCommandName());
            aResult.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
            List<String> datapoints = new ArrayList<String>();
            double[] rowVals = inputData.getRow(i);
            for (double rowVal : rowVals) datapoints.add(rowVal + "");
            aResult.setDatapoints(datapoints);
            aResult.setId(user);
            mlCallbackResults.add(aResult);
          } else {
            aResult.setAnomaly(false);
            aResult.setTimestamp(userActivity.timestamp());
            mlCallbackResults.add(aResult);
          }
        }
      }
      // return results;
    }

    // LOG.info("results size here: " + results.length);

    // LOG.info("col with high variant: " + colWithHighVariant);
    RealMatrix finalMatWithoutLowVariantFeatures =
        new Array2DRowRealMatrix(normalizedMat.getRowDimension(), colWithHighVariant);
    // LOG.info("size of final test data: " + finalMatWithoutLowVariantFeatures.getRowDimension()
    // +"x"+ finalMatWithoutLowVariantFeatures.getColumnDimension());
    int finalMatrixRow = 0;
    int finalMatrixCol = 0;
    for (int i = 0; i < normalizedMat.getRowDimension(); i++) {
      for (int j = 0; j < normalizedMat.getColumnDimension(); j++) {
        if (!listStats[j].isLowVariant()) {
          finalMatWithoutLowVariantFeatures.setEntry(
              finalMatrixRow, finalMatrixCol, normalizedMat.getEntry(i, j));
          finalMatrixCol++;
        }
      }
      finalMatrixCol = 0;
      finalMatrixRow++;
    }
    RealVector[] pcs = aModel.principalComponents();
    // LOG.info("pc size: " + pcs.getRowDimension() +"x" + pcs.getColumnDimension());

    RealMatrix finalInputMatTranspose = finalMatWithoutLowVariantFeatures.transpose();

    for (int i = 0; i < finalMatWithoutLowVariantFeatures.getRowDimension(); i++) {
      if (lineNoWithVariantBasedAnomalyDetection.get(i) == null) {
        MLCallbackResult result = new MLCallbackResult();
        result.setContext(context);
        for (int sz = 0; sz < pcs.length; sz++) {
          double[] pc1 = pcs[sz].toArray();
          RealMatrix pc1Mat = new Array2DRowRealMatrix(pc1);
          RealMatrix transposePC1Mat = pc1Mat.transpose();
          RealMatrix testData =
              pc1Mat.multiply(transposePC1Mat).multiply(finalInputMatTranspose.getColumnMatrix(i));
          // LOG.info("testData size: " + testData.getRowDimension() + "x" +
          // testData.getColumnDimension());
          RealMatrix testDataTranspose = testData.transpose();
          // LOG.info("testData transpose size: " + testDataTranspose.getRowDimension() + "x" +
          // testDataTranspose.getColumnDimension());
          RealVector iRowVector = testDataTranspose.getRowVector(0);
          // RealVector pc1Vector = transposePC1Mat.getRowVector(sz);
          RealVector pc1Vector = transposePC1Mat.getRowVector(0);
          double distanceiRowAndPC1 = iRowVector.getDistance(pc1Vector);
          // LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " +
          // model.getMaxL2Norm().getEntry(sz));
          // LOG.info("model.getMaxL2Norm().getEntry(sz):" + model.getMaxL2Norm().getEntry(sz));
          if (distanceiRowAndPC1 > aModel.maximumL2Norm().getEntry(sz)) {
            // LOG.info("distance from pc sz: " + sz + " " + distanceiRowAndPC1 + " " +
            // model.getMaxL2Norm().getEntry(sz));
            result.setAnomaly(true);
            result.setFeature(aModel.statistics()[sz].getCommandName());
            result.setTimestamp(System.currentTimeMillis());
            result.setAlgorithm(UserProfileConstants.EIGEN_DECOMPOSITION_ALGORITHM);
            List<String> datapoints = new ArrayList<String>();
            double[] rowVals = inputData.getRow(i);
            for (double rowVal : rowVals) datapoints.add(rowVal + "");
            result.setDatapoints(datapoints);
            result.setId(user);
          }
        }
        mlCallbackResults.add(result);
      }
    }
    return mlCallbackResults;
  }
Example 26
 public double rateFor(final int currentPage, final int pageIndex) {
   return matrix.getEntry(currentPage, pageIndex);
 }
  private void computeFactorLoadings(double[] x) {
    uniqueness = x;
    communality = new double[nVariables];

    for (int i = 0; i < nVariables; i++) {
      R.setEntry(i, i, 1.0 - x[i]);
    }

    EigenDecomposition E = new EigenDecomposition(R);
    RealMatrix L = E.getV().getSubMatrix(0, nVariables - 1, 0, nFactors - 1);
    double[] ev = new double[nFactors];
    for (int i = 0; i < nFactors; i++) {
      ev[i] = Math.sqrt(E.getRealEigenvalue(i));
    }
    DiagonalMatrix M = new DiagonalMatrix(ev);
    RealMatrix LOAD = L.multiply(M);

    // rotate factor loadings
    if (rotationMethod != RotationMethod.NONE) {
      GPArotation gpa = new GPArotation();
      RotationResults results = gpa.rotate(LOAD, rotationMethod);
      LOAD = results.getFactorLoadings();
    }

    Sum[] colSums = new Sum[nFactors];
    Sum[] colSumsSquares = new Sum[nFactors];

    for (int j = 0; j < nFactors; j++) {
      colSums[j] = new Sum();
      colSumsSquares[j] = new Sum();
    }

    factorLoading = new double[nVariables][nFactors];

    for (int i = 0; i < nVariables; i++) {
      for (int j = 0; j < nFactors; j++) {
        factorLoading[i][j] = LOAD.getEntry(i, j);
        colSums[j].increment(factorLoading[i][j]);
        colSumsSquares[j].increment(Math.pow(factorLoading[i][j], 2));
        communality[i] += Math.pow(factorLoading[i][j], 2);
      }
    }

    // check sign of factor
    double sign = 1.0;
    for (int i = 0; i < nVariables; i++) {
      for (int j = 0; j < nFactors; j++) {
        if (colSums[j].getResult() < 0) {
          sign = -1.0;
        } else {
          sign = 1.0;
        }
        factorLoading[i][j] = factorLoading[i][j] * sign;
      }
    }

    double totSumOfSquares = 0.0;
    sumsOfSquares = new double[nFactors];
    proportionOfExplainedVariance = new double[nFactors];
    proportionOfVariance = new double[nFactors];
    for (int j = 0; j < nFactors; j++) {
      sumsOfSquares[j] = colSumsSquares[j].getResult();
      totSumOfSquares += sumsOfSquares[j];
    }
    for (int j = 0; j < nFactors; j++) {
      proportionOfExplainedVariance[j] = sumsOfSquares[j] / totSumOfSquares;
      proportionOfVariance[j] = sumsOfSquares[j] / nVariables;
    }
  }