private static double[] solution(DoubleMatrix2D X, DoubleMatrix2D Y, int k) {
  // Solve X * Beta = Y for Beta.
  // Only the first column of Y is used.
  // k is the number of beta coefficients.
  QRDecomposition qr = new QRDecomposition(X);
  if (qr.hasFullRank()) {
    DoubleMatrix2D B = qr.solve(Y);
    return B.viewColumn(0).toArray();
  } else {
    DoubleMatrix1D Y0 = Y.viewColumn(0); // first column of Y
    SingularValueDecomposition svd = new SingularValueDecomposition(X);
    DoubleMatrix2D S = svd.getS();
    DoubleMatrix2D V = svd.getV();
    DoubleMatrix2D U = svd.getU();
    Algebra alg = new Algebra();
    DoubleMatrix2D Ut = alg.transpose(U);
    DoubleMatrix1D g = alg.mult(Ut, Y0); // g = U^T * Y0
    for (int j = 0; j < k; j++) {
      // Solve S*p = g for p; S is a diagonal matrix
      double x = S.getQuick(j, j);
      if (x > 0.) {
        x = g.getQuick(j) / x; // p[j] = g[j] / S[j][j]
        g.setQuick(j, x);      // overwrite g by p
      } else {
        g.setQuick(j, 0.);
      }
    }
    DoubleMatrix1D beta = alg.mult(V, g); // beta = V * p
    return beta.toArray();
  }
}
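// Hedged sketch (added, not part of the original API): the rank-deficient
// branch above applies the Moore-Penrose pseudoinverse, beta = V * S^+ * U^T * y,
// where S^+ inverts only the strictly positive singular values. The helper
// below (an illustrative name) repeats that computation for a single
// right-hand side so the fallback can be exercised in isolation.
private static double[] pseudoInverseSolve(DoubleMatrix2D X, DoubleMatrix1D y) {
  SingularValueDecomposition svd = new SingularValueDecomposition(X);
  Algebra alg = new Algebra();
  DoubleMatrix1D g = alg.mult(alg.transpose(svd.getU()), y); // g = U^T * y
  DoubleMatrix2D S = svd.getS();
  for (int j = 0; j < g.size(); j++) {
    double s = S.getQuick(j, j);
    g.setQuick(j, s > 0. ? g.getQuick(j) / s : 0.); // g = S^+ * g
  }
  return alg.mult(svd.getV(), g).toArray(); // beta = V * g
}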
/**
 * Saves the factorisation machine in a compressed, human-readable file.
 *
 * @param out output stream
 * @throws IOException when an I/O error occurs
 */
public void save(OutputStream out) throws IOException {
  int N = m.rows();
  int K = m.columns();
  try (ZipOutputStream zip = new ZipOutputStream(out)) {
    zip.putNextEntry(new ZipEntry("info"));
    PrintStream ps = new PrintStream(zip);
    ps.println(N);
    ps.println(K);
    ps.flush();
    zip.closeEntry();
    zip.putNextEntry(new ZipEntry("b"));
    ps = new PrintStream(zip);
    ps.println(b);
    ps.flush();
    zip.closeEntry();
    zip.putNextEntry(new ZipEntry("w"));
    saveDenseDoubleMatrix1D(zip, w);
    zip.closeEntry();
    zip.putNextEntry(new ZipEntry("m"));
    saveDenseDoubleMatrix2D(zip, m);
    zip.closeEntry();
  }
}
private DoubleMatrix2D newFromTemplate(DoubleMatrix2D template, int rows, int columns) {
  if (template != null) {
    return template.like(rows, columns);
  } else {
    return x.like(rows, columns);
  }
}
/**
 * Computes the regression coefficients using the least squares method. This is a linear
 * regression with a polynomial of degree `deg` @f$= k@f$ and @f$k+1@f$ regression
 * coefficients @f$\beta_j@f$. The model is
 * @f[ y = \beta_0 + \sum_{j=1}^k \beta_j x^j. @f]
 * Given the @f$n@f$ data points @f$(X_i, Y_i)@f$, @f$i=0,1,…,(n-1)@f$, the method computes
 * and returns the array @f$[\beta_0, \beta_1, …, \beta_k]@f$. Restriction: @f$n > k@f$.
 *
 * @param X the regressor variables
 * @param Y the response
 * @return the regression coefficients
 */
public static double[] calcCoefficients(double[] X, double[] Y, int deg) {
  final int n = X.length;
  if (n != Y.length)
    throw new IllegalArgumentException("Lengths of X and Y are not equal");
  if (n < deg + 1)
    throw new IllegalArgumentException("Not enough points");

  // Accumulate the normal-equation sums: xSums[j] = sum of x^j, xySums[j] = sum of x^j * y
  final double[] xSums = new double[2 * deg + 1];
  final double[] xySums = new double[deg + 1];
  xSums[0] = n;
  for (int i = 0; i < n; i++) {
    double xv = X[i];
    xySums[0] += Y[i];
    for (int j = 1; j <= 2 * deg; j++) {
      xSums[j] += xv;
      if (j <= deg)
        xySums[j] += xv * Y[i];
      xv *= X[i];
    }
  }

  // Build the (deg+1) x (deg+1) normal equations A * beta = B
  final DoubleMatrix2D A = new DenseDoubleMatrix2D(deg + 1, deg + 1);
  final DoubleMatrix2D B = new DenseDoubleMatrix2D(deg + 1, 1);
  for (int i = 0; i <= deg; i++) {
    for (int j = 0; j <= deg; j++) {
      final int d = i + j;
      A.setQuick(i, j, xSums[d]);
    }
    B.setQuick(i, 0, xySums[i]);
  }
  return solution(A, B, deg + 1);
}
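// Usage sketch (added, illustrative): recover the coefficients of
// y = 1 + 2x + 3x^2 from noise-free samples; the returned array should be
// close to {1, 2, 3}. Assumes this class and Colt are on the classpath.
public static void polynomialFitExample() {
  double[] x = {0, 1, 2, 3, 4, 5};
  double[] y = new double[x.length];
  for (int i = 0; i < x.length; i++) {
    y[i] = 1 + 2 * x[i] + 3 * x[i] * x[i];
  }
  double[] beta = calcCoefficients(x, y, 2); // deg = 2, so 3 coefficients
  System.out.println(java.util.Arrays.toString(beta)); // ~ [1.0, 2.0, 3.0]
}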
/**
 * Computes the density function <SPAN CLASS="MATH"><I>f</I>(<I>x</I>)</SPAN>, with <SPAN
 * CLASS="MATH"><I>λ</I><SUB>i</SUB> =</SPAN> <TT>lambda[<SPAN CLASS="MATH"><I>i</I> -
 * 1</SPAN>]</TT>, <SPAN CLASS="MATH"><I>i</I> = 1,…, <I>k</I></SPAN>.
 *
 * @param lambda rates of the hypoexponential distribution
 * @param x value at which the density is evaluated
 * @return density at <SPAN CLASS="MATH"><I>x</I></SPAN>
 */
public static double density(double[] lambda, double x) {
  testLambda(lambda);
  if (x < 0)
    return 0;
  DoubleMatrix2D Ax = buildMatrix(lambda, x);
  DoubleMatrix2D M = DMatrix.expBidiagonal(Ax);
  int k = lambda.length;
  return lambda[k - 1] * M.getQuick(0, k - 1);
}
public double apply(int first, int second, double third) {
  // Track the (row, column) pair with the largest average value
  if (third / count_matrix.getQuick(first, second) > max_val) {
    max_m = first;
    max_n = second;
    max_val = third / count_matrix.getQuick(first, second);
  }
  return third;
}
@Override
public DoubleMatrix2D transitionMatrix(double from_time, double to_time) {
  // Cache on (fractional part of from_time, offset of to_time), since the
  // seasonal model repeats with period 1.0.
  double from_time_remainder = from_time % 1.0;
  double from_time_div = from_time - from_time_remainder;
  double to_time_remainder = to_time - from_time_div;
  DoubleMatrix2D cached =
      cachedTransitionMatrices.get(
          new Pair<Double, Double>(from_time_remainder, to_time_remainder));
  if (cached != null) {
    return cached;
  }

  // Step through the seasons, multiplying the per-season transition matrices.
  double step_start_time = from_time;
  double step_end_time = step_start_time;
  DoubleMatrix2D result = F.identity(num_states);
  while (step_start_time < to_time) {
    double step_start_time_remainder = step_start_time % 1.0;
    double step_start_time_div = step_start_time - step_start_time_remainder;
    if (isInSeason1(step_start_time)) {
      step_end_time =
          Math.min(
              to_time,
              step_start_time_div + season1Start + season1Length + infitesimalTimeInterval);
      result =
          result.zMult(
              season1MigrationModel.transitionMatrix(step_start_time, step_end_time), null);
    } else { // in season 2
      if (step_start_time_remainder < season1Start) {
        step_end_time =
            Math.min(to_time, step_start_time_div + season1Start + infitesimalTimeInterval);
      } else {
        step_end_time =
            Math.min(to_time, step_start_time_div + 1.0 + season1Start + infitesimalTimeInterval);
      }
      result =
          result.zMult(
              season2MigrationModel.transitionMatrix(step_start_time, step_end_time), null);
    }
    step_start_time = step_end_time;
  }

  // Cache the result, evicting half the cache once it is full.
  if (cachedTransitionMatrices.size() >= maxCachedTransitionMatrices) {
    for (int i = 0; i < cachedTransitionMatrices.size() / 2; i++) {
      cachedTransitionMatrices.remove(cachedTransitionMatrices.keySet().iterator().next());
    }
  }
  cachedTransitionMatrices.put(
      new Pair<Double, Double>(from_time_remainder, to_time_remainder), result);
  return result;
}
public ColtEigenvalueDecomposition(DoubleMatrix dm) {
  int nrows = dm.numberOfRows();
  int ncols = dm.numberOfColumns();
  DoubleMatrix2D matrix = DoubleFactory2D.dense.make(nrows, ncols);
  for (int r = 0; r < nrows; r++) {
    for (int c = 0; c < ncols; c++) {
      matrix.setQuick(r, c, dm.get(r, c));
    }
  }
  myDecomposition = new cern.colt.matrix.linalg.EigenvalueDecomposition(matrix);
}
// Builds the bidiagonal matrix A*x out of the lambda rates
private static DoubleMatrix2D buildMatrix(double[] lambda, double x) {
  int k = lambda.length;
  DoubleFactory2D F2 = DoubleFactory2D.dense;
  DoubleMatrix2D A = F2.make(k, k);
  for (int j = 0; j < k - 1; j++) {
    A.setQuick(j, j, -lambda[j] * x);
    A.setQuick(j, j + 1, lambda[j] * x);
  }
  A.setQuick(k - 1, k - 1, -lambda[k - 1] * x);
  return A;
}
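// Illustration (added): for k = 2 the scaled generator built above is
//
//     A*x = [ -lambda[0]*x    lambda[0]*x ]
//           [      0         -lambda[1]*x ]
//
// and density(...) above evaluates f(x) = lambda[k-1] * [exp(A*x)]_{0, k-1},
// i.e. the first-row, last-column entry of the matrix exponential.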
/**
 * Classifies an instance w.r.t. the partitions found. It applies a naive min-distance algorithm.
 *
 * @param instance the instance to classify
 * @return the cluster that contains the nearest point to the instance
 */
public int clusterInstance(Instance instance) throws java.lang.Exception {
  DoubleMatrix1D u = DoubleFactory1D.dense.make(instance.toDoubleArray());
  double min_dist = Double.POSITIVE_INFINITY;
  int c = -1;
  for (int i = 0; i < v.rows(); i++) {
    double dist = distnorm2(u, v.viewRow(i));
    if (dist < min_dist) {
      c = cluster[i];
      min_dist = dist;
    }
  }
  return c;
}
/**
 * Computes the complementary distribution <SPAN CLASS="MATH">bar(F)(<I>x</I>)</SPAN>, with <SPAN
 * CLASS="MATH"><I>λ</I><SUB>i</SUB> =</SPAN> <TT>lambda[<SPAN CLASS="MATH"><I>i</I> -
 * 1</SPAN>]</TT>, <SPAN CLASS="MATH"><I>i</I> = 1,…, <I>k</I></SPAN>.
 *
 * @param lambda rates of the hypoexponential distribution
 * @param x value at which the complementary distribution is evaluated
 * @return complementary distribution at <SPAN CLASS="MATH"><I>x</I></SPAN>
 */
public static double barF(double[] lambda, double x) {
  testLambda(lambda);
  if (x <= 0.0)
    return 1.0;
  if (x >= Double.MAX_VALUE)
    return 0.0;
  DoubleMatrix2D M = buildMatrix(lambda, x);
  M = DMatrix.expBidiagonal(M);

  // The survival probability is the sum of the first row of exp(A*x)
  int k = lambda.length;
  double sum = 0;
  for (int j = 0; j < k; j++)
    sum += M.getQuick(0, j);
  return sum;
}
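// Quick check (added, illustrative): with a single rate the hypoexponential
// reduces to the exponential distribution, so density(., x) = lambda * e^(-lambda*x)
// and barF(., x) = e^(-lambda*x). Assumes the static methods above are in scope.
public static void hypoExponentialSanityCheck() {
  double[] lambda = {2.0};
  double x = 0.7;
  System.out.println(density(lambda, x)); // ~ 2 * exp(-1.4) ≈ 0.4932
  System.out.println(barF(lambda, x));    // ~ exp(-1.4)     ≈ 0.2466
}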
/**
 * Splits recursively the points of the graph while the value of the best cut found is below a
 * specified limit (the alpha star factor).
 *
 * @param W the weight matrix of the graph
 * @param alpha_star the alpha star factor
 * @return an array of sets of points (partitions)
 */
protected int[][] partition(DoubleMatrix2D W, double alpha_star) {
  numPartitions++;

  // If the graph contains only one point
  if (W.columns() == 1) {
    int[][] p = new int[1][1];
    p[0][0] = 0;
    return p;
  } else {
    // Compute the best cut and its value
    int[][] cut = bestCut(W);
    double cutVal = Ncut(W, cut[0], cut[1], null);

    // If the value is below alpha star, recurse on both sides
    // (the numPartitions guard caps the recursion depth)
    if (cutVal < alpha_star && numPartitions < 2) {
      // Recursively partition the first part found ...
      DoubleMatrix2D W0 = W.viewSelection(cut[0], cut[0]);
      int[][] p0 = partition(W0, alpha_star);

      // ... and the second one
      DoubleMatrix2D W1 = W.viewSelection(cut[1], cut[1]);
      int[][] p1 = partition(W1, alpha_star);

      // Merge the partitions found in the recursive steps,
      // mapping local indices back through the cut
      int[][] p = new int[p0.length + p1.length][];
      for (int i = 0; i < p0.length; i++) {
        p[i] = new int[p0[i].length];
        for (int j = 0; j < p0[i].length; j++)
          p[i][j] = cut[0][p0[i][j]];
      }
      for (int i = 0; i < p1.length; i++) {
        p[i + p0.length] = new int[p1[i].length];
        for (int j = 0; j < p1[i].length; j++)
          p[i + p0.length][j] = cut[1][p1[i][j]];
      }
      return p;
    } else {
      // Otherwise return all points as a single partition,
      // without further recursion
      int[][] p = new int[1][W.columns()];
      for (int i = 0; i < p[0].length; i++)
        p[0][i] = i;
      return p;
    }
  }
}
// Assumes m > n: folds row/column m of the parent matrix into row/column n
public double apply(int first, int second, double third) {
  if (m_col_mode) { // m is the second (column) index
    if (first != m) {
      m_parent.set(first, n, m_parent.get(first, n) + third);
    } else {
      m_parent.set(n, n, m_parent.get(n, n) + third);
    }
  } else { // m is the first (row) index
    if (second > n) {
      m_parent.set(second, n, m_parent.get(second, n) + third);
    } else if (second < n) {
      m_parent.set(n, second, m_parent.get(n, second) + third);
    }
  }
  return 0.;
}
@Override
public NativeMatrix inverse() {
  if (rows != cols) {
    throw new IllegalArgumentException();
  }
  final DoubleMatrix2D ainv = Algebra.ZERO.inverse(new DenseDoubleMatrix2D(u));
  final NativeMatrix r = new NativeMatrix(rows, cols, false);
  for (int i = 0; i < rows; ++i) {
    for (int j = 0; j < cols; ++j) {
      r.u[i][j] = ainv.get(i, j);
    }
  }
  return r;
}
@Override
public DoubleMatrix getEigenvectors() {
  DoubleMatrix2D V = myDecomposition.getV();
  int nrows = V.rows();
  int ncols = V.columns();
  DoubleMatrix dm = DoubleMatrixFactory.DEFAULT.make(nrows, ncols);
  for (int r = 0; r < nrows; r++) {
    for (int c = 0; c < ncols; c++) {
      dm.set(r, c, V.getQuick(r, c));
    }
  }
  return dm;
}
/**
 * Generates a clusterer by means of the spectral clustering algorithm.
 *
 * @param data set of instances serving as training data
 * @exception Exception if the clusterer has not been generated successfully
 */
public void buildClusterer(Instances data) throws java.lang.Exception {
  m_Sequences = new Instances(data);
  int n = data.numInstances();
  int k = data.numAttributes();
  DoubleMatrix2D w;
  if (useSparseMatrix)
    w = DoubleFactory2D.sparse.make(n, n);
  else
    w = DoubleFactory2D.dense.make(n, n);
  double[][] v1 = new double[n][];
  for (int i = 0; i < n; i++)
    v1[i] = data.instance(i).toDoubleArray();
  v = DoubleFactory2D.dense.make(v1);
  double sigma_sq = sigma * sigma;

  // Sets up the (symmetric) similarity matrix from the precomputed scores.
  // (An earlier, commented-out variant derived it from an RBF kernel,
  // exp(-dist^2 / (2 * sigma_sq)), on pairwise distances within radius r.)
  for (int i = 0; i < n; i++)
    for (int j = i; j < n; j++) {
      double sim = sim_matrix[i][j];
      w.set(i, j, sim);
      w.set(j, i, sim);
    }

  // Partitions points
  int[][] p = partition(w, alpha_star);

  // Deploys results
  numOfClusters = p.length;
  cluster = new int[n];
  for (int i = 0; i < p.length; i++)
    for (int j = 0; j < p[i].length; j++)
      cluster[p[i][j]] = i;
  this.numOfClusters = cluster[Utils.maxIndex(cluster)] + 1;
}
@Override
public double[] solve(final double[] bIn) {
  if (bIn.length != rows()) {
    throw new IllegalArgumentException();
  }
  final DoubleMatrix2D b = new DenseDoubleMatrix2D(rows(), 1);
  for (int i = 0; i < rows(); ++i) {
    b.set(i, 0, bIn[i]);
  }
  final DoubleMatrix2D a = new DenseDoubleMatrix2D(u);
  final DoubleMatrix2D p = Algebra.ZERO.solve(a, b);
  final double[] r = new double[cols()];
  for (int i = 0; i < r.length; ++i) {
    r[i] = p.get(i, 0);
  }
  return r;
}
private double multiLL(DoubleMatrix2D coeffs, Node dep, List<Node> indep) {
  DoubleMatrix2D indepData =
      factory2D.make(internalData.subsetColumns(indep).getDoubleData().toArray());
  List<Node> depList = new ArrayList<>();
  depList.add(dep);
  DoubleMatrix2D depData =
      factory2D.make(internalData.subsetColumns(depList).getDoubleData().toArray());

  int N = indepData.rows();
  DoubleMatrix2D probs =
      Algebra.DEFAULT.mult(
          factory2D.appendColumns(factory2D.make(N, 1, 1.0), indepData), coeffs);
  probs =
      factory2D
          .appendColumns(factory2D.make(indepData.rows(), 1, 1.0), probs)
          .assign(Functions.exp);

  double ll = 0;
  for (int i = 0; i < N; i++) {
    DoubleMatrix1D curRow = probs.viewRow(i);
    curRow.assign(Functions.div(curRow.zSum()));
    ll += Math.log(curRow.get((int) depData.get(i, 0)));
  }
  return ll;
}
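// Note (added): multiLL computes a multinomial-logistic log-likelihood. Each
// row of probs holds exp of the linear predictors, and dividing the row by its
// sum yields softmax-style category probabilities, of which the log at the
// observed category of dep is accumulated. One caveat worth flagging: the
// constant column prepended for the reference category is exponentiated too,
// so the reference receives weight e^1 rather than e^0 = 1; this is only
// consistent if the coefficients were fitted under the same convention.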
/**
 * Feature-specific contribution to the prediction of the value of an instance.
 *
 * @param x instance
 * @param i index of the feature of interest
 * @param xi value of the feature of interest
 * @return value of the contribution of the feature to the prediction
 */
public double prediction(I x, int i, double xi) {
  double wi = w.getQuick(i);
  DoubleMatrix1D mi = m.viewRow(i);

  double pred = 0.0;
  pred += xi * wi;
  pred += x.operate(
      (j, xj) -> {
        DoubleMatrix1D mj = m.viewRow(j);
        return xi * xj * mi.zDotProduct(mj);
      },
      (v1, v2) -> v1 + v2);

  return pred;
}
/**
 * Computes the regression coefficients using the least squares method. This is a model for
 * multiple linear regression. There are @f$k@f$ regression
 * coefficients @f$\beta_j@f$, @f$j=0,1,…,(k-1)@f$, and @f$k@f$ regressor
 * variables @f$x_j@f$. The model is
 * @f[ y = \sum_{j=0}^{k-1} \beta_j x_j. @f]
 * There are @f$n@f$ data points @f$Y_i@f$, @f$X_{ij}@f$, @f$i=0,1,…,(n-1)@f$, and
 * each @f$X_i@f$ is a @f$k@f$-dimensional point. Given the response `Y[i]` and the regressor
 * variables `X[i][j]`, @f$\mathtt{i} = 0,1,…,(n-1)@f$, @f$\mathtt{j} = 0,1,…,(k-1)@f$, the
 * method computes and returns the array @f$[\beta_0, \beta_1, …, \beta_{k-1}]@f$.
 * Restriction: @f$n > k@f$.
 *
 * @param X the regressor variables
 * @param Y the response
 * @return the regression coefficients
 */
public static double[] calcCoefficients(double[][] X, double[] Y) {
  if (X.length != Y.length)
    throw new IllegalArgumentException("Lengths of X and Y are not equal");
  if (Y.length <= X[0].length + 1)
    throw new IllegalArgumentException("Not enough points");

  final int n = Y.length;
  final int k = X[0].length;
  DoubleMatrix2D Xa = new DenseDoubleMatrix2D(n, k);
  DoubleMatrix2D Ya = new DenseDoubleMatrix2D(n, 1);
  for (int i = 0; i < n; i++) {
    for (int j = 0; j < k; j++) {
      Xa.setQuick(i, j, X[i][j]);
    }
    Ya.setQuick(i, 0, Y[i]);
  }
  return solution(Xa, Ya, k);
}
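// Usage sketch (added, illustrative): fit y = 2*x0 - x1 from exact data; the
// result should be close to {2, -1}. Note the model has no intercept term, so
// a column of ones must be added to X explicitly if an intercept is wanted.
public static void multipleRegressionExample() {
  double[][] X = {{1, 0}, {0, 1}, {1, 1}, {2, 1}, {1, 2}};
  double[] Y = new double[X.length];
  for (int i = 0; i < X.length; i++) {
    Y[i] = 2 * X[i][0] - X[i][1];
  }
  double[] beta = calcCoefficients(X, Y);
  System.out.println(java.util.Arrays.toString(beta)); // ~ [2.0, -1.0]
}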
/**
 * Predict the value of an instance.
 *
 * @param x instance
 * @return value of prediction
 */
public double prediction(I x) {
  double pred = b;

  DoubleMatrix1D xm = new DenseDoubleMatrix1D(m.columns());
  pred += x.operate(
      (i, xi) -> {
        double wi = w.getQuick(i);
        DoubleMatrix1D mi = m.viewRow(i);
        xm.assign(mi, (r, s) -> r + xi * s); // xm += xi * mi
        return xi * wi - 0.5 * xi * xi * mi.zDotProduct(mi);
      },
      (v1, v2) -> v1 + v2);
  pred += 0.5 * xm.zDotProduct(xm);

  return pred;
}
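// Note (added): the accumulation above uses the standard factorisation-machine
// identity
//
//   sum_{i<j} x_i x_j <m_i, m_j>
//       = 0.5 * ( ||sum_i x_i m_i||^2 - sum_i x_i^2 ||m_i||^2 ),
//
// which is why each feature contributes -0.5 * xi^2 * <mi, mi> inside the loop
// while xm accumulates sum_i x_i m_i; the final 0.5 * xm.xm term restores the
// pairwise interactions in O(N*K) time instead of O(N^2*K).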
/**
 * Constructs and returns a new eigenvalue decomposition object; the decomposed matrices can be
 * retrieved via instance methods of the returned decomposition object. Checks for symmetry, then
 * constructs the eigenvalue decomposition.
 *
 * @param A a square matrix.
 * @return a decomposition object to access <tt>D</tt> and <tt>V</tt>.
 * @throws IllegalArgumentException if <tt>A</tt> is not square.
 */
public EigenvalueDecomposition(DoubleMatrix2D A) {
  Property.DEFAULT.checkSquare(A);

  n = A.columns();
  V = new double[n][n];
  d = new double[n];
  e = new double[n];

  issymmetric = Property.DEFAULT.isSymmetric(A);
  if (issymmetric) {
    for (int i = 0; i < n; i++) {
      for (int j = 0; j < n; j++) {
        V[i][j] = A.getQuick(i, j);
      }
    }
    // Tridiagonalize.
    tred2();
    // Diagonalize.
    tql2();
  } else {
    H = new double[n][n];
    ort = new double[n];
    for (int j = 0; j < n; j++) {
      for (int i = 0; i < n; i++) {
        H[i][j] = A.getQuick(i, j);
      }
    }
    // Reduce to Hessenberg form.
    orthes();
    // Reduce Hessenberg to real Schur form.
    hqr2();
  }
}
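// Usage sketch (added, illustrative): decompose a small symmetric matrix. The
// eigenvalues of [[2, 1], [1, 2]] are 1 and 3, and getV() holds the
// corresponding orthonormal eigenvectors as columns.
public static void eigenExample() {
  DoubleMatrix2D A = DoubleFactory2D.dense.make(new double[][] {{2, 1}, {1, 2}});
  EigenvalueDecomposition e = new EigenvalueDecomposition(A);
  System.out.println(e.getRealEigenvalues()); // ~ {1, 3}
  System.out.println(e.getV());
}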
public void random3Partition(DataSet p1, DataSet p2, DataSet p3, double f1, double f2) {
  int size1 = (int) (f1 * x.rows());
  int size2 = (int) (f2 * x.rows());
  int size3 = x.rows() - size1 - size2;
  Random random = new Random(new Date().getTime());

  p1.x = newFromTemplate(p1.x, size1, x.columns());
  p1.y = new double[size1];
  p2.x = newFromTemplate(p2.x, size2, x.columns());
  p2.y = new double[size2];
  p3.x = newFromTemplate(p3.x, size3, x.columns());
  p3.y = new double[size3];

  // FIXME: Verify the uniformity of the partitioning scheme.
  int r1 = 0;
  int r2 = 0;
  int r3 = 0;
  for (int r = 0; r < x.rows(); r++) {
    double p = random.nextDouble();
    if (p < f1 && r1 < size1) {
      p1.copyInstanceTo(r1, this, r);
      r1++;
    } else if (p < f1 + f2 && r2 < size2) {
      p2.copyInstanceTo(r2, this, r);
      r2++;
    } else if (r3 < size3) {
      p3.copyInstanceTo(r3, this, r);
      r3++;
    } else {
      r--; // the drawn bucket was already full: redraw for this row
    }
  }
}
private static void saveDenseDoubleMatrix2D(OutputStream stream, DoubleMatrix2D matrix)
    throws IOException {
  BufferedWriter out = new BufferedWriter(new OutputStreamWriter(stream));
  double[][] m = matrix.toArray();
  for (double[] pu : m) {
    for (int j = 0; j < pu.length; j++) {
      out.write(Double.toString(pu[j]));
      if (j < pu.length - 1) {
        out.write('\t');
      }
    }
    out.newLine();
  }
  out.flush();
}
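// Hypothetical counterpart (added, not in the original source): a loader for
// the tab-separated rows written by saveDenseDoubleMatrix2D, shown only to
// document the on-disk format. Assumes the usual java.io and java.util
// imports; error handling is deliberately minimal.
private static DoubleMatrix2D loadDenseDoubleMatrix2D(InputStream stream) throws IOException {
  BufferedReader in = new BufferedReader(new InputStreamReader(stream));
  List<double[]> rows = new ArrayList<>();
  String line;
  while ((line = in.readLine()) != null) {
    String[] fields = line.split("\t");
    double[] row = new double[fields.length];
    for (int j = 0; j < fields.length; j++) {
      row[j] = Double.parseDouble(fields[j]);
    }
    rows.add(row);
  }
  return new DenseDoubleMatrix2D(rows.toArray(new double[0][]));
}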
/**
 * Constructs a new time series data container for the given row-major data array and the given
 * list of variables. Each row of the data, data[i], contains a measurement for each variable (in
 * order) for a particular time. The series of times is in increasing order.
 */
public TimeSeriesData(DoubleMatrix2D matrix, List<String> varNames) {
  if (matrix == null) {
    throw new NullPointerException("Data must not be null.");
  }
  if (varNames == null) {
    throw new NullPointerException("Variables must not be null.");
  }
  for (int i = 0; i < varNames.size(); i++) {
    if (varNames.get(i) == null) {
      throw new NullPointerException("Variable at index " + i + " is null.");
    }
  }
  this.data2 = matrix;
  if (varNames.size() != matrix.columns()) {
    throw new IllegalArgumentException(
        "Number of columns in the data must match the number of variables.");
  }
  this.varNames = varNames;
  this.name = "Time Series Data";
}
static boolean computeLogMi(
    FeatureGenerator featureGen,
    double lambda[],
    DoubleMatrix2D Mi_YY,
    DoubleMatrix1D Ri_Y,
    boolean takeExp,
    boolean reuseM,
    boolean initMDone) {
  if (reuseM && initMDone) {
    Mi_YY = null;
  } else {
    initMDone = false;
  }
  if (Mi_YY != null)
    Mi_YY.assign(0);
  Ri_Y.assign(0);

  while (featureGen.hasNext()) {
    Feature feature = featureGen.next();
    int f = feature.index();
    int yp = feature.y();
    int yprev = feature.yprev();
    float val = feature.value();
    if (yprev < 0) {
      // This is a single-state feature.
      double oldVal = Ri_Y.getQuick(yp);
      Ri_Y.setQuick(yp, oldVal + lambda[f] * val);
    } else if (Mi_YY != null) {
      Mi_YY.setQuick(yprev, yp, Mi_YY.getQuick(yprev, yp) + lambda[f] * val);
      initMDone = true;
    }
  }
  if (takeExp) {
    for (int r = Ri_Y.size() - 1; r >= 0; r--) {
      Ri_Y.setQuick(r, expE(Ri_Y.getQuick(r)));
      if (Mi_YY != null)
        for (int c = Mi_YY.columns() - 1; c >= 0; c--) {
          Mi_YY.setQuick(r, c, expE(Mi_YY.getQuick(r, c)));
        }
    }
  }
  return initMDone;
}
public void newmanCluster(ItemRegistry registry) {
  DoubleMatrix2D distance_matrix =
      DoubleFactory2D.sparse.make(
          registry.getGraph().getNodeCount(), registry.getGraph().getNodeCount());
  DoubleMatrix1D a_matrix = DoubleFactory1D.dense.make(registry.getGraph().getNodeCount(), 0.);
  Map<String, Cluster> cluster_map = new HashMap<String, Cluster>();

  // Construct the leaf-node distance matrix
  Iterator edge_iter = registry.getGraph().getEdges();
  m_total_distances = 0.;
  while (edge_iter.hasNext()) {
    Edge edge = (Edge) edge_iter.next();
    Cluster clust1 = (Cluster) edge.getFirstNode();
    Cluster clust2 = (Cluster) edge.getSecondNode();
    if (cluster_map.get(clust1.getAttribute("id")) == null) {
      cluster_map.put(clust1.getAttribute("id"), clust1);
    }
    if (cluster_map.get(clust2.getAttribute("id")) == null) {
      cluster_map.put(clust2.getAttribute("id"), clust2);
    }
    int n = Integer.parseInt(clust1.getAttribute("id"));
    int m = Integer.parseInt(clust2.getAttribute("id"));
    // Use the reciprocal distance (big values are good in Newman's method,
    // but not in our case)
    double dist = 1 / clust1.getCenter().distance(clust2.getCenter());
    distance_matrix.set(Math.max(n, m), Math.min(n, m), dist);
    m_total_distances += 1;
    a_matrix.setQuick(n, a_matrix.getQuick(n) + 1);
    a_matrix.setQuick(m, a_matrix.getQuick(m) + 1);
  }

  // Agglomerate nodes until we reach a root node (or nodes)
  boolean done = false;
  int trash = 0;
  QFinder qfinder = new QFinder();
  qfinder.a_matrix = a_matrix;
  QMerger qmerger = new QMerger();
  while (!done) {
    // Find the best merge candidate
    qfinder.reset();
    distance_matrix.forEachNonZero(qfinder);
    if (qfinder.getVal() == -Double.MAX_VALUE) {
      break;
    }

    // Add a parent cluster to the graph
    Cluster clust1 = cluster_map.get("" + qfinder.getM());
    Cluster clust2 = cluster_map.get("" + qfinder.getN());
    while (clust1.getParent() != null) {
      clust1 = clust1.getParent();
    }
    while (clust2.getParent() != null) {
      clust2 = clust2.getParent();
    }
    trash++;
    double dist = Math.max(clust1.getHeight(), clust2.getHeight());
    Cluster new_cluster =
        new DefaultCluster(
            (float) (clust1.getCenter().getX() + clust2.getCenter().getX()) / 2.f,
            (float) (clust1.getCenter().getY() + clust2.getCenter().getY()) / 2.f,
            (float)
                Math.sqrt(
                    clust1.getRadius() * clust1.getRadius()
                        + clust2.getRadius() * clust2.getRadius()),
            clust1,
            clust2,
            dist);
    registry.getGraph().addNode(new_cluster);

    // Merge the clusters' distances / counts
    int M = Math.max(qfinder.getM(), qfinder.getN());
    int N = Math.min(qfinder.getM(), qfinder.getN());
    a_matrix.set(N, a_matrix.getQuick(M) + a_matrix.getQuick(N));
    a_matrix.set(M, 0);
    qmerger.setM(M);
    qmerger.setN(N);
    qmerger.setParent(distance_matrix);
    qmerger.setMode(true);
    distance_matrix.viewPart(0, M, distance_matrix.rows(), 1).forEachNonZero(qmerger);
    qmerger.setMode(false);
    distance_matrix.viewPart(M, 0, 1, M).forEachNonZero(qmerger);

    // Free any superfluous memory, randomly ~1/20 of the time
    if (Math.random() > 0.95) {
      distance_matrix.trimToSize();
    }
  }
}
@Test
public void testInclinedPlane() throws IOException {
  DoubleMatrix1D normal = new DenseDoubleMatrix1D(3);
  normal.assign(new double[] {.0, .0, 1.0});
  InclinedPlane3D inclinedPlane = new InclinedPlane3D();
  inclinedPlane.setRandomGenerator(new MersenneTwister(123456789));
  inclinedPlane.setNormal(normal);
  inclinedPlane.setBounds(new Rectangle(-5, -5, 10, 10));
  inclinedPlane.setNoiseStd(0.5);
  DoubleMatrix2D data = inclinedPlane.generate(10);

  SVDPCA pca = new SVDPCA(data);

  System.out.println("Eigenvalues:");
  System.out.println(pca.getEigenvalues());
  System.out.println("Eigenvectors:");
  System.out.println(pca.getEigenvectors());
  System.out.println("Mean vector:");
  System.out.println(pca.getMean());

  // Recalculate the input from a truncated SVD; first calculate the mean
  DoubleMatrix1D mean = new SparseDoubleMatrix1D(3);
  for (int i = 0; i < data.rows(); ++i) {
    mean.assign(data.viewRow(i), Functions.plus);
  }
  mean.assign(Functions.div(data.rows()));

  // Truncate the SVD and calculate the coefficient matrix
  DenseDoubleMatrix2D coefficients = new DenseDoubleMatrix2D(data.rows(), 2);
  DoubleMatrix2D centeredInput = data.copy();
  for (int i = 0; i < data.rows(); ++i) {
    centeredInput.viewRow(i).assign(mean, Functions.minus);
  }
  centeredInput.zMult(
      pca.getEigenvectors().viewPart(0, 0, 2, 3), coefficients, 1, 0, false, true);

  // Reconstruct the data from the lower-dimensional information
  DoubleMatrix2D reconstruction = data.copy();
  for (int i = 0; i < reconstruction.rows(); ++i) {
    reconstruction.viewRow(i).assign(mean);
  }
  coefficients.zMult(
      pca.getEigenvectors().viewPart(0, 0, 2, 3), reconstruction, 1, 1, false, false);

  // Output to file (can be read by GNU Plot)
  String fileName = "inclined-plane-svd-pca.dat";
  String packagePath = this.getClass().getPackage().getName().replaceAll("\\.", "/");
  File outputFile = new File("src/test/resources/" + packagePath + "/" + fileName);
  PrintWriter writer = new PrintWriter(outputFile);
  writer.write(data.toString());
  writer.close();
}
/**
 * Computes the association degree between two partitions of a graph.<br>
 * The association degree is defined as the sum of the weights of all the edges between points of
 * the two partitions.
 *
 * @param W the weight matrix of the graph
 * @param a the points of the first partition
 * @param b the points of the second partition
 * @return the association degree
 */
protected static double asso(DoubleMatrix2D W, int[] a, int[] b) {
  return W.viewSelection(a, b).zSum();
}
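// Context (added): asso() is the building block of the normalized cut of Shi
// and Malik. For disjoint vertex sets A and B of the node set V, the standard
// definition is
//
//   Ncut(A, B) = asso(A, B) / asso(A, V) + asso(A, B) / asso(B, V),
//
// which is presumably what the Ncut(...) helper used by partition() and
// bestCut() computes.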
/**
 * Returns the best cut of a graph w.r.t. the degree of dissimilarity between points of different
 * partitions and the degree of similarity between points of the same partition.
 *
 * @param W the weight matrix of the graph
 * @return an array of two elements, each of which contains the points of a partition
 */
protected static int[][] bestCut(DoubleMatrix2D W) {
  int n = W.columns();
  // Builds the diagonal matrices D and D^(-1/2) (represented as their diagonals)
  DoubleMatrix1D d = DoubleFactory1D.dense.make(n);
  DoubleMatrix1D d_minus_1_2 = DoubleFactory1D.dense.make(n);
  for (int i = 0; i < n; i++) {
    double d_i = W.viewRow(i).zSum();
    d.set(i, d_i);
    d_minus_1_2.set(i, 1 / Math.sqrt(d_i));
  }
  DoubleMatrix2D D = DoubleFactory2D.sparse.diagonal(d);

  // X = D^(-1/2) * (D - W) * D^(-1/2)
  DoubleMatrix2D X = D.copy();
  X.assign(W, Functions.minus);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      X.set(i, j, X.get(i, j) * d_minus_1_2.get(i) * d_minus_1_2.get(j));

  // Computes the eigenvalues and the eigenvectors of X
  EigenvalueDecomposition e = new EigenvalueDecomposition(X);
  DoubleMatrix1D lambda = e.getRealEigenvalues();

  // Selects the eigenvector z_2 associated with the second smallest eigenvalue.
  // Creates a map that contains the pairs <index, eigenvalue>
  AbstractIntDoubleMap map = new OpenIntDoubleHashMap(n);
  for (int i = 0; i < n; i++)
    map.put(i, Math.abs(lambda.get(i)));
  IntArrayList list = new IntArrayList();
  // Sorts the map by value
  map.keysSortedByValue(list);
  // Gets the index of the second smallest element
  int i_2 = list.get(1);

  // y_2 = D^(-1/2) * z_2
  DoubleMatrix1D y_2 = e.getV().viewColumn(i_2).copy();
  y_2.assign(d_minus_1_2, Functions.mult);

  // Creates a map that contains the pairs <i, y_2[i]>
  map.clear();
  for (int i = 0; i < n; i++)
    map.put(i, y_2.get(i));
  // Sorts the map by value
  map.keysSortedByValue(list);

  // Searches the element in the previously ordered map that minimizes the cut
  // of the partition
  double best_cut = Double.POSITIVE_INFINITY;
  int[][] partition = new int[2][];

  // The array v contains all the elements of the graph ordered by their
  // projection on vector y_2
  int[] v = list.elements();
  // For each admissible splitting point i
  for (int i = 1; i < n; i++) {
    // The array a contains all the elements whose projection on vector y_2 is
    // less than or equal to that of the i-th element; b contains the rest
    int[] a = new int[i];
    int[] b = new int[n - i];
    System.arraycopy(v, 0, a, 0, i);
    System.arraycopy(v, i, b, 0, n - i);
    double cut = Ncut(W, a, b, v);
    if (cut < best_cut) {
      best_cut = cut;
      partition[0] = a;
      partition[1] = b;
    }
  }
  return partition;
}
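// Background (added): the smallest eigenvalue of the normalized Laplacian
// D^(-1/2) (D - W) D^(-1/2) is 0 with eigenvector D^(1/2) * 1, which carries no
// cut information; the eigenvector of the second smallest eigenvalue is the
// relaxed, real-valued solution of the Ncut problem (Shi & Malik). That is why
// bestCut() sorts the vertices by their entries in y_2 and then sweeps the
// n - 1 threshold cuts, keeping the one with the smallest Ncut value.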