public DMatrix sub(DMatrix other) {
  assert this.length() == other.length()
      : String.format("Lengths are not equal: %d vs %d", this.length(), other.length());
  DMatrix m = new CUDAMatrix(this.rows(), this.columns(), this.toArray());
  SimpleCuBlas.axpy(-1.0, other, m);
  return m;
}
public static DMatrix randn(int r, int c, boolean _persist) {
  DMatrix m = new CUDAMatrix(r, c);
  m.persist = _persist;
  for (int i = 0; i < r * c; i++) m.put(i, RandomUtils.nextGaussian());
  if (m.persist) {
    m.cPointer = SimpleCuBlas.alloc(m.data());
  }
  return m;
}
/**
 * Update with the given gradient and Hessian.
 *
 * @param dtrain training data
 * @param grad first order gradient
 * @param hess second order gradient (Hessian)
 * @throws XGBoostError native error
 */
public void boost(DMatrix dtrain, float[] grad, float[] hess) throws XGBoostError {
  if (grad.length != hess.length) {
    throw new AssertionError(
        String.format("grad/hess length mismatch %s / %s", grad.length, hess.length));
  }
  JNIErrorHandle.checkCall(
      XgboostJNI.XGBoosterBoostOneIter(handle, dtrain.getHandle(), grad, hess));
}
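For context, a minimal sketch of how boost might be driven with a hand-computed squared-error objective; the booster, dtrain, and the use of predict(dtrain, true) to obtain raw margins are assumptions for illustration, not part of this class.

// Sketch: one boosting round with externally computed gradients (squared error).
// "booster" and "dtrain" are assumed to exist; names are illustrative only.
float[][] margins = booster.predict(dtrain, true);  // raw margin per row (assumed API)
float[] labels = dtrain.getLabel();
float[] grad = new float[margins.length];
float[] hess = new float[margins.length];
for (int i = 0; i < margins.length; i++) {
  grad[i] = margins[i][0] - labels[i];  // derivative of 0.5 * (p - y)^2
  hess[i] = 1.0f;                       // second derivative is constant
}
booster.boost(dtrain, grad, hess);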
/**
 * Computes the density function f(x), with lambda_i = lambda[i - 1], i = 1,..., k.
 *
 * @param lambda rates of the hypoexponential distribution
 * @param x value at which the density is evaluated
 * @return density at x
 */
public static double density(double[] lambda, double x) {
  testLambda(lambda);
  if (x < 0) return 0;
  DoubleMatrix2D Ax = buildMatrix(lambda, x);
  DoubleMatrix2D M = DMatrix.expBidiagonal(Ax);
  int k = lambda.length;
  return lambda[k - 1] * M.getQuick(0, k - 1);
}
/**
 * Computes the complementary distribution function bar(F)(x), with lambda_i = lambda[i - 1],
 * i = 1,..., k.
 *
 * @param lambda rates of the hypoexponential distribution
 * @param x value at which the complementary distribution is evaluated
 * @return complementary distribution at x
 */
public static double barF(double[] lambda, double x) {
  testLambda(lambda);
  if (x <= 0.0) return 1.0;
  if (x >= Double.MAX_VALUE) return 0.0;
  DoubleMatrix2D M = buildMatrix(lambda, x);
  M = DMatrix.expBidiagonal(M);
  // the state probabilities are the first row of the final matrix
  int k = lambda.length;
  double sum = 0;
  for (int j = 0; j < k; j++) sum += M.getQuick(0, j);
  return sum;
}
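As a usage illustration, the two static methods above can be called directly with an array of rates; the enclosing class name (HypoExponentialDist here) is an assumption.

// Sketch: evaluate the density and the complementary distribution at x = 1.5
// for a hypoexponential distribution with k = 3 stages.
double[] lambda = {1.0, 2.0, 3.0};                   // stage rates lambda_1..lambda_3
double x = 1.5;
double f = HypoExponentialDist.density(lambda, x);   // class name assumed
double sf = HypoExponentialDist.barF(lambda, x);
System.out.printf("f(%.1f) = %.6f, barF(%.1f) = %.6f%n", x, f, x, sf);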
/**
 * Base function for predict.
 *
 * @param data data
 * @param outPutMargin whether to output the untransformed margin value
 * @param treeLimit limit the number of trees used for prediction
 * @param predLeaf whether to predict leaf indices instead of scores
 * @return prediction results
 */
private synchronized float[][] pred(
    DMatrix data, boolean outPutMargin, int treeLimit, boolean predLeaf) throws XGBoostError {
  int optionMask = 0;
  if (outPutMargin) {
    optionMask = 1;
  }
  if (predLeaf) {
    optionMask = 2;
  }
  float[][] rawPredicts = new float[1][];
  JNIErrorHandle.checkCall(
      XgboostJNI.XGBoosterPredict(handle, data.getHandle(), optionMask, treeLimit, rawPredicts));
  int row = (int) data.rowNum();
  int col = rawPredicts[0].length / row;
  float[][] predicts = new float[row][col];
  int r, c;
  for (int i = 0; i < rawPredicts[0].length; i++) {
    r = i / col;
    c = i % col;
    predicts[r][c] = rawPredicts[0][i];
  }
  return predicts;
}
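The native call fills a single flat buffer, and the loop above reshapes it row-major into a [rows][cols] matrix. A small worked example of that index mapping, with illustrative values:

// Sketch: row-major reshape of a flat prediction buffer (2 rows x 3 classes).
float[] flat = {0.1f, 0.2f, 0.7f, 0.6f, 0.3f, 0.1f};  // illustrative values
int rows = 2, cols = 3;
float[][] out = new float[rows][cols];
for (int i = 0; i < flat.length; i++) {
  out[i / cols][i % cols] = flat[i];  // same mapping as in pred(...)
}
// out[1][2] == 0.1f, i.e. flat element 5 lands in row 1, column 2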
private void test(DMatrix a) {
  int m = a.getM();
  int n = a.getN();
  DMatrixLud lud = new DMatrixLud(a);
  int[] piv = lud.getPivot();
  DMatrix l = lud.getL();
  DMatrix u = lud.getU();
  DMatrix lu = l.times(u);
  assertEqualFuzzy(a.get(piv, null), lu);
  if (m == n) {
    int nrhs = 2;
    DMatrix b = DMatrix.random(m, nrhs);
    DMatrix x = lud.solve(b);
    DMatrix ax = a.times(x);
    assertEqualFuzzy(ax, b);
  }
}
public DMatrix mmul(boolean tA, boolean tB, DMatrix B) {
  // assert (this.columns() == B.rows());
  DMatrix C = new CUDAMatrix(this.rows(), B.columns());
  return mmul(tA, tB, B, C);
}
public DMatrix muli(DMatrix other) {
  assert (this.length() == other.length());
  SimpleCuBlas.mul(this, other, this);
  return this;
}
public DMatrix mul(DMatrix other) {
  assert (this.length() == other.length());
  DMatrix m = new CUDAMatrix(this.rows(), this.columns());
  SimpleCuBlas.mul(this, other, m);
  return m;
}
public DMatrix dotRows(DMatrix B) {
  assert (this.rows() == B.rows() && this.columns() == B.columns());
  return this.mul(B).sumColumns();
}
public DMatrix vectorNorm() {
  DMatrix m = DMath.createMatrix(this.rows(), this.columns(), this.toArray());
  DMatrix Denom = this.mul(this).sumColumns();
  Denom.sqrti();
  return m.divRows(Denom);
}
public DMatrix mulRows(DMatrix colVector) {
  assert (this.rows() == colVector.rows() && colVector.columns() == 1);
  DMatrix m = new CUDAMatrix(this.rows(), this.columns(), this.toArray());
  SimpleCuBlas.mulRows(colVector, m);
  return m;
}
public DMatrix mmuli(DMatrix B) {
  assert (this.columns() == B.rows());
  return mmul(false, false, B, this);
}
public DMatrix mmul(DMatrix B) {
  // assert (this.columns() == B.rows());
  DMatrix C = new CUDAMatrix(this.rows(), B.columns());
  return mmul(false, false, B, C);
}
public static DMatrix randn(int r, int c) {
  DMatrix m = new CUDAMatrix(r, c);
  for (int i = 0; i < r * c; i++) m.put(i, RandomUtils.nextGaussian());
  return m;
}
public void testRandom() {
  test(DMatrix.random(100, 100));
  test(DMatrix.random(101, 100));
}
// result = this * B
public DMatrix mmul(boolean tA, boolean tB, DMatrix B, DMatrix C) {
  int m = tA ? this.columns() : this.rows();
  int n = tB ? B.rows() : B.columns();
  int k = tA ? this.rows() : this.columns();
  int kB = tB ? B.columns() : B.rows();
  assert (k == kB);
  if (C.rows != m || C.columns != n) {
    if (C != this && C != B) {
      C.resize(m, n);
    } else {
      System.err.printf(
          "[ALERT] Should not resize result matrix because it is used in-place. But doing it anyway.\n");
    }
  }
  if (C == this || C == B) {
    // cuBLAS cannot multiply in-place, so compute into a temporary matrix
    // and copy the result back afterwards.
    DMatrix temp = new CUDAMatrix(m, n);
    if (m == 1) {
      SimpleCuBlas.gemv(tB, B, this, temp, 1.0, 0.0);
    } else {
      SimpleCuBlas.gemm(tA, tB, this, B, temp, 1.0, 0.0);
    }
    if (temp.rows() == C.rows() && temp.columns() == C.columns()) {
      SimpleCuBlas.copy(temp, C);
    } else {
      C.resize(m, n);
      SimpleCuBlas.copy(temp, C);
    }
  } else {
    if (m == 1) {
      SimpleCuBlas.gemv(tB, B, this, C, 1.0, 0.0);
    } else {
      SimpleCuBlas.gemm(tA, tB, this, B, C, 1.0, 0.0);
    }
  }
  return C;
}
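To make the two paths above concrete, here is a small usage sketch that relies only on operations defined in this class; the element values are arbitrary.

// Sketch: mmul allocates a fresh result, while mmuli passes "this" as C and
// therefore takes the temporary-buffer path, since cuBLAS gemm cannot run in-place.
DMatrix a = new CUDAMatrix(2, 2);
DMatrix b = new CUDAMatrix(2, 2);
for (int i = 0; i < 4; i++) {
  a.put(i, i + 1.0);
  b.put(i, 1.0);
}
DMatrix c = a.mmul(b);  // result written into a newly allocated matrix
a.mmuli(b);             // result computed into a temporary, then copied back into a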
// y = 1*x + y
public DMatrix add(DMatrix other) {
  assert (this.length() == other.length());
  DMatrix m = new CUDAMatrix(this.rows, this.columns, this.data());
  SimpleCuBlas.axpy(1.0, other, m);
  return m;
}
public DMatrix fillWithArray(DMatrix other) {
  assert (this.length() % other.length() == 0);
  SimpleCuBlas.fillWithArray(other, this);
  return this;
}
public DMatrix addi(DMatrix other) {
  assert (this.length() == other.length());
  SimpleCuBlas.axpy(1.0, other, this);
  return this;
}
public DMatrix mulRowsi(DMatrix colVector) {
  assert (this.rows() == colVector.rows() && colVector.columns() == 1);
  SimpleCuBlas.mulRows(colVector, this);
  return this;
}
public DMatrix subi(DMatrix other) {
  assert (this.length() == other.length());
  SimpleCuBlas.axpy(-1.0, other, this);
  return this;
}
public DMatrix rowNorms() {
  DMatrix norm = this.mul(this).sumColumns();
  norm.sqrti();
  return norm;
}
public DMatrix sub(double v) {
  DMatrix m = DMath.createMatrix(this.rows(), this.columns(), this.toArray());
  for (int i = 0; i < this.length(); i++) m.put(i, m.get(i) - v);
  return m;
}
public static DMatrix ones(int r, int c) {
  DMatrix m = new CUDAMatrix(r, c);
  for (int i = 0; i < r * c; i++) m.put(i, 1.0);
  return m;
}
/**
 * Update (one iteration).
 *
 * @param dtrain training data
 * @param iter current iteration number
 * @throws XGBoostError native error
 */
public void update(DMatrix dtrain, int iter) throws XGBoostError {
  JNIErrorHandle.checkCall(XgboostJNI.XGBoosterUpdateOneIter(handle, iter, dtrain.getHandle()));
}
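A minimal sketch of a training loop built on update; the construction of booster and dtrain is assumed to happen elsewhere, and numRounds is an illustrative constant.

// Sketch: run a fixed number of boosting iterations with the configured objective.
int numRounds = 10;
for (int iter = 0; iter < numRounds; iter++) {
  booster.update(dtrain, iter);  // one boosting iteration on the training DMatrix
}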