/**
 * Extracts the dense contents of the submatrices and combines the product A*B with C,
 * choosing the combination by the sign of {@code operationType}.
 *
 * @param operationType positive selects A*B + C, negative selects C - A*B, zero selects plain A*B
 * @param subA left multiplicand (as a submatrix view)
 * @param subB right multiplicand (as a submatrix view)
 * @param subC additive/subtractive term (extracted even when operationType is zero)
 * @return the resulting dense matrix
 */
private static DenseMatrix64F multByExtract( int operationType, D1Submatrix64F subA, D1Submatrix64F subB, D1Submatrix64F subC) {
    SimpleMatrix a = subA.extract();
    SimpleMatrix b = subB.extract();
    SimpleMatrix c = subC.extract();

    SimpleMatrix product = a.mult(b);
    if (operationType > 0) {
        return product.plus(c).getMatrix();
    }
    if (operationType < 0) {
        return c.minus(product).getMatrix();
    }
    return product.getMatrix();
}
public int predictState(int from_state, int number_of_steps) { long lStartTime = System.currentTimeMillis(); int from_bin = getBinNumber((float) from_state); SimpleMatrix result = new SimpleMatrix(transition_matrix); for (int i = 1; i < number_of_steps; i++) result = result.mult(transition_matrix); SimpleMatrix row_result = result.extractVector(true, from_bin); // row_result.print(); double maxVal = row_result.get(0, 0); int maxValIndex = 0; for (int i = 1; i < row_result.numCols(); i++) { if (row_result.get(0, i) >= maxVal) { maxVal = row_result.get(0, i); maxValIndex = i; } } long lEndTime = System.currentTimeMillis(); logger.info( ("From: " + from_state + " In NumberOfSteps: " + number_of_steps + " MaxProbabilityBin: " + maxValIndex + " MostProbablyTo: " + getOrgValFromBinNumber(maxValIndex))); logger.info(("Time taken for prediction = " + (lEndTime - lStartTime))); return getOrgValFromBinNumber(maxValIndex); }
private void checkDecomposition(int height, int width, boolean compact) { QRDecomposition<DenseMatrix64F> alg = createQRDecomposition(); SimpleMatrix A = new SimpleMatrix(height, width); RandomMatrices.setRandom(A.getMatrix(), rand); assertTrue(alg.decompose(A.copy().getMatrix())); int minStride = Math.min(height, width); SimpleMatrix Q = new SimpleMatrix(height, compact ? minStride : height); alg.getQ(Q.getMatrix(), compact); SimpleMatrix R = new SimpleMatrix(compact ? minStride : height, width); alg.getR(R.getMatrix(), compact); // see if Q has the expected properties assertTrue(MatrixFeatures.isOrthogonal(Q.getMatrix(), 1e-6)); // UtilEjml.print(alg.getQR()); // Q.print(); // R.print(); // see if it has the expected properties DenseMatrix64F A_found = Q.mult(R).getMatrix(); EjmlUnitTests.assertEquals(A.getMatrix(), A_found, 1e-6); assertTrue(Q.transpose().mult(A).isIdentical(R, 1e-6)); }
/**
 * Runs LDA inference over the given test documents and returns per-document,
 * per-item scores computed as (gamma - alpha) * beta.
 *
 * <p>Writes the documents to an input file, shells out to the external lda-c binary,
 * then reads back the inferred gamma matrix. The symmetric Dirichlet prior alpha is
 * subtracted from every gamma entry before multiplying by the topic-word matrix.
 *
 * @param testDocs documents to score
 * @return a dense matrix of scores, one row per document
 */
@Override public double[][] predict(List<PredictionPaper> testDocs) {
    String testData = "lda/test.dat";
    createLdaInputTest(testData, testDocs);
    Utils.runCommand( "lib/lda-c-dist/lda inf " + " lib/lda-c-dist/settings.txt " + "lda/final " + testData + " lda/output", false);

    double[][] gammasMatrix = Utils.readMatrix("lda/output-gamma.dat", false);
    double alpha = Utils.readAlpha("lda/final.other");
    // Center the gammas by removing the prior contribution.
    for (double[] gammaRow : gammasMatrix) {
        for (int j = 0; j < gammaRow.length; j++) {
            gammaRow[j] -= alpha;
        }
    }

    SimpleMatrix probabilities = new SimpleMatrix(gammasMatrix).mult(new SimpleMatrix(betaMatrix));

    // Copy the EJML result into a plain 2-D array for the caller.
    int rows = probabilities.numRows();
    int cols = probabilities.numCols();
    double[][] result = new double[rows][cols];
    for (int r = 0; r < rows; r++) {
        for (int c = 0; c < cols; c++) {
            result[r][c] = probabilities.get(r, c);
        }
    }
    return result;
}
/**
 * Appends a 2-D translation by (x, y) to the current transform.
 * The translation components sit in the bottom row (row-vector convention).
 */
public void translate(double x, double y) {
    double[][] translation = {
        {1, 0, 0},
        {0, 1, 0},
        {x, y, 1}
    };
    matrix = matrix.mult(new SimpleMatrix(translation));
}
/**
 * Appends a rotation defined by a pair of basis vectors to the current transform:
 * the heading vector becomes the first row and the side vector the second
 * (row-vector convention).
 */
public void rotate(Vector heading, Vector side) {
    double[][] rotation = {
        {heading.X(), heading.Y(), 0},
        {side.X(), side.Y(), 0},
        {0, 0, 1}
    };
    matrix = matrix.mult(new SimpleMatrix(rotation));
}
/** H0 = H*M P=[M|m] from canonical camera */ private SimpleMatrix computeHZero(DenseMatrix64F F, Point3D_F64 e2, SimpleMatrix H) { Vector3D_F64 v = new Vector3D_F64(.1, 0.5, .2); // need to make sure M is not singular for this technique to work SimpleMatrix P = SimpleMatrix.wrap(MultiViewOps.canonicalCamera(F, e2, v, 1)); SimpleMatrix M = P.extractMatrix(0, 3, 0, 3); return H.mult(M); }
/**
 * Appends a rotation by the given angle (radians) to the current transform,
 * using the row-vector convention.
 */
public void rotate(double angle) {
    double sin = Math.sin(angle);
    double cos = Math.cos(angle);
    double[][] rotation = {
        {cos, sin, 0},
        {-sin, cos, 0},
        {0, 0, 1}
    };
    matrix = matrix.mult(new SimpleMatrix(rotation));
}
/** * Compute rectification transforms for the stereo pair given a fundamental matrix and its * observations. * * @param F Fundamental matrix * @param observations Observations used to compute F * @param width Width of first image. * @param height Height of first image. */ public void process(DenseMatrix64F F, List<AssociatedPair> observations, int width, int height) { int centerX = width / 2; int centerY = height / 2; MultiViewOps.extractEpipoles(F, epipole1, epipole2); checkEpipoleInside(width, height); // compute the transform H which will send epipole2 to infinity SimpleMatrix R = rotateEpipole(epipole2, centerX, centerY); SimpleMatrix T = translateToOrigin(centerX, centerY); SimpleMatrix G = computeG(epipole2, centerX, centerY); SimpleMatrix H = G.mult(R).mult(T); // Find the two matching transforms SimpleMatrix Hzero = computeHZero(F, epipole2, H); SimpleMatrix Ha = computeAffineH(observations, H.getMatrix(), Hzero.getMatrix()); rect1.set(Ha.mult(Hzero).getMatrix()); rect2.set(H.getMatrix()); }
/**
 * Recursively backpropagates the error signal {@code deltaUp} down the tree, accumulating
 * gradients into the supplied derivative maps (which are mutated in place).
 *
 * <p>At each non-terminal node the local delta is formed from the incoming delta plus the
 * score-layer contribution, the transform-matrix gradient (delta times the child vector
 * transposed) is added to the appropriate binary/unary slot, and the delta is pushed through
 * W^T and the tanh derivative to each child. Preterminals optionally accumulate word-vector
 * gradients; leaves contribute nothing.
 *
 * @param tree current node of the parse tree
 * @param words sentence words, used only when context-word features are enabled
 * @param nodeVectors forward-pass activation for every node (from the forward pass)
 * @param binaryW_dfs accumulator for binary transform-matrix gradients, keyed by child labels
 * @param unaryW_dfs accumulator for unary transform-matrix gradients, keyed by child label
 * @param binaryScoreDerivatives accumulator for binary score-vector gradients
 * @param unaryScoreDerivatives accumulator for unary score-vector gradients
 * @param wordVectorDerivatives accumulator for word-vector gradients
 * @param deltaUp error signal arriving from the parent node
 */
public void backpropDerivative(
    Tree tree,
    List<String> words,
    IdentityHashMap<Tree, SimpleMatrix> nodeVectors,
    TwoDimensionalMap<String, String, SimpleMatrix> binaryW_dfs,
    Map<String, SimpleMatrix> unaryW_dfs,
    TwoDimensionalMap<String, String, SimpleMatrix> binaryScoreDerivatives,
    Map<String, SimpleMatrix> unaryScoreDerivatives,
    Map<String, SimpleMatrix> wordVectorDerivatives,
    SimpleMatrix deltaUp) {
  // Leaves have no parameters of their own; nothing to accumulate.
  if (tree.isLeaf()) {
    return;
  }
  if (tree.isPreTerminal()) {
    if (op.trainOptions.trainWordVectors) {
      String word = tree.children()[0].label().value();
      word = dvModel.getVocabWord(word);
      // SimpleMatrix currentVector = nodeVectors.get(tree);
      // SimpleMatrix currentVectorDerivative =
      // nonlinearityVectorToDerivative(currentVector);
      // SimpleMatrix derivative = deltaUp.elementMult(currentVectorDerivative);
      // NOTE(review): the nonlinearity derivative above is deliberately skipped here —
      // the incoming delta is used directly as the word-vector gradient.
      SimpleMatrix derivative = deltaUp;
      wordVectorDerivatives.put(word, wordVectorDerivatives.get(word).plus(derivative));
    }
    return;
  }
  SimpleMatrix currentVector = nodeVectors.get(tree);
  SimpleMatrix currentVectorDerivative =
      NeuralUtils.elementwiseApplyTanhDerivative(currentVector);
  // Score-layer contribution to this node's delta: tanh'(v) .* scoreW^T.
  SimpleMatrix scoreW = dvModel.getScoreWForNode(tree);
  currentVectorDerivative = currentVectorDerivative.elementMult(scoreW.transpose());
  // the delta that is used at the current nodes
  SimpleMatrix deltaCurrent = deltaUp.plus(currentVectorDerivative);
  SimpleMatrix W = dvModel.getWForNode(tree);
  // Delta pushed back through the transform; sliced per child below.
  SimpleMatrix WTdelta = W.transpose().mult(deltaCurrent);
  if (tree.children().length == 2) {
    // TODO: RS: Change to the nice "getWForNode" setup?
    String leftLabel = dvModel.basicCategory(tree.children()[0].label().value());
    String rightLabel = dvModel.basicCategory(tree.children()[1].label().value());
    // Score-vector gradient for this binary rule.
    binaryScoreDerivatives.put(
        leftLabel,
        rightLabel,
        binaryScoreDerivatives.get(leftLabel, rightLabel).plus(currentVector.transpose()));
    SimpleMatrix leftVector = nodeVectors.get(tree.children()[0]);
    SimpleMatrix rightVector = nodeVectors.get(tree.children()[1]);
    SimpleMatrix childrenVector = NeuralUtils.concatenateWithBias(leftVector, rightVector);
    if (op.trainOptions.useContextWords) {
      childrenVector = concatenateContextWords(childrenVector, tree.getSpan(), words);
    }
    // Transform-matrix gradient: delta * [left; right; bias]^T.
    SimpleMatrix W_df = deltaCurrent.mult(childrenVector.transpose());
    binaryW_dfs.put(leftLabel, rightLabel, binaryW_dfs.get(leftLabel, rightLabel).plus(W_df));
    // and then recurse
    SimpleMatrix leftDerivative = NeuralUtils.elementwiseApplyTanhDerivative(leftVector);
    SimpleMatrix rightDerivative = NeuralUtils.elementwiseApplyTanhDerivative(rightVector);
    // WTdelta stacks [left rows | right rows | bias row]; take each child's slice
    // (the bias row is dropped).
    SimpleMatrix leftWTDelta = WTdelta.extractMatrix(0, deltaCurrent.numRows(), 0, 1);
    SimpleMatrix rightWTDelta =
        WTdelta.extractMatrix(deltaCurrent.numRows(), deltaCurrent.numRows() * 2, 0, 1);
    backpropDerivative(
        tree.children()[0],
        words,
        nodeVectors,
        binaryW_dfs,
        unaryW_dfs,
        binaryScoreDerivatives,
        unaryScoreDerivatives,
        wordVectorDerivatives,
        leftDerivative.elementMult(leftWTDelta));
    backpropDerivative(
        tree.children()[1],
        words,
        nodeVectors,
        binaryW_dfs,
        unaryW_dfs,
        binaryScoreDerivatives,
        unaryScoreDerivatives,
        wordVectorDerivatives,
        rightDerivative.elementMult(rightWTDelta));
  } else if (tree.children().length == 1) {
    String childLabel = dvModel.basicCategory(tree.children()[0].label().value());
    // Score-vector gradient for this unary rule.
    unaryScoreDerivatives.put(
        childLabel, unaryScoreDerivatives.get(childLabel).plus(currentVector.transpose()));
    SimpleMatrix childVector = nodeVectors.get(tree.children()[0]);
    SimpleMatrix childVectorWithBias = NeuralUtils.concatenateWithBias(childVector);
    if (op.trainOptions.useContextWords) {
      childVectorWithBias = concatenateContextWords(childVectorWithBias, tree.getSpan(), words);
    }
    // Transform-matrix gradient: delta * [child; bias]^T.
    SimpleMatrix W_df = deltaCurrent.mult(childVectorWithBias.transpose());
    // System.out.println("unary backprop derivative for " + childLabel);
    // System.out.println("Old transform:");
    // System.out.println(unaryW_dfs.get(childLabel));
    // System.out.println(" Delta:");
    // System.out.println(W_df.scale(scale));
    unaryW_dfs.put(childLabel, unaryW_dfs.get(childLabel).plus(W_df));
    // and then recurse
    SimpleMatrix childDerivative = NeuralUtils.elementwiseApplyTanhDerivative(childVector);
    // SimpleMatrix childDerivative = childVector;
    // Only the child's slice of WTdelta is needed (bias row dropped).
    SimpleMatrix childWTDelta = WTdelta.extractMatrix(0, deltaCurrent.numRows(), 0, 1);
    backpropDerivative(
        tree.children()[0],
        words,
        nodeVectors,
        binaryW_dfs,
        unaryW_dfs,
        binaryScoreDerivatives,
        unaryScoreDerivatives,
        wordVectorDerivatives,
        childDerivative.elementMult(childWTDelta));
  }
}
private void forwardPropagateTree( Tree tree, List<String> words, IdentityHashMap<Tree, SimpleMatrix> nodeVectors, IdentityHashMap<Tree, Double> scores) { if (tree.isLeaf()) { return; } if (tree.isPreTerminal()) { Tree wordNode = tree.children()[0]; String word = wordNode.label().value(); SimpleMatrix wordVector = dvModel.getWordVector(word); wordVector = NeuralUtils.elementwiseApplyTanh(wordVector); nodeVectors.put(tree, wordVector); return; } for (Tree child : tree.children()) { forwardPropagateTree(child, words, nodeVectors, scores); } // at this point, nodeVectors contains the vectors for all of // the children of tree SimpleMatrix childVec; if (tree.children().length == 2) { childVec = NeuralUtils.concatenateWithBias( nodeVectors.get(tree.children()[0]), nodeVectors.get(tree.children()[1])); } else { childVec = NeuralUtils.concatenateWithBias(nodeVectors.get(tree.children()[0])); } if (op.trainOptions.useContextWords) { childVec = concatenateContextWords(childVec, tree.getSpan(), words); } SimpleMatrix W = dvModel.getWForNode(tree); if (W == null) { String error = "Could not find W for tree " + tree; if (op.testOptions.verbose) { System.err.println(error); } throw new NoSuchParseException(error); } SimpleMatrix currentVector = W.mult(childVec); currentVector = NeuralUtils.elementwiseApplyTanh(currentVector); nodeVectors.put(tree, currentVector); SimpleMatrix scoreW = dvModel.getScoreWForNode(tree); if (scoreW == null) { String error = "Could not find scoreW for tree " + tree; if (op.testOptions.verbose) { System.err.println(error); } throw new NoSuchParseException(error); } double score = scoreW.dot(currentVector); // score = NeuralUtils.sigmoid(score); scores.put(tree, score); // System.err.print(Double.toString(score)+" "); }