private Matrix createRandomMatrix(int rows, int cols) {
  Matrix random = new Matrix(rows, cols);
  return random.map(
      new Matrix.Function() {
        @Override
        public double function(double x) {
          return Math.random();
        }
      });
}
private Matrix applyFunctionDerivative(Matrix input) {
  Matrix activated = (Matrix) input.clone();
  if (function instanceof Softmax) {
    // Softmax is treated as a vector operation: exponentiate every entry first.
    activated =
        activated.map(
            new Matrix.Function() {
              @Override
              public double function(double x) {
                return Math.exp(x);
              }
            });
  } else {
    // Element-wise activations delegate to the activation function's own derivative.
    activated =
        activated.map(
            new Matrix.Function() {
              @Override
              public double function(double x) {
                return function.derivative(x);
              }
            });
  }
  if (function instanceof Softmax) {
    // Normalize the exponentials so the entries sum to 1,
    // then subtract the original input and apply the activation element-wise.
    double sum = activated.sum();
    if (sum != 0) {
      activated = activated.multiply(1 / sum);
    }
    activated = activated.subtract(input);
    activated =
        activated.map(
            new Matrix.Function() {
              @Override
              public double function(double x) {
                return function.activate(x);
              }
            });
  }
  return activated;
}
/**
 * Calculate the cross entropy error of the neural network.
 *
 * @param x The input to the neural network.
 * @param y The expected output.
 * @return The cross entropy error.
 */
public double crossEntropyError(Matrix x, Matrix y) {
  Matrix y_ = predict(x);
  // Cross entropy is -sum(y * log(y_)): the logarithm is applied to the
  // prediction y_, not to the expected output y.
  double j =
      y.multiply(
              y_.map(
                  new Matrix.Function() {
                    @Override
                    public double function(double v) {
                      return Math.log(v);
                    }
                  }))
          .sum();
  return -j;
}
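// Hedged usage sketch, not part of the original class: averages crossEntropyError over a
// batch of training pairs. The method name, the Matrix[] parameters, and the assumption
// that each inputs[i]/targets[i] pair has the shapes crossEntropyError expects are
// illustrative assumptions, not part of the existing API.
private double averageCrossEntropyError(Matrix[] inputs, Matrix[] targets) {
  double total = 0;
  for (int i = 0; i < inputs.length; i++) {
    // Accumulate the per-example loss, then divide by the batch size.
    total += crossEntropyError(inputs[i], targets[i]);
  }
  return inputs.length == 0 ? 0 : total / inputs.length;
}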