Example #1
 public void train() {
   // Gradient-descent update: each weight grows by the learning rate times
   // the delta of its output row and the activation of its input column.
   for (int i = 0; i < weights.getM(); ++i) {
     for (int j = 0; j < weights.getN(); ++j) {
       weights.set(i, j, weights.get(i, j) + (learning * errors.get(i) * inputs.get(j)));
     }
   }
 }
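The update above is the usual outer-product gradient step: W[i][j] moves by the learning rate times the delta of output unit i and the activation of input j (including the bias slot). The Layer fields and the Matrix/Vector classes are not shown, so the following is only a minimal sketch of the same step with plain arrays standing in for them:

 // Sketch only: plain arrays stand in for the Matrix/Vector types used above.
 static void train(double[][] weights, double[] inputs, double[] errors, double learning) {
   for (int i = 0; i < weights.length; ++i) {        // rows = output units
     for (int j = 0; j < weights[i].length; ++j) {   // columns = inputs, incl. bias
       weights[i][j] += learning * errors[i] * inputs[j];
     }
   }
 }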
Example #2
 public Vector backProp(Vector error) {
   // Scale the incoming error by the activation derivative to get this layer's deltas.
   derivs = fcn.applyDerivative(outputs);
   errors = derivs.multiply(error);
   // Push the deltas back through the weights to assign blame to each input.
   Vector blame = weights.transpose().multiply(errors);
   // Drop the bias entry; the previous layer never sees the bias signal.
   return blame.slice(blame.dim() - 1);
 }
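In array terms, the same step computes a delta for each output unit by multiplying the incoming error with the activation derivative, then blames input j with the weighted sum of deltas flowing through column j of the weights; the last slot, which belonged to the bias, is dropped before the result is handed to the previous layer. A rough sketch under the same array stand-ins, with a sigmoid derivative assumed in place of fcn.applyDerivative:

 // Sketch only: deltas and blame with plain arrays; sigmoid derivative is assumed.
 static double[] backProp(double[][] weights, double[] outputs, double[] error) {
   int m = weights.length, n = weights[0].length;
   double[] deltas = new double[m];
   for (int i = 0; i < m; ++i) {
     // the original keeps these deltas in the errors field for train()
     deltas[i] = outputs[i] * (1.0 - outputs[i]) * error[i];
   }
   double[] blame = new double[n - 1];            // bias column dropped
   for (int j = 0; j < n - 1; ++j) {
     for (int i = 0; i < m; ++i) {
       blame[j] += weights[i][j] * deltas[i];     // transpose(W) * deltas
     }
   }
   return blame;
 }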
Example #3
 public Vector output(Vector inputs) {
   // Append the bias term and remember the augmented input for training.
   Vector in = inputs.append(bias);
   this.inputs = in;
   // Weighted sum followed by the activation function.
   in = weights.multiply(in);
   outputs = fcn.apply(in);
   return outputs;
 }
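The forward pass appends the bias to the input, multiplies by the weight matrix, and runs the result through the activation; the augmented input and the outputs are kept because train() and backProp() above reuse them. A minimal sketch with the same array stand-ins, again assuming a sigmoid activation in place of fcn:

 // Sketch only: forward pass with plain arrays; sigmoid is an assumed activation.
 static double[] output(double[][] weights, double[] inputs, double bias) {
   int n = weights[0].length;                     // columns = inputs + bias
   double[] in = new double[n];
   System.arraycopy(inputs, 0, in, 0, inputs.length);
   in[n - 1] = bias;                              // append the bias term
   double[] out = new double[weights.length];
   for (int i = 0; i < weights.length; ++i) {
     double sum = 0.0;
     for (int j = 0; j < n; ++j) {
       sum += weights[i][j] * in[j];              // weighted sum W * in
     }
     out[i] = 1.0 / (1.0 + Math.exp(-sum));       // activation
   }
   return out;
 }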