private void modifyWeights(
        DoubleMatrix trainingExample,
        DoubleMatrix result,
        DoubleMatrix output,
        double learningFactor,
        double momentum,
        List<DoubleMatrix> previousModifications,
        EvaluationContext evalCtx) {
    List<DoubleMatrix> errors = countErrors(result, output);
    List<Layer> layers = getLayers();
    evalCtx.resetContext();
    Iterator<ActivationFunction> activationFunctionIter = evalCtx.getActivationFunction();

    DoubleMatrix temporalResult = trainingExample;
    for (int i = 0; i < errors.size(); i++) {
        DoubleMatrix error = errors.get(i);
        int layerIndex = i + 1;
        Layer layer = layers.get(layerIndex);
        DoubleMatrix previousModification = previousModifications.get(i);
        ActivationFunction activationFunc = activationFunctionIter.next();
        ActivationFunctionDerivative derivative =
                DerivativeFactory.getInstance().getDerivative(activationFunc);

        if (layer.includeBias()) {
            temporalResult = addBiasInput(temporalResult);
        }
        // keep the layer input before the forward step; it is needed for the weight update
        DoubleMatrix oldVal = temporalResult.dup();
        temporalResult = layer.getWeights().mmul(temporalResult);
        temporalResult = activationFunc.eval(temporalResult);

        // for each neuron in the layer
        for (int j = 0; j < layer.getWeights().rows; j++) {
            double derVal = derivative.evaluate(temporalResult.get(j));
            DoubleMatrix oldDelta = previousModification.getRow(j);
            // delta rule: learningFactor * error_j * f'(output_j) * layer input
            DoubleMatrix delta = oldVal.mul(derVal).mul(learningFactor).mul(error.get(j));
            delta = delta.transpose();
            // add the momentum term based on the previous modification
            delta = delta.add(oldDelta.mul(momentum));
            previousModification.putRow(j, delta);

            DoubleMatrix oldWeights = layer.getWeights().getRow(j);
            DoubleMatrix newWeights = oldWeights.add(delta);
            layer.getWeights().putRow(j, newWeights);
        }
    }
}
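For reference, the inner loop above is the classic per-neuron delta rule with a momentum term. In the sketch below (notation is mine, not taken from the code), η stands for learningFactor, μ for momentum, e_j for error.get(j), f' for the activation derivative, o_j for the neuron's post-activation output, x for the layer input held in oldVal, and Δw_j for the j-th row of previousModification:

\[
\Delta w_j \leftarrow \eta \, e_j \, f'(o_j)\, x^{\top} + \mu \, \Delta w_j^{\text{prev}},
\qquad
w_j \leftarrow w_j + \Delta w_j
\]

The second assignment corresponds to the final putRow call, which writes the updated row back into the layer's weight matrix.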