public NeuralNetwork(int input, int hidden, int output) {
    this.layers = new int[] { input, hidden, output };
    df = new DecimalFormat("#0.000#");
    ef = new DecimalFormat("#");

    // Create all neurons and their connections.
    // The connections themselves are created inside the Neuron class.
    for (int i = 0; i < layers.length; i++) {
        if (i == 0) { // input layer
            for (int j = 0; j < layers[i]; j++) {
                Neuron neuron = new Neuron();
                inputLayer.add(neuron);
            }
        } else if (i == 1) { // hidden layer
            for (int j = 0; j < layers[i]; j++) {
                Neuron neuron = new Neuron();
                neuron.addInConnectionsS(inputLayer);
                neuron.addBiasConnection(bias);
                hiddenLayer.add(neuron);
            }
        } else if (i == 2) { // output layer
            for (int j = 0; j < layers[i]; j++) {
                Neuron neuron = new Neuron();
                neuron.addInConnectionsS(hiddenLayer);
                neuron.addBiasConnection(bias);
                outputLayer.add(neuron);
            }
        } else {
            System.out.println("!Error NeuralNetwork init");
        }
    }

    // initialize random weights on all incoming connections
    for (Neuron neuron : hiddenLayer) {
        ArrayList<Connection> connections = neuron.getAllInConnections();
        for (Connection conn : connections) {
            double newWeight = getRandom();
            conn.setWeight(newWeight);
        }
    }
    for (Neuron neuron : outputLayer) {
        ArrayList<Connection> connections = neuron.getAllInConnections();
        for (Connection conn : connections) {
            double newWeight = getRandom();
            conn.setWeight(newWeight);
        }
    }

    // reset id counters
    Neuron.counter = 0;
    Connection.counter = 0;

    if (isTrained) {
        trainedWeights();
        updateAllWeights();
    }
}
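// NOTE: getRandom() is referenced above but not shown in this excerpt. A
// minimal sketch of what it could look like, assuming initial weights are
// drawn uniformly from a small symmetric range (both the Random field and
// the (-0.5, 0.5) range are assumptions for illustration, not taken from
// the original source):
private final java.util.Random rand = new java.util.Random();

private double getRandom() {
    // uniform value in (-0.5, 0.5); small symmetric weights keep sigmoid
    // units away from saturation at the start of training
    return rand.nextDouble() - 0.5;
}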
/**
 * Propagates the error back from the output layer.
 *
 * First the partial derivative of the error with respect to each weight
 * leading into the output neurons is computed, then the hidden-layer
 * weights are updated. The bias connections are updated here as well.
 *
 * @param expectedOutput the target values for the output neurons
 */
public void applyBackpropagation(double[] expectedOutput) {
    // sanity check: clamp target values into the open interval (0, 1)
    for (int i = 0; i < expectedOutput.length; i++) {
        double d = expectedOutput[i];
        if (d < 0) {
            expectedOutput[i] = 0 + epsilon;
        } else if (d > 1) {
            expectedOutput[i] = 1 - epsilon;
        }
    }

    // update weights for the output layer
    int i = 0;
    for (Neuron n : outputLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            double ak = n.getOutput();
            double ai = con.leftNeuron.getOutput();
            double desiredOutput = expectedOutput[i];

            double partialDerivative = -ak * (1 - ak) * ai * (desiredOutput - ak);
            double deltaWeight = -learningRate * partialDerivative;
            double newWeight = con.getWeight() + deltaWeight;
            con.setDeltaWeight(deltaWeight);
            con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
        }
        i++;
    }

    // update weights for the hidden layer
    for (Neuron n : hiddenLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            double aj = n.getOutput();
            double ai = con.leftNeuron.getOutput();

            // error back-propagated from every output neuron this hidden
            // neuron feeds into
            double sumKoutputs = 0;
            int j = 0;
            for (Neuron out_neu : outputLayer) {
                double wjk = out_neu.getConnection(n.id).getWeight();
                double desiredOutput = expectedOutput[j];
                double ak = out_neu.getOutput();
                j++;
                sumKoutputs = sumKoutputs + (-(desiredOutput - ak) * ak * (1 - ak) * wjk);
            }

            double partialDerivative = aj * (1 - aj) * ai * sumKoutputs;
            double deltaWeight = -learningRate * partialDerivative;
            double newWeight = con.getWeight() + deltaWeight;
            con.setDeltaWeight(deltaWeight);
            con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
        }
    }
}
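// The factor ak * (1 - ak) above is the derivative of the logistic sigmoid,
// so the two loops implement gradient descent on a squared-error loss
// E = 1/2 * sum_k (t_k - a_k)^2, assuming Neuron.getOutput() applies a
// sigmoid to its weighted input (an assumption inferred from the formula;
// the activation code is not part of this excerpt):
//
//   output layer:  dE/dw = -(t_k - ak) * ak * (1 - ak) * ai
//   hidden layer:  dE/dw =  aj * (1 - aj) * ai
//                           * sum_k [ -(t_k - ak) * ak * (1 - ak) * wjk ]
//
// A small helper pair matching that derivative (hypothetical, shown only to
// make the ak * (1 - ak) term explicit):
private static double sigmoid(double x) {
    return 1.0 / (1.0 + Math.exp(-x));
}

private static double sigmoidDerivative(double activation) {
    // expects the already-activated output a = sigmoid(x); then a' = a * (1 - a)
    return activation * (1.0 - activation);
}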
private void mutateWeights() {
    for (Connection connection : this.connections) {
        // skip mutation for ~20% of connections (80% chance of mutation)
        if (this.randomGenerator.nextDouble() > 0.8) {
            continue;
        }
        if (this.randomGenerator.nextDouble() > 0.9) {
            // ~10% chance: reset the weight to a new random value in [-1, 1]
            connection.setWeight(NNGenome.scale(this.randomGenerator.nextDouble(), 0, 1, -1, 1));
        } else {
            // ~90% chance: perturb the existing weight by a small amount
            double perturbation =
                NNGenome.scale(this.randomGenerator.nextDouble(), 0, 1, -0.01, 0.01);
            connection.setWeight(connection.getWeight() + perturbation);
        }
    }
}
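// NNGenome.scale(...) is used above to map a uniform sample from [0, 1] onto
// a target interval. Its implementation is not part of this excerpt; the call
// sites are consistent with a plain linear rescale, sketched here as an
// assumption:
static double scale(double value, double inMin, double inMax, double outMin, double outMax) {
    // linearly maps value from [inMin, inMax] to [outMin, outMax],
    // e.g. scale(0.25, 0, 1, -1, 1) == -0.5
    return outMin + (value - inMin) * (outMax - outMin) / (inMax - inMin);
}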
/** Copies the stored weights from the weightUpdate map back into every connection. */
public void updateAllWeights() {
    // update weights for the output layer
    for (Neuron n : outputLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            String key = weightKey(n.id, con.id);
            double newWeight = weightUpdate.get(key);
            con.setWeight(newWeight);
        }
    }
    // update weights for the hidden layer
    for (Neuron n : hiddenLayer) {
        ArrayList<Connection> connections = n.getAllInConnections();
        for (Connection con : connections) {
            String key = weightKey(n.id, con.id);
            double newWeight = weightUpdate.get(key);
            con.setWeight(newWeight);
        }
    }
}
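// weightKey(...) and the weightUpdate map are not shown in this excerpt. The
// lookups above only require a key that is unique per (neuron id, connection
// id) pair and is built the same way when the map is filled (e.g. in
// trainedWeights()). A hypothetical sketch, with an invented key format:
private final java.util.HashMap<String, Double> weightUpdate = new java.util.HashMap<>();

private String weightKey(int neuronId, int conId) {
    // "N<neuronId>_C<conId>"; the exact encoding is an assumption
    return "N" + neuronId + "_C" + conId;
}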