/**
 * Propagates the error of the last forward pass back through the network and updates every
 * weight, output layer first and then the hidden layer (standard backpropagation for a
 * sigmoid-activated network). Bias connections are updated the same way, since they are
 * ordinary in-connections.
 *
 * @param expectedOutput desired activation per output neuron, in the same order as
 *     {@code outputLayer}; values are clamped into the open interval ]0;1[ (a sigmoid output
 *     can never reach 0 or 1 exactly). NOTE: the array is mutated in place by the clamping.
 */
  public void applyBackpropagation(double[] expectedOutput) {

    // Clamp each target into ]0;1[ so the error term (desired - actual) stays meaningful.
    for (int i = 0; i < expectedOutput.length; i++) {
      if (expectedOutput[i] < 0) {
        expectedOutput[i] = epsilon;
      } else if (expectedOutput[i] > 1) {
        expectedOutput[i] = 1 - epsilon;
      }
    }

    // Output layer: dE/dw = -ak * (1 - ak) * ai * (desired - ak) for a sigmoid neuron k
    // with incoming activation ai.
    int i = 0;
    for (Neuron n : outputLayer) {
      // ak and the target are per-neuron constants — hoisted out of the connection loop.
      double ak = n.getOutput();
      double desiredOutput = expectedOutput[i];
      for (Connection con : n.getAllInConnections()) {
        double ai = con.leftNeuron.getOutput();
        double partialDerivative = -ak * (1 - ak) * ai * (desiredOutput - ak);
        double deltaWeight = -learningRate * partialDerivative;
        double newWeight = con.getWeight() + deltaWeight;
        con.setDeltaWeight(deltaWeight);
        // Momentum term reuses the previous step's delta to damp oscillation.
        con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
      }
      i++;
    }

    // Hidden layer: the error term sums over every output neuron this neuron feeds.
    for (Neuron n : hiddenLayer) {
      // sumKoutputs depends only on the hidden neuron n, not on the individual
      // in-connection, so compute it once per neuron instead of once per connection
      // (the original recomputed it inside the connection loop).
      double sumKoutputs = 0;
      int j = 0;
      for (Neuron outNeuron : outputLayer) {
        double wjk = outNeuron.getConnection(n.id).getWeight();
        double ak = outNeuron.getOutput();
        sumKoutputs += -(expectedOutput[j] - ak) * ak * (1 - ak) * wjk;
        j++;
      }

      double aj = n.getOutput();
      for (Connection con : n.getAllInConnections()) {
        double ai = con.leftNeuron.getOutput();
        double partialDerivative = aj * (1 - aj) * ai * sumKoutputs;
        double deltaWeight = -learningRate * partialDerivative;
        double newWeight = con.getWeight() + deltaWeight;
        con.setDeltaWeight(deltaWeight);
        con.setWeight(newWeight + momentum * con.getPrevDeltaWeight());
      }
    }
  }
  /**
   * Dumps every trained weight both to stdout (as ready-to-paste {@code weightUpdate.put(...)}
   * lines) and to {@code WeightFile.txt} as "neuronId connectionId weight" rows, hidden layer
   * first, then the output layer.
   */
  public void printWeightUpdate() {

    String fileName = "WeightFile.txt";
    // try-with-resources closes (and flushes) the writer even when a write throws;
    // the original only closed it on the success path and leaked it otherwise.
    try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(fileName))) {

      System.out.println(
          "printWeightUpdate, put this in trainedWeights() and set isTrained to true");
      // weights leading into the hidden layer
      for (Neuron n : hiddenLayer) {
        for (Connection con : n.getAllInConnections()) {
          String w = df.format(con.getWeight());
          System.out.println(
              "weightUpdate.put(weightKey(" + n.id + ", " + con.id + "), " + w + ");");

          bufferedWriter.write(ef.format(n.id));
          bufferedWriter.write(" ");
          bufferedWriter.write(ef.format(con.id));
          bufferedWriter.write(" ");
          bufferedWriter.write(w);
          bufferedWriter.newLine();
        }
      }
      // weights leading into the output layer
      for (Neuron n : outputLayer) {
        for (Connection con : n.getAllInConnections()) {
          String w = df.format(con.getWeight());
          System.out.println(
              "weightUpdate.put(weightKey(" + n.id + ", " + con.id + "), " + w + ");");

          bufferedWriter.write(ef.format(n.id));
          bufferedWriter.write(" ");
          bufferedWriter.write(ef.format(con.id));
          bufferedWriter.write(" ");
          bufferedWriter.write(w);
          bufferedWriter.newLine();
        }
      }
      System.out.println();
    } catch (IOException ex) {
      // Include the underlying cause; the bare file name hid why the write failed.
      System.out.println("Error writing to file " + fileName + ": " + ex.getMessage());
    }
  }
 /** Prints every connection weight ("n=<neuronId> c=<connectionId> w=<weight>") to stdout, hidden layer first, then the output layer. */
 public void printAllWeights() {
   System.out.println("printAllWeights");
   // incoming weights of the hidden layer
   for (Neuron neuron : hiddenLayer) {
     for (Connection inbound : neuron.getAllInConnections()) {
       double weight = inbound.getWeight();
       System.out.println("n=" + neuron.id + " c=" + inbound.id + " w=" + weight);
     }
   }
   // incoming weights of the output layer
   for (Neuron neuron : outputLayer) {
     for (Connection inbound : neuron.getAllInConnections()) {
       double weight = inbound.getWeight();
       System.out.println("n=" + neuron.id + " c=" + inbound.id + " w=" + weight);
     }
   }
   System.out.println();
 }
Example #4
0
 /**
  * NEAT-style "add node" mutation: picks a random connection A--w-->B, disables it, and
  * replaces it with A--1-->H--w-->B through a new hidden node H, so the initial behavior
  * of the split path stays close to the original connection.
  */
 private void addNode() {
   // Guard: Random.nextInt(0) throws IllegalArgumentException on an empty genome.
   if (this.connections.isEmpty()) {
     return;
   }
   int connectionIndex = this.randomGenerator.nextInt(this.connections.size());
   Connection connection = this.connections.get(connectionIndex);
   // NOTE(review): an already-disabled connection can be picked and split again — confirm
   // whether the selection should be restricted to enabled connections.
   connection.disable();
   // New node id = 1 + the highest existing id; hidden ids are assumed to grow past the
   // output ids once any hidden node exists (lists are assumed id-ordered — TODO confirm).
   int lastId =
       this.hiddenNodes.isEmpty()
           ? this.outputNodes.get(this.outputNodes.size() - 1).getId()
           : this.hiddenNodes.get(this.hiddenNodes.size() - 1).getId();
   int nodeId = lastId + 1;
   this.hiddenNodes.add(new Node(nodeId, NodeType.HIDDEN));
   // Incoming edge gets weight 1; the outgoing edge inherits the old weight.
   this.connections.add(new Connection(connection.getFrom(), nodeId, 1));
   this.connections.add(new Connection(nodeId, connection.getTo(), connection.getWeight()));
 }
Example #5
0
 /**
  * Mutates connection weights in place. Each connection is mutated with 80% probability;
  * a mutation is either a full random reset to [-1, 1] (10% of mutations) or a small
  * perturbation of +/-0.01 (the remaining 90%).
  */
 private void mutateWeights() {
   for (Connection link : this.connections) {
     // First roll: 20% of connections are left untouched.
     if (this.randomGenerator.nextDouble() > 0.8) {
       continue;
     }
     // Second roll decides reset vs. perturbation.
     if (this.randomGenerator.nextDouble() > 0.9) {
       // 10% chance: replace the weight with a fresh random value scaled into [-1, 1].
       link.setWeight(NNGenome.scale(this.randomGenerator.nextDouble(), 0, 1, -1, 1));
     } else {
       // 90% chance: nudge the existing weight by a value scaled into [-0.01, 0.01].
       double nudge = NNGenome.scale(this.randomGenerator.nextDouble(), 0, 1, -0.01, 0.01);
       link.setWeight(link.getWeight() + nudge);
     }
   }
 }