Code example #1
0
File: gmdh.java  Project: Navieclipse/KEEL
  /**
   * Main entry point: loads the parameter file, reads the train/test/validation
   * datasets, optionally tipifies (standardizes) the inputs, trains a
   * self-organizing network (SONN), reports train/test accuracy and writes the
   * output files.
   *
   * @param args command-line arguments; args[0] is the parameters file path
   * @throws FileNotFoundException if the parameters file or a data file is missing
   * @throws IOException if reading any input file fails
   */
  public static void main(String[] args) throws FileNotFoundException, IOException {

    if (args.length <= 0) {
      System.err.println("No parameters file");
      System.exit(1);
    }

    // Load the global algorithm parameters from the file given on the command line.
    SetupParameters global = new SetupParameters();
    global.LoadParameters(args[0]);

    OpenDataset train = new OpenDataset();
    OpenDataset test = null;
    OpenDataset validation = null;

    train.processClassifierDataset(global.train_file, true);

    global.n_test_patterns = 0;
    global.n_train_patterns = train.getndatos();
    if (global.test_data) {
      test = new OpenDataset();
      test.processClassifierDataset(global.test_file, false);
      global.n_test_patterns = test.getndatos();
    }
    global.n_val_patterns = 0;
    if (global.val_data) {
      validation = new OpenDataset();
      validation.processClassifierDataset(global.val_file, false);
      global.n_val_patterns = validation.getndatos();
    }

    // Number of network inputs: attributes of type 0 (nominal, per getTiposAt)
    // contribute one input per possible value; all others contribute one input.
    global.Ninputs = 0;
    for (int i = 0; i < train.getnentradas(); i++) {
      if (train.getTiposAt(i) == 0) {
        Vector in_values = train.getRangosVar(i);
        global.Ninputs += in_values.size();
      } else {
        global.Ninputs++;
      }
    }

    // Number of outputs: one per possible value when the output attribute is
    // nominal (type 0), otherwise the dataset's declared output count.
    if (train.getTiposAt(train.getnentradas()) != 0) {
      global.Noutputs = train.getnsalidas();
    } else {
      Vector out_values = train.getRangosVar(train.getnentradas());

      global.Noutputs = out_values.size();
    }

    // NOTE(review): the last constructor argument (validation pattern count) is
    // hard-coded to 0 even when global.val_data is set, yet data.validation is
    // filled below — confirm Data sizes its validation matrix correctly.
    Data data =
        new Data(
            global.Ninputs + global.Noutputs, global.n_train_patterns, global.n_test_patterns, 0);

    Genesis.DatasetToArray(data.train, train);
    if (global.test_data) {
      Genesis.DatasetToArray(data.test, test);
    }
    if (global.val_data) {
      Genesis.DatasetToArray(data.validation, validation);
    }

    if (global.tipify_inputs) {
      // Tipify each input column: z = (x - mean) / stddev, using statistics
      // computed on the training set only and applied to train and test.
      // NOTE(review): validation data is never tipified here, so when val_data
      // is set the validation matrix stays on the raw scale — confirm this
      // asymmetry is intentional.
      double mean, sigma, sq_sum;

      for (int i = 0; i < global.Ninputs; i++) {
        /* Get the mean and variance. */
        mean = sigma = sq_sum = 0.;

        for (int j = 0; j < global.n_train_patterns; j++) {
          mean += data.train[j][i];
          sq_sum += data.train[j][i] * data.train[j][i];
        }

        mean /= global.n_train_patterns;
        sigma = Math.sqrt(sq_sum / global.n_train_patterns - mean * mean);

        /* Tipify: z = (x - mean)/std. dev. */
        /* Skip (near-)constant columns to avoid division by zero. */
        if (sigma > 0.000001) {
          for (int j = 0; j < global.n_train_patterns; j++) {
            data.train[j][i] = (data.train[j][i] - mean) / sigma;
          }

          for (int j = 0; j < global.n_test_patterns; j++) {
            data.test[j][i] = (data.test[j][i] - mean) / sigma;
          }
        }
      }
    }

    sonn SelfOrganizingNetwork = new sonn(global, data);

    SelfOrganizingNetwork.SaveNetwork("SONN_Network", global.seed, false);

    // Evaluate on train and test according to the configured problem type.
    if (global.problem.compareToIgnoreCase("Classification") == 0) {
      double result =
          SelfOrganizingNetwork.TestSONNInClassification(
              global, data.train, global.n_train_patterns);
      System.out.print("Train accuracy: " + result + "\t");
      result =
          SelfOrganizingNetwork.TestSONNInClassification(global, data.test, global.n_test_patterns);
      System.out.println("Test accuracy: " + result);
    } else {
      double result =
          SelfOrganizingNetwork.TestSONNInRegression(global, data.train, global.n_train_patterns);
      System.out.print("Train accuracy: " + result + "\t");
      result =
          SelfOrganizingNetwork.TestSONNInRegression(global, data.test, global.n_test_patterns);
      System.out.println("Test accuracy: " + result);
    }

    SelfOrganizingNetwork.SaveOutputFile(
        global.train_output, data.train, global.n_train_patterns, global);
    SelfOrganizingNetwork.SaveOutputFile(
        global.test_output, data.test, global.n_test_patterns, global);
  }
Code example #2
0
File: ConnNetwork.java  Project: RubelAhmed57/KEEL
  /**
   * Trains the network with stochastic backpropagation: for each cycle a random
   * pattern is drawn, propagated forward, and the connection weights are updated
   * from the backpropagated error using a gradient step plus a momentum term and
   * a weight-decay term.
   *
   * @param global Parameters of the algorithm (learning rate eta, momentum
   *     factor alpha, weight-decay factor lambda)
   * @param cycles Number of cycles
   * @param data Data matrix file
   * @param npatterns Number of patterns in data
   */
  public void BackPropagation(Parameters global, int cycles, double data[][], int npatterns) {
    double[] outputError = new double[Noutputs];

    // Clear all momentum terms before training starts.
    for (int layer = 0; layer < Nlayers - 1; layer++) {
      for (int to = 0; to < Nhidden[layer + 1]; to++) {
        for (int from = 0; from < Nhidden[layer]; from++) {
          momentum[layer][to][from] = 0.0;
        }
      }
    }

    for (int cycle = 0; cycle < cycles; cycle++) {
      // Draw a random training pattern and run the forward pass.
      int p = Genesis.irandom(0, npatterns);
      GenerateOutput(data[p]);

      // Output-layer error: target value minus produced activation.
      for (int out = 0; out < Noutputs; out++) {
        outputError[out] = data[p][Ninputs + out] - activation[Nlayers - 1][out];
      }

      // Output-layer deltas: error scaled by the derivative of the output
      // transfer function (logistic, hyperbolic tangent, or linear).
      for (int out = 0; out < Noutputs; out++) {
        double act = activation[Nlayers - 1][out];
        if (transfer[Nlayers - 2].compareToIgnoreCase("Log") == 0) {
          delta[Nlayers - 1][out] = outputError[out] * b_log * act * (1.0 - act / a);
        } else if (transfer[Nlayers - 2].compareToIgnoreCase("Htan") == 0) {
          delta[Nlayers - 1][out] = outputError[out] * (b_htan / a) * (a - act) * (a + act);
        } else {
          delta[Nlayers - 1][out] = outputError[out];
        }
      }

      // Hidden-layer deltas, propagated backwards one layer at a time:
      // weighted sum of the next layer's deltas times the local derivative.
      for (int layer = Nlayers - 2; layer > 0; layer--) {
        for (int node = 0; node < Nhidden[layer]; node++) {
          double backSum = 0.0;
          for (int next = 0; next < Nhidden[layer + 1]; next++) {
            backSum += delta[layer + 1][next] * w[layer][next][node];
          }
          delta[layer][node] = backSum;
          double act = activation[layer][node];
          if (transfer[layer - 1].compareToIgnoreCase("Log") == 0) {
            delta[layer][node] *= b_log * act * (1.0 - act / a);
          } else if (transfer[layer - 1].compareToIgnoreCase("Htan") == 0) {
            delta[layer][node] *= (b_htan / a) * (a - act) * (a + act);
          }
        }
      }

      // Weight update: gradient step + momentum - weight decay, applied only
      // where a connection exists; the step is remembered as new momentum.
      for (int layer = Nlayers - 2; layer >= 0; layer--) {
        for (int to = 0; to < Nhidden[layer + 1]; to++) {
          for (int from = 0; from < Nhidden[layer]; from++) {
            if (conns[layer][to][from]) {
              double step =
                  global.eta * delta[layer + 1][to] * activation[layer][from]
                      + global.alpha * momentum[layer][to][from]
                      - global.lambda * w[layer][to][from];
              w[layer][to][from] += step;
              momentum[layer][to][from] = step;
            }
          }
        }
      }
    }
  }