Example #1
  public static void main(String[] args) {
    Logging.stopConsoleLogging();
    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, false);
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);
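    // Re-randomize the weights if the error fails to improve for 5 consecutive iterations.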
    train.addStrategy(new RequiredImprovementStrategy(5));

    System.out.println("Perform initial train.");
    EncogUtility.trainToError(train, network, trainingSet, 0.01);
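    // Pause training and capture the RPROP state (last gradients and per-weight update values).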
    TrainingContinuation cont = train.pause();
    System.out.println(
        Arrays.toString((double[]) cont.getContents().get(ResilientPropagation.LAST_GRADIENTS)));
    System.out.println(
        Arrays.toString((double[]) cont.getContents().get(ResilientPropagation.UPDATE_VALUES)));

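    // Round-trip the continuation object through disk to show the state survives serialization.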
    try {
      SerializeObject.save("resume.ser", cont);
      cont = (TrainingContinuation) SerializeObject.load("resume.ser");
    } catch (Exception ex) {
      ex.printStackTrace();
    }

    System.out.println(
        "Now trying a second train, continuing from the first. Should stop after one iteration.");
    ResilientPropagation train2 = new ResilientPropagation(network, trainingSet);
    train2.resume(cont);
    EncogUtility.trainToError(train2, network, trainingSet, 0.01);
  }
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
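    // Build one training pair per file: the extracted feature vector as input,
    // a one-hot label vector as the ideal output.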
    for (File f : files) {
      // System.out.println(f.getAbsolutePath());

      List<double[]> data;
      try {
        data = fe.fileProcessor(f);
        MLData mldataIn = new BasicMLData(data.get(0));
        double[] out = new double[NUM_OUT];
        int index = Labeler.getLabel(f);
        // System.out.println(index+""+data.get(0));
        out[index] = 1.;
        System.out.println(Arrays.toString(out));
        MLData mldataout = new BasicMLData(out);
        trainingSet.add(mldataIn, mldataout);
      } catch (FileNotFoundException e) {
        // Skip this file; the rest of the training set is still built.
        e.printStackTrace();
      }
    }

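    // Feedforward network: NUM_IN inputs, one sigmoid hidden layer of 4 * NUM_IN neurons,
    // NUM_OUT sigmoid outputs.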
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    // network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // train the neural network
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
Example #3
  public static double evaluateMPROP(BasicNetwork network, NeuralDataSet data) {

    ResilientPropagation train = new ResilientPropagation(network, data);
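    // A thread count of 0 lets Encog pick the number of worker threads automatically.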
    train.setNumThreads(0);
    long start = System.currentTimeMillis();
    System.out.println("Training 20 Iterations with MPROP");
    for (int i = 1; i <= 20; i++) {
      train.iteration();
      System.out.println("Iteration #" + i + " Error:" + train.getError());
    }
    train.finishTraining();
    long stop = System.currentTimeMillis();
    double diff = ((double) (stop - start)) / 1000.0;
    System.out.println("MPROP Result:" + diff + " seconds.");
    System.out.println("Final MPROP error: " + network.calculateError(data));
    return diff;
  }
  /**
   * The main method.
   *
   * @param args No arguments are used.
   */
  public static void main(final String[] args) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
  }
  private void processTrain() throws IOException {
    final String strMode = getArg("mode");
    final String strMinutes = getArg("minutes");
    final String strStrategyError = getArg("strategyerror");
    final String strStrategyCycles = getArg("strategycycles");

    System.out.println("Training Beginning... Output patterns=" + this.outputCount);

    final double strategyError = Double.parseDouble(strStrategyError);
    final int strategyCycles = Integer.parseInt(strStrategyCycles);

    final ResilientPropagation train = new ResilientPropagation(this.network, this.training);
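    // Re-randomize the weights if the error is still above strategyError after strategyCycles iterations.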
    train.addStrategy(new ResetStrategy(strategyError, strategyCycles));

    if (strMode.equalsIgnoreCase("gui")) {
      EncogUtility.trainDialog(train, this.network, this.training);
    } else {
      final int minutes = Integer.parseInt(strMinutes);
      EncogUtility.trainConsole(train, this.network, this.training, minutes);
    }
    System.out.println("Training Stopped...");
  }