/**
 * Saves the given data set to a CSV file: for each pair, the input columns are
 * written first, followed by the ideal (expected output) columns.
 *
 * @param targetFile the file to write; overwritten if it already exists.
 * @param format the CSV format controlling the separator and number formatting.
 * @param set the data set to serialize.
 * @throws EncogError wrapping any {@link IOException} that occurs while writing.
 */
public static void saveCSV(File targetFile, CSVFormat format, MLDataSet set) {
    // try-with-resources guarantees the writer is closed even if writing fails;
    // closing the PrintWriter also closes the underlying FileWriter.
    try (PrintWriter out = new PrintWriter(new FileWriter(targetFile))) {

      for (MLDataPair data : set) {
        StringBuilder line = new StringBuilder();

        // Input columns.
        for (int i = 0; i < data.getInput().size(); i++) {
          double d = data.getInput().getData(i);
          BasicFile.appendSeparator(line, format);
          line.append(format.format(d, Encog.DEFAULT_PRECISION));
        }

        // Ideal (expected output) columns.
        for (int i = 0; i < data.getIdeal().size(); i++) {
          double d = data.getIdeal().getData(i);
          BasicFile.appendSeparator(line, format);
          line.append(format.format(d, Encog.DEFAULT_PRECISION));
        }

        out.println(line);
      }

    } catch (IOException ex) {
      // Wrap in Encog's runtime error type, preserving the cause.
      throw new EncogError(ex);
    }
  }
  /**
   * Builds a training set from the given sample files, trains a feed-forward
   * network on it, and prints the network's classification for every training
   * sample.
   *
   * <p>Each file is converted to a feature vector by {@code FeatureExtractor};
   * the expected output is a one-hot vector whose hot index comes from
   * {@code Labeler.getLabel(f)}. Files that cannot be found are skipped with a
   * warning rather than aborting the whole run.
   *
   * @param files the labeled sample files to train on.
   */
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
    for (File f : files) {
      List<double[]> data;
      try {
        data = fe.fileProcessor(f);
        MLData mldataIn = new BasicMLData(data.get(0));
        // One-hot encode the expected class for this file.
        double[] out = new double[NUM_OUT];
        int index = Labeler.getLabel(f);
        out[index] = 1.;
        // Arrays.toString prints the vector contents; double[].toString()
        // would only print the array's identity hash (e.g. "[D@1a2b3c").
        System.out.println(java.util.Arrays.toString(out));
        MLData mldataout = new BasicMLData(out);
        trainingSet.add(mldataIn, mldataout);
      } catch (FileNotFoundException e) {
        // Best-effort: skip unreadable files but make the omission visible.
        System.err.println("Skipping missing file: " + f.getAbsolutePath());
        e.printStackTrace();
      }
    }

    // 3-layer network: linear input, one sigmoid hidden layer, sigmoid output.
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset(); // randomize initial weights

    // train the neural network until the error drops below the threshold
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // test the neural network against its own training data
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
  /**
   * Computes the fraction of pairs in the data set that the classifier labels
   * incorrectly. The expected class index is the first element of each pair's
   * ideal vector.
   *
   * @param method the classifier to evaluate.
   * @param data the labeled data set.
   * @return the misclassification rate in the range [0, 1].
   */
  public static double calculateClassificationError(MLClassification method, MLDataSet data) {
    int seen = 0;
    int wrong = 0;

    for (MLDataPair pair : data) {
      seen++;
      int expected = (int) pair.getIdeal().getData(0);
      if (method.classify(pair.getInput()) != expected) {
        wrong++;
      }
    }
    return (double) wrong / (double) seen;
  }
  /**
   * Computes the error of a regression method over a data set using Encog's
   * standard {@link ErrorCalculation}.
   *
   * @param method the regression model to evaluate; if it carries recurrent
   *     context, that context is cleared first so the result is repeatable.
   * @param data the data set of input/ideal pairs.
   * @return the accumulated error over all pairs.
   */
  public static double calculateRegressionError(MLRegression method, MLDataSet data) {
    // Recurrent models hold internal state; reset it before evaluating.
    if (method instanceof MLContext) {
      ((MLContext) method).clearContext();
    }

    final ErrorCalculation calc = new ErrorCalculation();
    for (final MLDataPair pair : data) {
      final MLData predicted = method.compute(pair.getInput());
      calc.updateError(predicted.getData(), pair.getIdeal().getData(), pair.getSignificance());
    }
    return calc.calculate();
  }
 /**
  * Evaluate the network and display (to the console) the output for every value
  * in the training set. Displays ideal and actual.
  *
  * @param network The network to evaluate.
  * @param training The training set to evaluate.
  */
 public static void evaluate(final MLRegression network, final MLDataSet training) {
   for (final MLDataPair pair : training) {
     final MLData actual = network.compute(pair.getInput());
     // Assemble the row first so the println is a single call.
     final String row =
         "Input="
             + EncogUtility.formatNeuralData(pair.getInput())
             + ", Actual="
             + EncogUtility.formatNeuralData(actual)
             + ", Ideal="
             + EncogUtility.formatNeuralData(pair.getIdeal());
     System.out.println(row);
   }
 }
// Example #6
  /**
   * The main method. Trains a 2-3-1 feed-forward network on the XOR problem
   * using stochastic gradient descent with the RMSProp update rule, then
   * prints the trained network's output for each XOR input pattern.
   *
   * @param args No arguments are used.
   */
  public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2)); // input: 2 neurons + bias
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset(); // randomize initial weights

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network until the error drops below the threshold
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
  }