Ejemplo n.º 1
0
  /**
   * Write an entire MLDataSet to a CSV file. Each row contains the pair's
   * input values followed by its ideal values, formatted with Encog's
   * default precision and the separator defined by the given format.
   *
   * @param targetFile The CSV file to create (overwritten if it exists).
   * @param format The CSV format (column/decimal separators) to use.
   * @param set The data set to export.
   * @throws EncogError If an I/O error occurs while writing.
   */
  public static void saveCSV(File targetFile, CSVFormat format, MLDataSet set) {
    // try-with-resources guarantees the writer is closed even when an
    // exception is thrown mid-write; the original leaked the FileWriter
    // and PrintWriter on any failure inside the loop.
    try (PrintWriter out = new PrintWriter(new FileWriter(targetFile))) {
      for (MLDataPair data : set) {
        StringBuilder line = new StringBuilder();

        for (int i = 0; i < data.getInput().size(); i++) {
          BasicFile.appendSeparator(line, format);
          line.append(format.format(data.getInput().getData(i), Encog.DEFAULT_PRECISION));
        }

        for (int i = 0; i < data.getIdeal().size(); i++) {
          BasicFile.appendSeparator(line, format);
          line.append(format.format(data.getIdeal().getData(i), Encog.DEFAULT_PRECISION));
        }

        out.println(line);
      }
    } catch (IOException ex) {
      // Wrap in the library's unchecked error type, preserving the cause.
      throw new EncogError(ex);
    }
  }
Ejemplo n.º 2
0
  /**
   * Build a training set from the given labelled files, train a feed-forward
   * network (linear input, one sigmoid hidden layer, sigmoid output) with
   * resilient propagation until the error drops below 0.001, then print the
   * network's output for every training sample.
   *
   * @param files The labelled input files to extract features from.
   */
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
    for (File f : files) {
      try {
        List<double[]> data = fe.fileProcessor(f);
        MLData mldataIn = new BasicMLData(data.get(0));

        // One-hot encode the class label derived from the file.
        double[] out = new double[NUM_OUT];
        int index = Labeler.getLabel(f); // avoids deprecated new Integer(...)
        out[index] = 1.;

        // Arrays.toString prints the array contents; the original
        // out.toString() printed only the identity hash (e.g. "[D@1b6d3586").
        System.out.println(java.util.Arrays.toString(out));

        MLData mldataout = new BasicMLData(out);
        trainingSet.add(mldataIn, mldataout);
      } catch (FileNotFoundException e) {
        // Best effort: skip unreadable files and train on the rest.
        System.err.println("Skipping missing file: " + f.getAbsolutePath());
        e.printStackTrace();
      }
    }

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // Train with RPROP until the global error falls below the threshold.
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // Evaluate on the training data (no held-out test set is used here).
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
Ejemplo n.º 3
0
  /**
   * Calculate the classification error (fraction misclassified) of a method
   * over a data set. The expected class index is taken from the first
   * element of each pair's ideal data.
   *
   * @param method The classifier to evaluate.
   * @param data The data set to evaluate against.
   * @return The misclassification rate in [0,1]; 0 for an empty data set.
   */
  public static double calculateClassificationError(MLClassification method, MLDataSet data) {
    int total = 0;
    int correct = 0;

    for (MLDataPair pair : data) {
      int ideal = (int) pair.getIdeal().getData(0);
      int actual = method.classify(pair.getInput());
      if (actual == ideal) {
        correct++;
      }
      total++;
    }

    // Guard against 0/0 = NaN when the data set is empty.
    if (total == 0) {
      return 0;
    }
    return (double) (total - correct) / (double) total;
  }
Ejemplo n.º 4
0
  /**
   * Compute the regression error of a method across a data set, accumulating
   * per-pair errors (weighted by significance) into an ErrorCalculation.
   *
   * @param method The regression method to evaluate.
   * @param data The evaluation data set.
   * @return The accumulated error over all pairs.
   */
  public static double calculateRegressionError(MLRegression method, MLDataSet data) {
    final ErrorCalculation errors = new ErrorCalculation();

    // Stateful (recurrent) methods must start from a clean context.
    if (method instanceof MLContext) {
      ((MLContext) method).clearContext();
    }

    for (final MLDataPair pair : data) {
      final MLData predicted = method.compute(pair.getInput());
      errors.updateError(predicted.getData(), pair.getIdeal().getData(), pair.getSignificance());
    }

    return errors.calculate();
  }
Ejemplo n.º 5
0
 /**
  * Evaluate the network and display (to the console) the output for every value in the training
  * set. Displays ideal and actual.
  *
  * @param network The network to evaluate.
  * @param training The training set to evaluate.
  */
 public static void evaluate(final MLRegression network, final MLDataSet training) {
   for (final MLDataPair pair : training) {
     final MLData computed = network.compute(pair.getInput());
     final String inputText = EncogUtility.formatNeuralData(pair.getInput());
     final String actualText = EncogUtility.formatNeuralData(computed);
     final String idealText = EncogUtility.formatNeuralData(pair.getIdeal());
     System.out.println("Input=" + inputText + ", Actual=" + actualText + ", Ideal=" + idealText);
   }
 }
Ejemplo n.º 6
0
  /**
   * Calculate the error for this neural network. The error is calculated using
   * root-mean-square(RMS).
   *
   * @param data The training set.
   * @return The error percentage.
   */
  public double calculateError(final MLDataSet data) {
    final ErrorCalculation errors = new ErrorCalculation();

    // Reusable buffers: one output vector and one pair sized to this set.
    final double[] output = new double[this.outputCount];
    final MLDataPair pair = BasicMLDataPair.createPair(data.getInputSize(), data.getIdealSize());

    final long records = data.getRecordCount();
    for (int row = 0; row < records; row++) {
      data.getRecord(row, pair);
      compute(pair.getInputArray(), output);
      errors.updateError(output, pair.getIdealArray(), pair.getSignificance());
    }

    return errors.calculate();
  }
  /**
   * Copy a data set into a pair of 2D arrays: input vectors in the first
   * array, ideal vectors in the second, one row per record.
   *
   * @param training The data set to convert.
   * @return An ObjectPair holding (inputs, ideals).
   */
  public static ObjectPair<double[][], double[][]> trainingToArray(MLDataSet training) {
    final int rows = (int) training.getRecordCount();
    final double[][] inputs = new double[rows][training.getInputSize()];
    final double[][] ideals = new double[rows][training.getIdealSize()];

    int row = 0;
    for (MLDataPair pair : training) {
      EngineArray.arrayCopy(pair.getInputArray(), inputs[row]);
      EngineArray.arrayCopy(pair.getIdealArray(), ideals[row]);
      row++;
    }

    return new ObjectPair<double[][], double[][]>(inputs, ideals);
  }
  /** Approximate the weights based on the input values. */
  private void initWeights() {

    // Weight initialization from data requires exactly one instar neuron
    // per training element.
    if (this.training.getRecordCount() != this.network.getInstarCount()) {
      throw new NeuralNetworkError(
          "If the weights are to be set from the "
              + "training data, then there must be one instar "
              + "neuron for each training element.");
    }

    // Copy each training element's input vector into the corresponding
    // instar neuron's incoming weight column.
    int instar = 0;
    for (final MLDataPair pair : this.training) {
      final MLData input = pair.getInput();
      for (int in = 0; in < this.network.getInputCount(); in++) {
        this.network.getWeightsInputToInstar().set(in, instar, input.getData(in));
      }
      instar++;
    }

    this.mustInit = false;
  }
Ejemplo n.º 9
0
  /**
   * The main method: trains a 2-3-1 feed-forward network on the XOR data with
   * stochastic gradient descent (RMSProp update rule), then prints the trained
   * network's output for each XOR input pair.
   *
   * @param args No arguments are used.
   */
  public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network with SGD + RMSProp
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network on the training data
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    // Removed an unused, never-assigned local declaration of
    // PerturbationFeatureImportanceCalc that served no purpose.

    Encog.getInstance().shutdown();
  }
Ejemplo n.º 10
0
  /** {@inheritDoc} */
  @Override
  public final void iteration() {

    // Lazily initialize the instar weights from the training data.
    if (this.mustInit) {
      initWeights();
    }

    double maxDistance = Double.NEGATIVE_INFINITY;

    for (final MLDataPair pair : this.training) {
      final MLData input = pair.getInput();

      // Winner-take-all: pick the instar neuron with the largest activation.
      final MLData out = this.network.computeInstar(input);
      final int winner = EngineArray.indexOfLargest(out.getData());

      // Euclidean distance between this input and the winner's weights.
      double sumSq = 0;
      for (int i = 0; i < input.size(); i++) {
        final double diff =
            input.getData(i) - this.network.getWeightsInputToInstar().get(i, winner);
        sumSq += diff * diff;
      }
      final double distance = BoundMath.sqrt(sumSq);
      if (maxDistance < distance) {
        maxDistance = distance;
      }

      // Move the winner's weights toward the input by the learning rate.
      for (int j = 0; j < this.network.getInputCount(); j++) {
        final double target = input.getData(j);
        final double current = this.network.getWeightsInputToInstar().get(j, winner);
        this.network.getWeightsInputToInstar().add(j, winner, this.learningRate * (target - current));
      }
    }

    // Report the worst (largest) distance seen this pass as the error.
    setError(maxDistance);
  }
Ejemplo n.º 11
0
  /** {@inheritDoc} */
  @Override
  public double calculateError(final MLDataSet data) {

    // No valid classification target: report maximum error instead of throwing.
    if (!this.hasValidClassificationTarget()) return 1.0;

    // do the following just to throw an error if there is no classification target
    getClassificationTarget();

    int badCount = 0;
    int totalCount = 0;

    // Misclassification rate: fraction of pairs whose predicted class does
    // not match the expected class.
    for (MLDataPair pair : data) {
      int c = this.classify(pair.getInput());
      totalCount++;
      // NOTE(review): the expected class is read from the INPUT vector at
      // the classificationTarget column — not from pair.getIdeal(). That is
      // presumably intentional (target column embedded in the input data),
      // but confirm against how callers build the data set.
      if (c != pair.getInput().getData(this.classificationTarget)) {
        badCount++;
      }
    }

    // NOTE(review): returns 0/0 = NaN if the data set is empty.
    return (double) badCount / (double) totalCount;
  }