/**
   * Load a CSV file into a memory dataset.
   *
   * @param format The CSV format to use.
   * @param filename The filename to load.
   * @param headers True if there is a header line.
   * @param inputSize The input size; input columns always come first in the file.
   * @param idealSize The ideal size, 0 for unsupervised.
   * @return An MLDataSet that holds the contents of the CSV file.
   */
  public static MLDataSet loadCSVTOMemory(
      CSVFormat format, String filename, boolean headers, int inputSize, int idealSize) {
    MLDataSet result = new BasicMLDataSet();
    ReadCSV csv = new ReadCSV(filename, headers, format);
    while (csv.next()) {
      final MLData input = new BasicMLData(inputSize);
      MLData ideal = null;
      int index = 0;

      for (int i = 0; i < inputSize; i++) {
        double d = csv.getDouble(index++);
        input.setData(i, d);
      }

      if (idealSize > 0) {
        ideal = new BasicMLData(idealSize);
        for (int i = 0; i < idealSize; i++) {
          double d = csv.getDouble(index++);
          ideal.setData(i, d);
        }
      }

      MLDataPair pair = new BasicMLDataPair(input, ideal);
      result.add(pair);
    }
    csv.close();

    return result;
  }
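
A minimal usage sketch (not part of the original listing), assuming the method above sits in Encog's TrainingSetUtil-style helper class and that "xor.csv" is a placeholder name for a headerless file with two input columns followed by one ideal column:

  // Hypothetical call site; the file name and column counts are placeholder assumptions.
  MLDataSet training =
      TrainingSetUtil.loadCSVTOMemory(CSVFormat.ENGLISH, "xor.csv", false, 2, 1);
  for (MLDataPair pair : training) {
    System.out.println(pair.getInput() + " -> " + pair.getIdeal());
  }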
Example #2
  /**
   * Called when the recognize button is pressed.
   *
   * @param event The event.
   */
  void recognize_actionPerformed(final java.awt.event.ActionEvent event) {
    if (this.net == null) {
      JOptionPane.showMessageDialog(
          this, "I need to be trained first!", "Error", JOptionPane.ERROR_MESSAGE);
      return;
    }
    this.entry.downSample();

    // One input per cell of the 5 x 7 downsampled grid.
    final MLData input = new BasicMLData(5 * 7);
    int idx = 0;
    final SampleData ds = this.sample.getData();
    for (int y = 0; y < ds.getHeight(); y++) {
      for (int x = 0; x < ds.getWidth(); x++) {
        input.setData(idx++, ds.getData(x, y) ? .5 : -.5);
      }
    }

    final int best = this.net.classify(input);
    final char[] map = mapNeurons();
    JOptionPane.showMessageDialog(
        this,
        "  " + map[best] + "   (Neuron #" + best + " fired)",
        "That Letter Is",
        JOptionPane.PLAIN_MESSAGE);
    clear_actionPerformed(null);
  }
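
The same boolean-grid-to-MLData encoding appears again in trainSOM and mapNeurons below; a small helper along these lines could factor it out (a sketch, not part of the original code, assuming SampleData exposes getWidth, getHeight, and getData as used above):

  /** Sketch of a shared encoder for the +/-0.5 pixel scheme used in these handlers. */
  private static MLData encodeSample(final SampleData ds) {
    final MLData input = new BasicMLData(ds.getWidth() * ds.getHeight());
    int idx = 0;
    for (int y = 0; y < ds.getHeight(); y++) {
      for (int x = 0; x < ds.getWidth(); x++) {
        // A filled cell becomes 0.5, an empty cell -0.5.
        input.setData(idx++, ds.getData(x, y) ? .5 : -.5);
      }
    }
    return input;
  }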
Example #3
  /** Trains the SOM; called from the background training thread. */
  public void trainSOM() {
    try {
      final int inputNeuron = OCR.DOWNSAMPLE_HEIGHT * OCR.DOWNSAMPLE_WIDTH;
      final int outputNeuron = this.letterListModel.size();

      final MLDataSet trainingSet = new BasicMLDataSet();
      for (int t = 0; t < this.letterListModel.size(); t++) {
        final MLData item = new BasicMLData(inputNeuron);
        int idx = 0;
        final SampleData ds = (SampleData) this.letterListModel.getElementAt(t);
        for (int y = 0; y < ds.getHeight(); y++) {
          for (int x = 0; x < ds.getWidth(); x++) {
            item.setData(idx++, ds.getData(x, y) ? .5 : -.5);
          }
        }

        trainingSet.add(new BasicMLDataPair(item, null));
      }

      this.net = new SOM(inputNeuron, outputNeuron);
      this.net.reset();

      SOMClusterCopyTraining train = new SOMClusterCopyTraining(this.net, trainingSet);

      train.iteration();

      JOptionPane.showMessageDialog(
          this, "Training has completed.", "Training", JOptionPane.PLAIN_MESSAGE);

    } catch (final Exception e) {
      e.printStackTrace();
      JOptionPane.showMessageDialog(this, "Error: " + e, "Training", JOptionPane.ERROR_MESSAGE);
    }
  }
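
The comment above notes that this runs on a background training thread; one illustrative way to launch it without blocking the Swing event dispatch thread (a sketch, not from the original source) would be:

  // Hypothetical launch site, e.g. inside a "Begin Training" button handler.
  new Thread(this::trainSOM, "som-training").start();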
Example #4
  /**
   * Used to map neurons to actual letters.
   *
   * @return The current mapping between neurons and letters as an array.
   */
  char[] mapNeurons() {
    final char[] map = new char[this.letterListModel.size()];

    for (int i = 0; i < map.length; i++) {
      map[i] = '?';
    }
    for (int i = 0; i < this.letterListModel.size(); i++) {
      final MLData input = new BasicMLData(5 * 7);
      int idx = 0;
      final SampleData ds = (SampleData) this.letterListModel.getElementAt(i);
      for (int y = 0; y < ds.getHeight(); y++) {
        for (int x = 0; x < ds.getWidth(); x++) {
          input.setData(idx++, ds.getData(x, y) ? .5 : -.5);
        }
      }

      final int best = this.net.classify(input);
      map[best] = ds.getLetter();
    }
    return map;
  }
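
A quick way to inspect the resulting mapping after training (illustrative only, not part of the original listing):

  final char[] map = mapNeurons();
  for (int n = 0; n < map.length; n++) {
    System.out.println("Neuron #" + n + " -> " + map[n]);
  }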
Example #5
  /**
   * Decode a HyperNEAT genome into a phenotype network by querying the CPPN for every substrate
   * link.
   *
   * @param pop The NEAT population.
   * @param substrate The substrate that defines node locations and links.
   * @param genome The genome to decode.
   * @return The resulting NEATNetwork, or null if no link weight exceeded the minimum.
   */
  public MLMethod decode(final NEATPopulation pop, final Substrate substrate, final Genome genome) {
    // obtain the CPPN
    final NEATCODEC neatCodec = new NEATCODEC();
    final NEATNetwork cppn = (NEATNetwork) neatCodec.decode(genome);

    final List<NEATLink> linkList = new ArrayList<NEATLink>();

    final ActivationFunction[] afs = new ActivationFunction[substrate.getNodeCount()];

    final ActivationFunction af = new ActivationSteepenedSigmoid();
    // all activation functions are the same
    for (int i = 0; i < afs.length; i++) {
      afs[i] = af;
    }

    final double c = this.maxWeight / (1.0 - this.minWeight);
    final MLData input = new BasicMLData(cppn.getInputCount());

    // First create all of the non-bias links.
    for (final SubstrateLink link : substrate.getLinks()) {
      final SubstrateNode source = link.getSource();
      final SubstrateNode target = link.getTarget();

      int index = 0;
      for (final double d : source.getLocation()) {
        input.setData(index++, d);
      }
      for (final double d : target.getLocation()) {
        input.setData(index++, d);
      }
      final MLData output = cppn.compute(input);

      double weight = output.getData(0);
      if (Math.abs(weight) > this.minWeight) {
        weight = (Math.abs(weight) - this.minWeight) * c * Math.signum(weight);
        linkList.add(new NEATLink(source.getId(), target.getId(), weight));
      }
    }

    // Now create the bias links. Leave the source coordinates at zero (cleared above) and
    // supply only the target node's location to the CPPN.
    input.clear();
    final int d = substrate.getDimensions();
    final List<SubstrateNode> biasedNodes = substrate.getBiasedNodes();
    for (final SubstrateNode target : biasedNodes) {
      for (int i = 0; i < d; i++) {
        input.setData(d + i, target.getLocation()[i]);
      }

      final MLData output = cppn.compute(input);

      double biasWeight = output.getData(1);
      if (Math.abs(biasWeight) > this.minWeight) {
        biasWeight = (Math.abs(biasWeight) - this.minWeight) * c * Math.signum(biasWeight);
        linkList.add(new NEATLink(0, target.getId(), biasWeight));
      }
    }

    // If no links survived the weight threshold, the network is invalid.
    if (linkList.isEmpty()) {
      return null;
    }

    Collections.sort(linkList);

    final NEATNetwork network =
        new NEATNetwork(substrate.getInputCount(), substrate.getOutputCount(), linkList, afs);

    network.setActivationCycles(substrate.getActivationCycles());
    return network;
  }
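
An illustrative driver for this CODEC, following Encog conventions (the SubstrateFactory call, population size, and genome lookup are assumptions, not taken from the listing):

  // Build a sandwich substrate and a HyperNEAT population for it, then decode one genome.
  Substrate substrate = SubstrateFactory.factorSandwichSubstrate(5, 5);
  NEATPopulation pop = new NEATPopulation(substrate, 100);
  pop.reset();

  HyperNEATCODEC codec = new HyperNEATCODEC();
  Genome genome = pop.getSpecies().get(0).getMembers().get(0);
  // May be null if no CPPN output exceeded minWeight (see the check above).
  NEATNetwork network = (NEATNetwork) codec.decode(pop, substrate, genome);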