/**
   * Convert a network to an array. This is used with training algorithms that require the
   * "memory" of the network (the weight and bias values) to be expressed as a linear array.
   *
   * @param network The network to encode.
   * @return The memory of the network as a linear array.
   */
  public static double[] networkToArray(final BasicNetwork network) {
    final int size = network.getStructure().calculateSize();

    // allocate an array to hold the weight and bias values
    final double[] result = new double[size];

    int index = 0;

    for (final Layer layer : network.getStructure().getLayers()) {
      // process layer bias
      if (layer.hasBias()) {
        for (int i = 0; i < layer.getNeuronCount(); i++) {
          result[index++] = layer.getBiasWeight(i);
        }
      }

      // process synapses
      for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
        if (synapse.getMatrix() != null) {
          // process each weight matrix
          for (int x = 0; x < synapse.getToNeuronCount(); x++) {
            for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
              result[index++] = synapse.getMatrix().get(y, x);
            }
          }
        }
      }
    }

    return result;
  }
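A minimal round-trip sketch built on this method and its counterpart arrayToNetwork (defined later in this section); the NetworkCODEC class name and an already-finalized network are assumed from the surrounding code:

  // encode the network into a flat array, tweak one value, decode it back
  double[] encoded = NetworkCODEC.networkToArray(network);
  encoded[0] += 0.1; // illustrative perturbation of a single weight or bias
  NetworkCODEC.arrayToNetwork(encoded, network);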
  public void trainAndSave() {
    System.out.println("Training XOR network to under 1% error rate.");
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final Train train = new ResilientPropagation(network, trainingSet);

    do {
      train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    System.out.println("Network traiined to error: " + e);

    System.out.println("Saving network");
    final EncogPersistedCollection encog = new EncogPersistedCollection(FILENAME);
    encog.create();
    encog.add("network", network);
  }
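A matching load step, sketched against the same Encog 2.x persistence API; the assumption is that find() returns the object stored under the name used in trainAndSave:

  public void loadAndEvaluate() {
    System.out.println("Loading network");
    final EncogPersistedCollection encog = new EncogPersistedCollection(FILENAME);
    // retrieve the network saved under the key "network"
    BasicNetwork network = (BasicNetwork) encog.find("network");

    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);
    double e = network.calculateError(trainingSet);
    System.out.println("Loaded network's error is: " + e);
  }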
Example #3
  @Test
  public void testDualOutput() {

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
    network.getStructure().finalizeStructure();

    (new ConsistentRandomizer(-1, 1)).randomize(network);

    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL2);

    HessianFD testFD = new HessianFD();
    testFD.init(network, trainingData);
    testFD.compute();

    // dump(testFD, "FD");

    HessianCR testCR = new HessianCR();
    testCR.init(network, trainingData);
    testCR.compute();

    // dump(testCR, "CR");
    Assert.assertTrue(testCR.getHessianMatrix().equals(testFD.getHessianMatrix(), 4));
  }
  /**
   * Generate the network.
   *
   * @return The generated network.
   */
  public BasicNetwork generate() {

    Layer input, instar, outstar;
    int y = PatternConst.START_Y;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(input = new BasicLayer(new ActivationLinear(), false, this.inputCount));
    network.addLayer(instar = new BasicLayer(new ActivationCompetitive(), false, this.instarCount));
    network.addLayer(outstar = new BasicLayer(new ActivationLinear(), false, this.outstarCount));
    network.getStructure().finalizeStructure();
    network.reset();

    input.setX(PatternConst.START_X);
    input.setY(y);
    y += PatternConst.INC_Y;

    instar.setX(PatternConst.START_X);
    instar.setY(y);
    y += PatternConst.INC_Y;

    outstar.setX(PatternConst.START_X);
    outstar.setY(y);

    // tag as needed
    network.tagLayer(BasicNetwork.TAG_INPUT, input);
    network.tagLayer(BasicNetwork.TAG_OUTPUT, outstar);
    network.tagLayer(CPNPattern.TAG_INSTAR, instar);
    network.tagLayer(CPNPattern.TAG_OUTSTAR, outstar);

    return network;
  }
  /**
   * Create a feed forward network.
   *
   * @param architecture The architecture string to use.
   * @param input The input count.
   * @param output The output count.
   * @return The feedforward network.
   */
  public final MLMethod create(final String architecture, final int input, final int output) {

    if (input <= 0) {
      throw new EncogError("Must have at least one input for feedforward.");
    }

    if (output <= 0) {
      throw new EncogError("Must have at least one output for feedforward.");
    }

    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
      if (questionPhase == 0) {
        defaultCount = input;
      } else {
        defaultCount = output;
      }

      final ArchitectureLayer layer = ArchitectureParse.parseLayer(layerStr, defaultCount);
      final boolean bias = layer.isBias();

      String part = layer.getName();
      if (part != null) {
        part = part.trim();
      } else {
        part = "";
      }

      ActivationFunction lookup = this.factory.create(part);

      if (lookup != null) {
        af = lookup;
      } else {
        if (layer.isUsedDefault()) {
          questionPhase++;
          if (questionPhase > 2) {
            throw new EncogError("Only two ?'s may be used.");
          }
        }

        if (layer.getCount() == 0) {
          throw new EncogError(
              "Unknown architecture element: " + architecture + ", can't parse: " + part);
        }

        result.addLayer(new BasicLayer(af, bias, layer.getCount()));
      }
    }

    result.getStructure().finalizeStructure();
    result.reset();

    return result;
  }
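For illustration, a typical architecture string for this factory method; the '->' separators and layer syntax follow Encog's usual convention, with each '?' resolved to the input or output count by the questionPhase logic above:

  // '?' becomes the input count (':B' adds bias), then a 4-neuron sigmoid
  // hidden layer with bias, then a sigmoid output layer sized by the second '?'
  MLMethod method = create("?:B->SIGMOID->4:B->SIGMOID->?", 2, 1);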
Example #6
 public static BasicNetwork generateNetwork() {
   final BasicNetwork network = new BasicNetwork();
   network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
   network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
   network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
   network.getStructure().finalizeStructure();
   network.reset();
   return network;
 }
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
    for (File f : files) {
      // System.out.println(f.getAbsolutePath());

      List<double[]> data;
      try {
        data = fe.fileProcessor(f);
        MLData mldataIn = new BasicMLData(data.get(0));
        double[] out = new double[NUM_OUT];
        int index = Labeler.getLabel(f); // label determines the one-hot position
        // System.out.println(index+""+data.get(0));
        out[index] = 1.0;
        System.out.println(java.util.Arrays.toString(out));
        MLData mldataout = new BasicMLData(out);
        trainingSet.add(mldataIn, mldataout);
      } catch (FileNotFoundException e) {
        // skip files that cannot be opened
        e.printStackTrace();
      }
    }

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    // network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // train the neural network
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
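Since recognizer() only evaluates on its own training data, persisting the trained network is a natural follow-up; a minimal sketch using Encog 3.x's EncogDirectoryPersistence (the file name is illustrative):

  // save the trained recognizer so it can be reused without retraining
  EncogDirectoryPersistence.saveObject(new File("recognizer.eg"), network);
  // ... and restore it later
  BasicNetwork restored =
      (BasicNetwork) EncogDirectoryPersistence.loadObject(new File("recognizer.eg"));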
  /**
   * Use an array to populate the memory of the neural network.
   *
   * @param array An array of doubles.
   * @param network The network to populate.
   */
  public static void arrayToNetwork(final double[] array, final BasicNetwork network) {

    int index = 0;

    for (final Layer layer : network.getStructure().getLayers()) {
      if (layer.hasBias()) {
        // process layer bias
        for (int i = 0; i < layer.getNeuronCount(); i++) {
          layer.setBiasWeight(i, array[index++]);
        }
      }

      if (network.getStructure().isConnectionLimited()) {
        index = NetworkCODEC.processSynapseLimited(network, layer, array, index);
      } else {
        index = NetworkCODEC.processSynapseFull(network, layer, array, index);
      }
    }
  }
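As the Javadoc notes, these codec methods let array-based training algorithms treat the network as a flat vector. A minimal random-search sketch using only the two codec methods and calculateError; network and trainingSet are assumed from context:

  // hill-climb on the flat weight vector: keep a random perturbation
  // only when it lowers the error on the training set
  double[] best = NetworkCODEC.networkToArray(network);
  double bestError = network.calculateError(trainingSet);
  for (int i = 0; i < 100; i++) {
    final double[] trial = best.clone();
    trial[(int) (Math.random() * trial.length)] += Math.random() - 0.5;
    NetworkCODEC.arrayToNetwork(trial, network);
    final double e = network.calculateError(trainingSet);
    if (e < bestError) {
      bestError = e;
      best = trial;
    }
  }
  NetworkCODEC.arrayToNetwork(best, network); // leave the best vector in place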
 @Override
 public MLMethod createML(int inputs, int outputs) {
   BasicNetwork network = new BasicNetwork();
   network.addLayer(new BasicLayer(activation, false, inputs));
   for (Integer layerSize : layers) network.addLayer(new BasicLayer(activation, true, layerSize));
   network.addLayer(new BasicLayer(activation, true, outputs));
   network.getStructure().finalizeStructure();
   network.reset();
   return network;
 }
  @Override
  protected BasicNetwork createNetwork() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 22));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 30));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 3));
    network.addLayer(new BasicLayer(null, false, 15));
    network.getStructure().finalizeStructure();
    network.reset();

    return network;
  }
  /**
   * Process a partially connected synapse.
   *
   * @param network The network to process.
   * @param layer The layer to process.
   * @param array The array to process.
   * @param index The current index.
   * @return The index after this synapse has been read.
   */
  private static int processSynapseLimited(
      final BasicNetwork network, final Layer layer, final double[] array, final int index) {
    int result = index;
    // process synapses
    for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
      if (synapse.getMatrix() != null) {
        // process each weight matrix
        for (int x = 0; x < synapse.getToNeuronCount(); x++) {
          for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
            final double oldValue = synapse.getMatrix().get(y, x);
            double value = array[result++];
            // a weight below the connection limit marks a disabled
            // connection; keep it pinned at zero instead of overwriting it
            if (Math.abs(oldValue) < network.getStructure().getConnectionLimit()) {
              value = 0;
            }
            synapse.getMatrix().set(y, x, value);
          }
        }
      }
    }

    return result;
  }
Example #12
  @BeforeTest
  public void setup() {
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(DTrainTest.INPUT_COUNT));
    network.addLayer(new BasicLayer(DTrainTest.HIDDEN_COUNT));
    network.addLayer(new BasicLayer(DTrainTest.OUTPUT_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();

    weights = network.getFlat().getWeights();

    training = RandomTrainingFactory.generate(1000, 10000, INPUT_COUNT, OUTPUT_COUNT, -1, 1);
  }
Example #13
 /**
  * Generate the RSOM network.
  *
  * @return The neural network.
  */
 public BasicNetwork generate() {
   final Layer input = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
   final Layer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
   int y = PatternConst.START_Y;
   final BasicNetwork network = new BasicNetwork(new SOMLogic());
   network.addLayer(input);
   network.addLayer(output);
   input.setX(PatternConst.START_X);
   output.setX(PatternConst.START_X);
   input.setY(y);
   y += PatternConst.INC_Y;
   output.setY(y);
   network.getStructure().finalizeStructure();
   network.reset();
   return network;
 }
  /**
   * Construct a network analyze class. Analyze the specified network.
   *
   * @param network The network to analyze.
   */
  public AnalyzeNetwork(final BasicNetwork network) {
    int assignDisabled = 0; // disabled-connection counting is not implemented here
    int assignedTotal = 0;
    final List<Double> biasList = new ArrayList<Double>();
    final List<Double> weightList = new ArrayList<Double>();
    final List<Double> allList = new ArrayList<Double>();

    for (int layerNumber = 0; layerNumber < network.getLayerCount() - 1; layerNumber++) {
      final int fromCount = network.getLayerNeuronCount(layerNumber);
      final int fromBiasCount = network.getLayerTotalNeuronCount(layerNumber);
      final int toCount = network.getLayerNeuronCount(layerNumber + 1);

      // weights
      for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++) {
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, fromNeuron, toNeuron);
          weightList.add(v);
          allList.add(v);
          assignedTotal++;
        }
      }

      // bias
      if (fromCount != fromBiasCount) {
        final int biasNeuron = fromCount;
        for (int toNeuron = 0; toNeuron < toCount; toNeuron++) {
          final double v = network.getWeight(layerNumber, biasNeuron, toNeuron);
          biasList.add(v);
          allList.add(v);
          assignedTotal++;
        }
      }
    }

    this.disabledConnections = assignDisabled;
    this.totalConnections = assignedTotal;
    this.weights = new NumericRange(weightList);
    this.bias = new NumericRange(biasList);
    this.weightsAndBias = new NumericRange(allList);
    this.weightValues = EngineArray.listToDouble(weightList);
    this.allValues = EngineArray.listToDouble(allList);
    this.biasValues = EngineArray.listToDouble(biasList);
  }
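A sketch of how the analysis might be consumed; the getter names mirror the fields assigned above and are assumptions, not confirmed API:

  AnalyzeNetwork analyze = new AnalyzeNetwork(network);
  System.out.println("Weights: " + analyze.getWeights());     // NumericRange over weights
  System.out.println("Bias: " + analyze.getBias());           // NumericRange over bias values
  System.out.println("Both: " + analyze.getWeightsAndBias()); // combined range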
  /**
   * Process a fully connected synapse.
   *
   * @param network The network to process.
   * @param layer The layer to process.
   * @param array The array to process.
   * @param index The current index.
   * @return The index after this synapse has been read.
   */
  private static int processSynapseFull(
      final BasicNetwork network, final Layer layer, final double[] array, final int index) {
    int result = index;
    // process synapses
    for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
      if (synapse.getMatrix() != null) {
        // process each weight matrix
        for (int x = 0; x < synapse.getToNeuronCount(); x++) {
          for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
            synapse.getMatrix().set(y, x, array[result++]);
          }
        }
      }
    }

    return result;
  }
Example #16
  /**
   * The main method.
   *
   * @param args No arguments are used.
   */
  public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
  }
Example #17
  public static void main(String args[]) {
    int inputNeurons = CHAR_WIDTH * CHAR_HEIGHT;
    int outputNeurons = DIGITS.length;

    BasicNetwork network = new BasicNetwork();

    Layer inputLayer = new BasicLayer(new ActivationLinear(), false, inputNeurons);
    Layer outputLayer = new BasicLayer(new ActivationLinear(), true, outputNeurons);

    network.addLayer(inputLayer);
    network.addLayer(outputLayer);
    network.getStructure().finalizeStructure();

    (new RangeRandomizer(-0.5, 0.5)).randomize(network);

    // train it
    NeuralDataSet training = generateTraining();
    Train train = new TrainAdaline(network, training, 0.01);

    int epoch = 1;
    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);

    //
    System.out.println("Error:" + network.calculateError(training));

    // test it
    for (int i = 0; i < DIGITS.length; i++) {
      int output = network.winner(image2data(DIGITS[i]));

      for (int j = 0; j < CHAR_HEIGHT; j++) {
        if (j == CHAR_HEIGHT - 1) System.out.println(DIGITS[i][j] + " -> " + output);
        else System.out.println(DIGITS[i][j]);
      }

      System.out.println();
    }
  }