Example #1
  /**
   * Generate the network.
   *
   * @return The generated network.
   */
  public BasicNetwork generate() {

    final BasicNetwork network = new BasicNetwork();

    final Layer inputLayer = new BasicLayer(new ActivationLinear(), false, this.inputCount);
    final Layer instarLayer = new BasicLayer(new ActivationCompetitive(), false, this.instarCount);
    final Layer outstarLayer = new BasicLayer(new ActivationLinear(), false, this.outstarCount);

    network.addLayer(inputLayer);
    network.addLayer(instarLayer);
    network.addLayer(outstarLayer);
    network.getStructure().finalizeStructure();
    network.reset();

    // Stack the layers vertically for display purposes.
    int y = PatternConst.START_Y;
    inputLayer.setX(PatternConst.START_X);
    inputLayer.setY(y);
    y += PatternConst.INC_Y;

    instarLayer.setX(PatternConst.START_X);
    instarLayer.setY(y);
    y += PatternConst.INC_Y;

    outstarLayer.setX(PatternConst.START_X);
    outstarLayer.setY(y);

    // Tag the layers so they can be identified by role later.
    network.tagLayer(BasicNetwork.TAG_INPUT, inputLayer);
    network.tagLayer(BasicNetwork.TAG_OUTPUT, outstarLayer);
    network.tagLayer(CPNPattern.TAG_INSTAR, instarLayer);
    network.tagLayer(CPNPattern.TAG_OUTSTAR, outstarLayer);

    return network;
  }
Example #2
  /** Verify the CR Hessian agrees with the finite-difference Hessian on a two-output XOR net. */
  @Test
  public void testDualOutput() {

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
    network.getStructure().finalizeStructure();

    // Deterministic weights so both Hessian computations see the same network.
    new ConsistentRandomizer(-1, 1).randomize(network);

    final MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL2);

    final HessianFD hessianFD = new HessianFD();
    hessianFD.init(network, trainingData);
    hessianFD.compute();

    final HessianCR hessianCR = new HessianCR();
    hessianCR.init(network, trainingData);
    hessianCR.compute();

    // The two matrices should agree to 4 decimal places.
    Assert.assertTrue(hessianCR.getHessianMatrix().equals(hessianFD.getHessianMatrix(), 4));
  }
Example #3
  /**
   * Train an XOR network until the error drops below 1%, then persist it to {@code FILENAME}
   * under the key "network".
   */
  public void trainAndSave() {
    System.out.println("Training XOR network to under 1% error rate.");
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network with resilient propagation until error < 0.9%
    final Train train = new ResilientPropagation(network, trainingSet);

    do {
      train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    // BUGFIX: corrected typo "traiined" in the user-facing message
    System.out.println("Network trained to error: " + e);

    System.out.println("Saving network");
    final EncogPersistedCollection encog = new EncogPersistedCollection(FILENAME);
    encog.create();
    encog.add("network", network);
  }
  /**
   * Build a feed-forward network sized to the given example set: one input neuron per attribute,
   * the configured number/size of hidden layers, and a single output neuron whose activation
   * depends on whether the label is nominal.
   *
   * @param exampleSet the examples the network will be trained on
   * @return the freshly constructed, randomly reset network
   * @throws OperatorException if a parameter lookup fails
   */
  private BasicNetwork getNetwork(ExampleSet exampleSet) throws OperatorException {
    final BasicNetwork network = new BasicNetwork();

    // input layer: one neuron per attribute
    network.addLayer(new FeedforwardLayer(exampleSet.getAttributes().size()));

    // hidden layers
    log("No hidden layers defined. Using default hidden layers.");
    int hiddenSize = getParameterAsInt(PARAMETER_DEFAULT_HIDDEN_LAYER_SIZE);
    if (hiddenSize <= 0) {
      hiddenSize = getDefaultLayerSize(exampleSet);
    }
    final int hiddenCount = getParameterAsInt(PARAMETER_DEFAULT_NUMBER_OF_HIDDEN_LAYERS);
    for (int i = 0; i < hiddenCount; i++) {
      network.addLayer(new FeedforwardLayer(hiddenSize));
    }

    // output layer: sigmoid for classification, linear for regression
    final boolean nominalLabel = exampleSet.getAttributes().getLabel().isNominal();
    network.addLayer(
        nominalLabel
            ? new FeedforwardLayer(new ActivationSigmoid(), 1)
            : new FeedforwardLayer(new ActivationLinear(), 1));

    network.reset(
        RandomGenerator.getRandomGenerator(
            getParameterAsBoolean(RandomGenerator.PARAMETER_USE_LOCAL_RANDOM_SEED),
            getParameterAsInt(RandomGenerator.PARAMETER_LOCAL_RANDOM_SEED)));

    return network;
  }
Example #5
 /** Build a 2-3-1 feed-forward network with randomized weights. */
 private BasicNetwork createNetwork() {
   final BasicNetwork net = new BasicNetwork();
   for (final int size : new int[] {2, 3, 1}) {
     net.addLayer(new FeedforwardLayer(size));
   }
   net.reset();
   return net;
 }
Example #6
 /** Build the benchmark network (INPUT_COUNT → HIDDEN_COUNT → OUTPUT_COUNT), randomized. */
 public static BasicNetwork generateNetwork() {
   final BasicNetwork net = new BasicNetwork();
   for (final int size :
       new int[] {MultiBench.INPUT_COUNT, MultiBench.HIDDEN_COUNT, MultiBench.OUTPUT_COUNT}) {
     net.addLayer(new BasicLayer(size));
   }
   net.getStructure().finalizeStructure();
   net.reset();
   return net;
 }
Example #7
  /**
   * Train a feed-forward classifier on feature vectors extracted from the given files, then print
   * the network's prediction for every training sample.
   *
   * @param files the labeled input files to train and evaluate on
   */
  public void recognizer(List<File> files) {

    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();
    for (File f : files) {
      List<double[]> data;
      try {
        data = fe.fileProcessor(f);
        MLData mldataIn = new BasicMLData(data.get(0));
        // one-hot target vector derived from the file's label
        double[] out = new double[NUM_OUT];
        // Integer.valueOf replaces deprecated new Integer(...); handles int or String labels
        int index = Integer.valueOf(Labeler.getLabel(f));
        out[index] = 1.;
        // BUGFIX: out.toString() printed the array's identity hash, not its contents
        System.out.println(java.util.Arrays.toString(out));
        MLData mldataout = new BasicMLData(out);
        trainingSet.add(mldataIn, mldataout);
      } catch (FileNotFoundException e) {
        // best-effort: skip unreadable files but keep training on the rest
        e.printStackTrace();
      }
    }

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // train the neural network until the error falls below 0.1%
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
      epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // test the neural network on the training data itself
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          "actual-->" + Labeler.getWord(output) + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
  }
 /**
  * Create a feed-forward network using the configured activation and hidden-layer sizes.
  *
  * @param inputs number of input neurons
  * @param outputs number of output neurons
  * @return the finalized, randomized network
  */
 @Override
 public MLMethod createML(int inputs, int outputs) {
   final BasicNetwork net = new BasicNetwork();
   net.addLayer(new BasicLayer(activation, false, inputs));
   for (final Integer hiddenSize : layers) {
     net.addLayer(new BasicLayer(activation, true, hiddenSize));
   }
   net.addLayer(new BasicLayer(activation, true, outputs));
   net.getStructure().finalizeStructure();
   net.reset();
   return net;
 }
Example #9
  /** Build the fixed 22-30-3-15 test network with randomized weights. */
  @Override
  protected BasicNetwork createNetwork() {
    final BasicNetwork net = new BasicNetwork();
    net.addLayer(new BasicLayer(null, true, 22));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), false, 30));
    net.addLayer(new BasicLayer(new ActivationSigmoid(), false, 3));
    net.addLayer(new BasicLayer(null, false, 15));
    net.getStructure().finalizeStructure();
    net.reset();
    return net;
  }
Example #10
  /** Build the shared test network and a random training set before the tests run. */
  @BeforeTest
  public void setup() {
    network = new BasicNetwork();
    for (final int size :
        new int[] {DTrainTest.INPUT_COUNT, DTrainTest.HIDDEN_COUNT, DTrainTest.OUTPUT_COUNT}) {
      network.addLayer(new BasicLayer(size));
    }
    network.getStructure().finalizeStructure();
    network.reset();

    // keep a direct handle on the flat weight array for the tests to inspect
    weights = network.getFlat().getWeights();

    training = RandomTrainingFactory.generate(1000, 10000, INPUT_COUNT, OUTPUT_COUNT, -1, 1);
  }
Example #11
 /**
  * Generate the RSOM network.
  *
  * @return The neural network.
  */
 public BasicNetwork generate() {
   final BasicNetwork network = new BasicNetwork(new SOMLogic());

   final Layer inputLayer = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
   final Layer outputLayer = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
   network.addLayer(inputLayer);
   network.addLayer(outputLayer);

   // stack the two layers vertically for display purposes
   inputLayer.setX(PatternConst.START_X);
   inputLayer.setY(PatternConst.START_Y);
   outputLayer.setX(PatternConst.START_X);
   outputLayer.setY(PatternConst.START_Y + PatternConst.INC_Y);

   network.getStructure().finalizeStructure();
   network.reset();
   return network;
 }
Example #12
  /** Round-trip a trained Hopfield network through XML persistence and recall its pattern. */
  public void testHopfieldPersist() throws Exception {
    final boolean[] input = {true, false, true, false};

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new HopfieldLayer(4));

    // train the Hopfield layer on a single bipolar pattern
    final NeuralData data = new BiPolarNeuralData(input);
    final Train train = new TrainHopfield(data, network);
    train.iteration();

    // save, reload, and clean up the temporary file
    final EncogPersistedCollection encog = new EncogPersistedCollection();
    encog.add(network);
    encog.save("encogtest.xml");

    final EncogPersistedCollection encog2 = new EncogPersistedCollection();
    encog2.load("encogtest.xml");
    new File("encogtest.xml").delete();

    final BasicNetwork reloaded = (BasicNetwork) encog2.getList().get(0);

    // the reloaded network should recall the training pattern exactly
    final BiPolarNeuralData output =
        (BiPolarNeuralData) reloaded.compute(new BiPolarNeuralData(input));
    TestCase.assertTrue(output.getBoolean(0));
    TestCase.assertFalse(output.getBoolean(1));
    TestCase.assertTrue(output.getBoolean(2));
    TestCase.assertFalse(output.getBoolean(3));
  }
  /**
   * Create a feed forward network from an architecture string.
   *
   * <p>Each parsed architecture element is either an activation-function name — which changes the
   * activation applied to all SUBSEQUENT layers and adds no layer itself — or a layer
   * specification, which adds a layer using the most recently seen activation. A "?" element
   * takes a default neuron count: the input count before the first "?", the output count after.
   *
   * @param architecture The architecture string to use.
   * @param input The input count.
   * @param output The output count.
   * @return The feedforward network.
   * @throws EncogError if input/output counts are non-positive, more than two "?" elements are
   *     used, or an element can be parsed neither as an activation nor as a layer.
   */
  public final MLMethod create(final String architecture, final int input, final int output) {

    if (input <= 0) {
      throw new EncogError("Must have at least one input for feedforward.");
    }

    if (output <= 0) {
      throw new EncogError("Must have at least one output for feedforward.");
    }

    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    // default activation until the architecture string names one explicitly
    ActivationFunction af = new ActivationLinear();

    // 0 = before the first "?" (defaults to input count); afterwards "?" defaults to output count
    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
      if (questionPhase == 0) {
        defaultCount = input;
      } else {
        defaultCount = output;
      }

      final ArchitectureLayer layer = ArchitectureParse.parseLayer(layerStr, defaultCount);
      final boolean bias = layer.isBias();

      String part = layer.getName();
      if (part != null) {
        part = part.trim();
      } else {
        part = "";
      }

      // if the element names a known activation, it applies to the layers that follow it
      ActivationFunction lookup = this.factory.create(part);

      if (lookup != null) {
        af = lookup;
      } else {
        if (layer.isUsedDefault()) {
          questionPhase++;
          if (questionPhase > 2) {
            throw new EncogError("Only two ?'s may be used.");
          }
        }

        if (layer.getCount() == 0) {
          // not an activation, not a sized layer: unrecognized element
          throw new EncogError(
              "Unknown architecture element: " + architecture + ", can't parse: " + part);
        }

        // a real layer element: add it with the current activation and bias setting
        result.addLayer(new BasicLayer(af, bias, layer.getCount()));
      }
    }

    result.getStructure().finalizeStructure();
    result.reset();

    return result;
  }
Example #14
  /**
   * The main method: trains a 2-3-1 XOR network with stochastic gradient descent using the
   * RMSProp update rule, then prints the result for each training pair.
   *
   * @param args No arguments are used.
   */
  public static void main(final String args[]) {

    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network until the error drops below 1%
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;

    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(
          pair.getInput().getData(0)
              + ","
              + pair.getInput().getData(1)
              + ", actual="
              + output.getData(0)
              + ",ideal="
              + pair.getIdeal().getData(0));
    }

    // removed unused local declaration: PerturbationFeatureImportanceCalc d

    Encog.getInstance().shutdown();
  }
Example #15
  /** Train an ADALINE network on the digit bitmaps and print its prediction for each digit. */
  public static void main(String args[]) {
    final int inputNeurons = CHAR_WIDTH * CHAR_HEIGHT;
    final int outputNeurons = DIGITS.length;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationLinear(), false, inputNeurons));
    network.addLayer(new BasicLayer(new ActivationLinear(), true, outputNeurons));
    network.getStructure().finalizeStructure();

    new RangeRandomizer(-0.5, 0.5).randomize(network);

    // train it with the ADALINE rule (learning rate 0.01) until error < 1%
    final NeuralDataSet training = generateTraining();
    final Train train = new TrainAdaline(network, training, 0.01);

    int epoch = 1;
    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while (train.getError() > 0.01);

    //
    System.out.println("Error:" + network.calculateError(training));

    // test it: print each digit bitmap, appending the winning neuron to the last row
    for (int i = 0; i < DIGITS.length; i++) {
      final int output = network.winner(image2data(DIGITS[i]));

      for (int j = 0; j < CHAR_HEIGHT; j++) {
        if (j == CHAR_HEIGHT - 1) {
          System.out.println(DIGITS[i][j] + " -> " + output);
        } else {
          System.out.println(DIGITS[i][j]);
        }
      }

      System.out.println();
    }
  }
Example #16
  /** Round-trip a SOM network through XML persistence and verify winner mapping is preserved. */
  public void testSOMPersist() throws Exception {
    final Matrix matrix = new Matrix(TestPersist.trainedData);

    final NeuralData data1 = new BasicNeuralData(new double[] {-0.5, -0.5, -0.5, -0.5});
    final NeuralData data2 = new BasicNeuralData(new double[] {0.5, 0.5, 0.5, 0.5});
    final NeuralData data3 = new BasicNeuralData(new double[] {-0.5, -0.5, -0.5, 0.5});
    final NeuralData data4 = new BasicNeuralData(new double[] {0.5, 0.5, 0.5, -0.5});

    final SOMLayer layer = new SOMLayer(4, NormalizationType.MULTIPLICATIVE);
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(layer);
    network.addLayer(new BasicLayer(2));
    layer.setMatrix(matrix);

    // save, reload, and clean up the temporary file
    final EncogPersistedCollection encog = new EncogPersistedCollection();
    encog.add(network);
    encog.save("encogtest.xml");

    final EncogPersistedCollection encog2 = new EncogPersistedCollection();
    encog2.load("encogtest.xml");
    new File("encogtest.xml").delete();

    final BasicNetwork reloaded = (BasicNetwork) encog2.getList().get(0);

    // opposite patterns map to different neurons
    final int data1Neuron = reloaded.winner(data1);
    final int data2Neuron = reloaded.winner(data2);
    TestCase.assertTrue(data1Neuron != data2Neuron);

    // near-variants map to the same neuron as their base pattern
    final int data3Neuron = reloaded.winner(data3);
    final int data4Neuron = reloaded.winner(data4);
    TestCase.assertTrue(data3Neuron == data1Neuron);
    TestCase.assertTrue(data4Neuron == data2Neuron);
  }
Example #17
  /**
   * Load a network from the given XML element.
   *
   * @param networkNode The node to load from.
   * @return The loaded network.
   */
  public EncogPersistedObject load(final Element networkNode) {
    final BasicNetwork network = new BasicNetwork();

    network.setName(networkNode.getAttribute("name"));
    network.setDescription(networkNode.getAttribute("description"));

    // walk the <layers> children, delegating each element to its registered persistor
    final Element layers = XMLUtil.findElement(networkNode, "layers");
    for (Node child = layers.getFirstChild(); child != null; child = child.getNextSibling()) {
      if (child instanceof Element) {
        final Element node = (Element) child;
        final Persistor persistor = EncogPersistedCollection.createPersistor(node.getNodeName());
        if (persistor != null) {
          network.addLayer((Layer) persistor.load(node));
        }
      }
    }

    return network;
  }