@Test
public void testDualOutput() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
    network.getStructure().finalizeStructure();
    (new ConsistentRandomizer(-1, 1)).randomize(network);

    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL2);

    HessianFD testFD = new HessianFD();
    testFD.init(network, trainingData);
    testFD.compute();
    // dump(testFD, "FD");

    HessianCR testCR = new HessianCR();
    testCR.init(network, trainingData);
    testCR.compute();
    // dump(testCR, "CR");

    Assert.assertTrue(testCR.getHessianMatrix().equals(testFD.getHessianMatrix(), 4));
}
/**
 * Create a feed forward network.
 *
 * @param architecture The architecture string to use.
 * @param input The input count.
 * @param output The output count.
 * @return The feedforward network.
 */
public final MLMethod create(final String architecture, final int input, final int output) {
    if (input <= 0) {
        throw new EncogError("Must have at least one input for feedforward.");
    }
    if (output <= 0) {
        throw new EncogError("Must have at least one output for feedforward.");
    }

    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    int questionPhase = 0;
    for (final String layerStr : layers) {
        int defaultCount;

        // determine default
        if (questionPhase == 0) {
            defaultCount = input;
        } else {
            defaultCount = output;
        }

        final ArchitectureLayer layer = ArchitectureParse.parseLayer(layerStr, defaultCount);
        final boolean bias = layer.isBias();

        String part = layer.getName();
        if (part != null) {
            part = part.trim();
        } else {
            part = "";
        }

        ActivationFunction lookup = this.factory.create(part);

        if (lookup != null) {
            af = lookup;
        } else {
            if (layer.isUsedDefault()) {
                questionPhase++;
                if (questionPhase > 2) {
                    throw new EncogError("Only two ?'s may be used.");
                }
            }

            if (layer.getCount() == 0) {
                throw new EncogError("Unknown architecture element: "
                        + architecture + ", can't parse: " + part);
            }

            result.addLayer(new BasicLayer(af, bias, layer.getCount()));
        }
    }

    result.getStructure().finalizeStructure();
    result.reset();

    return result;
}
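For context, a minimal usage sketch of this factory, mirroring the unit test later in this section: '?' tokens take the default counts passed to create() (the first '?' is the input count, a later '?' the output count), ':B' marks a biased layer, and an activation token such as TANH or LINEAR applies to the layers that follow it until replaced.

// Minimal usage sketch; the architecture string matches testFactoryFeedforward below.
MLMethodFactory factory = new MLMethodFactory();
BasicNetwork network = (BasicNetwork) factory.create(
        MLMethodFactory.TYPE_FEEDFORWARD,
        "?:B->TANH->3->LINEAR->?:B",  // 1 input -> 3 TANH hidden -> 4 linear output
        1, 4);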
/**
 * Convert to an array. This is used with some training algorithms that
 * require that the "memory" of the neuron (the weight and bias values) be
 * expressed as a linear array.
 *
 * @param network The network to encode.
 * @return The weights and bias values, expressed as a linear array.
 */
public static double[] networkToArray(final BasicNetwork network) {
    final int size = network.getStructure().calculateSize();

    // allocate an array to hold
    final double[] result = new double[size];

    int index = 0;
    for (final Layer layer : network.getStructure().getLayers()) {
        // process layer bias
        if (layer.hasBias()) {
            for (int i = 0; i < layer.getNeuronCount(); i++) {
                result[index++] = layer.getBiasWeight(i);
            }
        }

        // process synapses
        for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
            if (synapse.getMatrix() != null) {
                // process each weight matrix
                for (int x = 0; x < synapse.getToNeuronCount(); x++) {
                    for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
                        result[index++] = synapse.getMatrix().get(y, x);
                    }
                }
            }
        }
    }

    return result;
}
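A short round-trip sketch may help: the encoded array can be modified and written back with the companion arrayToNetwork method shown later in this section (assumption: both helpers live on NetworkCODEC, as the calls inside arrayToNetwork suggest).

// Round-trip sketch (class name NetworkCODEC is inferred from the decode method below).
double[] state = NetworkCODEC.networkToArray(network); // snapshot biases + weights
state[0] += 0.01;                                      // e.g. a trainer nudges one value
NetworkCODEC.arrayToNetwork(state, network);           // write the adjusted state back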
/**
 * Generate the network.
 *
 * @return The generated network.
 */
public BasicNetwork generate() {
    Layer input, instar, outstar;
    int y = PatternConst.START_Y;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(input = new BasicLayer(new ActivationLinear(), false, this.inputCount));
    network.addLayer(instar = new BasicLayer(new ActivationCompetitive(), false, this.instarCount));
    network.addLayer(outstar = new BasicLayer(new ActivationLinear(), false, this.outstarCount));
    network.getStructure().finalizeStructure();
    network.reset();

    input.setX(PatternConst.START_X);
    input.setY(y);
    y += PatternConst.INC_Y;

    instar.setX(PatternConst.START_X);
    instar.setY(y);
    y += PatternConst.INC_Y;

    outstar.setX(PatternConst.START_X);
    outstar.setY(y);

    // tag as needed
    network.tagLayer(BasicNetwork.TAG_INPUT, input);
    network.tagLayer(BasicNetwork.TAG_OUTPUT, outstar);
    network.tagLayer(CPNPattern.TAG_INSTAR, instar);
    network.tagLayer(CPNPattern.TAG_OUTSTAR, outstar);

    return network;
}
public void testFeedforwardPersist() throws Throwable {
    NeuralDataSet trainingData = new BasicNeuralDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);

    BasicNetwork network = createNetwork();
    Train train = new Backpropagation(network, trainingData, 0.7, 0.9);

    for (int i = 0; i < 5000; i++) {
        train.iteration();
        network = (BasicNetwork) train.getNetwork();
    }

    TestCase.assertTrue("Error too high for backpropagation", train.getError() < 0.1);
    TestCase.assertTrue("XOR outputs not correct", XOR.verifyXOR(network, 0.1));

    EncogPersistedCollection encog = new EncogPersistedCollection();
    encog.add(network);
    encog.save("encogtest.xml");

    EncogPersistedCollection encog2 = new EncogPersistedCollection();
    encog2.load("encogtest.xml");
    new File("encogtest.xml").delete();

    BasicNetwork n = (BasicNetwork) encog2.getList().get(0);
    TestCase.assertTrue("Error too high for load", n.calculateError(trainingData) < 0.1);
}
public void testHopfieldPersist() throws Exception {
    boolean[] input = {true, false, true, false};

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new HopfieldLayer(4));

    NeuralData data = new BiPolarNeuralData(input);
    Train train = new TrainHopfield(data, network);
    train.iteration();

    EncogPersistedCollection encog = new EncogPersistedCollection();
    encog.add(network);
    encog.save("encogtest.xml");

    EncogPersistedCollection encog2 = new EncogPersistedCollection();
    encog2.load("encogtest.xml");
    new File("encogtest.xml").delete();

    BasicNetwork network2 = (BasicNetwork) encog2.getList().get(0);

    BiPolarNeuralData output = (BiPolarNeuralData) network2.compute(new BiPolarNeuralData(input));
    TestCase.assertTrue(output.getBoolean(0));
    TestCase.assertFalse(output.getBoolean(1));
    TestCase.assertTrue(output.getBoolean(2));
    TestCase.assertFalse(output.getBoolean(3));
}
public static void train(File dataDir) {
    final File networkFile = new File(dataDir, Config.NETWORK_FILE);
    final File trainingFile = new File(dataDir, Config.TRAINING_FILE);

    // network file
    if (!networkFile.exists()) {
        System.out.println("Can't read file: " + networkFile.getAbsolutePath());
        return;
    }

    BasicNetwork network = (BasicNetwork) EncogDirectoryPersistence.loadObject(networkFile);

    // training file
    if (!trainingFile.exists()) {
        System.out.println("Can't read file: " + trainingFile.getAbsolutePath());
        return;
    }

    final MLDataSet trainingSet = EncogUtility.loadEGB2Memory(trainingFile);

    // train the neural network
    EncogUtility.trainConsole(network, trainingSet, Config.TRAINING_MINUTES);

    System.out.println("Final Error: " + (float) network.calculateError(trainingSet));
    System.out.println("Training complete, saving network.");
    EncogDirectoryPersistence.saveObject(networkFile, network);
    System.out.println("Network saved.");

    Encog.getInstance().shutdown();
}
private BasicNetwork createNetwork() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new FeedforwardLayer(2));
    network.addLayer(new FeedforwardLayer(3));
    network.addLayer(new FeedforwardLayer(1));
    network.reset();
    return network;
}
public void testClone() throws Throwable {
    BasicNetwork source = XOR.createThreeLayerNet();
    source.reset();

    BasicNetwork target = (BasicNetwork) source.clone();
    TestCase.assertTrue(target.equals(source));
}
public static BasicNetwork generateNetwork() {
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
    network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
    network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
}
public void loadAndEvaluate() {
    System.out.println("Loading network");
    final EncogPersistedCollection encog = new EncogPersistedCollection(FILENAME);

    BasicNetwork network = (BasicNetwork) encog.find("network");

    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);
    double e = network.calculateError(trainingSet);
    System.out.println("Loaded network's error is (should be same as above): " + e);
}
public void testAnalyze() {
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 2, 0, 1, false);

    double[] weights = new double[network.encodedArrayLength()];
    EngineArray.fill(weights, 1.0);
    network.decodeFromArray(weights);

    AnalyzeNetwork analyze = new AnalyzeNetwork(network);
    Assert.assertEquals(weights.length, analyze.getWeightsAndBias().getSamples());
    Assert.assertEquals(3, analyze.getBias().getSamples());
    Assert.assertEquals(6, analyze.getWeights().getSamples());
}
@Override
public void randomize(MLMethod method) {
    if (!(method instanceof BasicNetwork)) {
        throw new EncogError("Nguyen-Widrow only supports BasicNetwork.");
    }

    BasicNetwork network = (BasicNetwork) method;
    for (int fromLayer = 0; fromLayer < network.getLayerCount() - 1; fromLayer++) {
        randomizeSynapse(network, fromLayer);
    }
}
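The per-synapse work delegated to randomizeSynapse is not shown here. For reference, the classic Nguyen-Widrow rule it is presumably based on draws weights uniformly from [-0.5, 0.5], then rescales each neuron's incoming weight vector to length beta = 0.7 * h^(1/n), where h is the layer's neuron count and n its input count. A self-contained sketch of that textbook rule (not Encog's exact implementation, which may handle biases differently):

// Classic Nguyen-Widrow initialization for one weight matrix w[h][n]:
// rows are the layer's neurons, columns its inputs.
static void nguyenWidrow(double[][] w, java.util.Random rnd) {
    int h = w.length;          // neurons in this layer
    int n = w[0].length;       // inputs feeding each neuron
    double beta = 0.7 * Math.pow(h, 1.0 / n);
    for (double[] row : w) {
        double norm = 0;
        for (int j = 0; j < n; j++) {
            row[j] = rnd.nextDouble() - 0.5;  // uniform in [-0.5, 0.5]
            norm += row[j] * row[j];
        }
        norm = Math.sqrt(norm);
        for (int j = 0; j < n; j++) {
            row[j] = beta * row[j] / norm;    // scale the row to length beta
        }
    }
}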
@BeforeTest
public void setup() {
    network = new BasicNetwork();
    network.addLayer(new BasicLayer(DTrainTest.INPUT_COUNT));
    network.addLayer(new BasicLayer(DTrainTest.HIDDEN_COUNT));
    network.addLayer(new BasicLayer(DTrainTest.OUTPUT_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();

    weights = network.getFlat().getWeights();

    training = RandomTrainingFactory.generate(1000, 10000, INPUT_COUNT, OUTPUT_COUNT, -1, 1);
}
public void performEvaluate() {
    try {
        EvaluateDialog dialog = new EvaluateDialog(EncogWorkBench.getInstance().getMainWindow());
        if (dialog.process()) {
            BasicNetwork network = dialog.getNetwork();
            NeuralDataSet training = dialog.getTrainingSet();
            double error = network.calculateError(training);
            EncogWorkBench.displayMessage("Error For this Network",
                    "" + Format.formatPercent(error));
        }
    } catch (Throwable t) {
        EncogWorkBench.displayError("Error Evaluating Network", t);
    }
}
/**
 * Update the velocity, position and personal best position of a particle.
 *
 * @param particleIndex index of the particle in the swarm
 * @param init if true, the position and velocity will be initialised.
 */
protected void updateParticle(int particleIndex, boolean init) {
    int i = particleIndex;
    double[] particlePosition = null;

    if (init) {
        // Create a new particle with random values.
        // Except the first particle which has the same values
        // as the network passed to the algorithm.
        if (m_networks[i] == null) {
            m_networks[i] = (BasicNetwork) m_bestNetwork.clone();
            if (i > 0) {
                m_randomizer.randomize(m_networks[i]);
            }
        }
        particlePosition = getNetworkState(i);
        m_bestVectors[i] = particlePosition;

        // randomise the velocity
        m_va.randomise(m_velocities[i], m_maxVelocity);
    } else {
        particlePosition = getNetworkState(i);
        updateVelocity(i, particlePosition);

        // velocity clamping
        m_va.clampComponents(m_velocities[i], m_maxVelocity);

        // new position (Xt = Xt-1 + Vt)
        m_va.add(particlePosition, m_velocities[i]);

        // pin the particle against the boundary of the search space
        // (only for the components exceeding maxPosition)
        m_va.clampComponents(particlePosition, m_maxPosition);

        setNetworkState(i, particlePosition);
    }

    updatePersonalBestPosition(i, particlePosition);
}
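updateVelocity(i, particlePosition) is defined elsewhere; the canonical PSO rule it would implement combines an inertia term with a cognitive pull toward the particle's personal best and a social pull toward the swarm's global best. A hedged sketch of that standard rule (parameter names are illustrative, not this class's fields):

// Standard PSO velocity update: v = w*v + c1*r1*(pbest - x) + c2*r2*(gbest - x)
static void canonicalVelocityUpdate(double[] v, double[] x, double[] pbest,
        double[] gbest, double inertia, double c1, double c2, java.util.Random rnd) {
    for (int d = 0; d < v.length; d++) {
        v[d] = inertia * v[d]
                + c1 * rnd.nextDouble() * (pbest[d] - x[d])   // cognitive component
                + c2 * rnd.nextDouble() * (gbest[d] - x[d]);  // social component
    }
}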
/**
 * Generate the RSOM network.
 *
 * @return The neural network.
 */
public BasicNetwork generate() {
    final Layer input = new BasicLayer(new ActivationLinear(), false, this.inputNeurons);
    final Layer output = new BasicLayer(new ActivationLinear(), false, this.outputNeurons);
    int y = PatternConst.START_Y;

    final BasicNetwork network = new BasicNetwork(new SOMLogic());
    network.addLayer(input);
    network.addLayer(output);

    input.setX(PatternConst.START_X);
    output.setX(PatternConst.START_X);
    input.setY(y);
    y += PatternConst.INC_Y;
    output.setY(y);

    network.getStructure().finalizeStructure();
    network.reset();

    return network;
}
public void calculateWeights(BasicNetwork network) {
    int n1, n2, n3, n4;
    int i, j;
    int predN3, succN3;
    double weight;

    BoltzmannLogic logic = (BoltzmannLogic) network.getLogic();

    for (n1 = 0; n1 < NUM_CITIES; n1++) {
        for (n2 = 0; n2 < NUM_CITIES; n2++) {
            i = n1 * NUM_CITIES + n2;
            for (n3 = 0; n3 < NUM_CITIES; n3++) {
                for (n4 = 0; n4 < NUM_CITIES; n4++) {
                    j = n3 * NUM_CITIES + n4;
                    weight = 0;
                    if (i != j) {
                        predN3 = (n3 == 0 ? NUM_CITIES - 1 : n3 - 1);
                        succN3 = (n3 == NUM_CITIES - 1 ? 0 : n3 + 1);
                        if ((n1 == n3) || (n2 == n4)) {
                            weight = -gamma;
                        } else if ((n1 == predN3) || (n1 == succN3)) {
                            weight = -distance[n2][n4];
                        }
                    }
                    logic.getThermalSynapse().getMatrix().set(i, j, weight);
                }
            }
            logic.getThermalLayer().setThreshold(i, -gamma / 2);
        }
    }
}
public void testFactoryFeedforward() {
    String architecture = "?:B->TANH->3->LINEAR->?:B";
    MLMethodFactory factory = new MLMethodFactory();
    BasicNetwork network = (BasicNetwork) factory.create(
            MLMethodFactory.TYPE_FEEDFORWARD, architecture, 1, 4);

    Assert.assertTrue(network.isLayerBiased(0));
    Assert.assertFalse(network.isLayerBiased(1));
    Assert.assertTrue(network.isLayerBiased(2));

    Assert.assertEquals(3, network.getLayerCount());
    Assert.assertTrue(network.getActivation(0) instanceof ActivationLinear);
    Assert.assertTrue(network.getActivation(1) instanceof ActivationTANH);
    Assert.assertTrue(network.getActivation(2) instanceof ActivationLinear);
    Assert.assertEquals(18, network.encodedArrayLength());
    Assert.assertEquals(1, network.getLayerNeuronCount(0));
    Assert.assertEquals(3, network.getLayerNeuronCount(1));
    Assert.assertEquals(4, network.getLayerNeuronCount(2));
}
public void run() {
    BoltzmannPattern pattern = new BoltzmannPattern();
    pattern.setInputNeurons(NEURON_COUNT);
    BasicNetwork network = pattern.generate();
    BoltzmannLogic logic = (BoltzmannLogic) network.getLogic();

    createCities();
    calculateWeights(network);

    logic.setTemperature(100);

    do {
        logic.establishEquilibrium();
        System.out.println(logic.getTemperature() + " : " + displayTour(logic.getCurrentState()));
        logic.decreaseTemperature(0.99);
    } while (!isValidTour(logic.getCurrentState()));

    System.out.println("Final Length: " + this.lengthOfTour(logic.getCurrentState()));
}
public PredictSIN() {
    this.setTitle("SIN Wave Predict");
    this.setSize(640, 480);

    Container content = this.getContentPane();
    content.setLayout(new BorderLayout());
    content.add(graph = new GraphPanel(), BorderLayout.CENTER);

    network = EncogUtility.simpleFeedForward(INPUT_WINDOW, PREDICT_WINDOW * 2, 0, 1, true);
    network.reset();
    graph.setNetwork(network);

    this.trainingData = generateTraining();
    this.train = new ResilientPropagation(this.network, this.trainingData);

    btnTrain = new JButton("Train");
    this.btnTrain.addActionListener(this);
    content.add(btnTrain, BorderLayout.SOUTH);

    graph.setError(network.calculateError(this.trainingData));
}
/**
 * Use an array to populate the memory of the neural network.
 *
 * @param array An array of doubles.
 * @param network The network to encode.
 */
public static void arrayToNetwork(final double[] array, final BasicNetwork network) {
    int index = 0;

    for (final Layer layer : network.getStructure().getLayers()) {
        if (layer.hasBias()) {
            // process layer bias
            for (int i = 0; i < layer.getNeuronCount(); i++) {
                layer.setBiasWeight(i, array[index++]);
            }
        }

        if (network.getStructure().isConnectionLimited()) {
            index = NetworkCODEC.processSynapseLimited(network, layer, array, index);
        } else {
            index = NetworkCODEC.processSynapseFull(network, layer, array, index);
        }
    }
}
public void recognizer(List<File> files) {
    FeatureExtractor fe = new FeatureExtractor();
    MLDataSet trainingSet = new BasicMLDataSet();

    for (File f : files) {
        // System.out.println(f.getAbsolutePath());
        List<double[]> data;
        try {
            data = fe.fileProcessor(f);
            MLData mldataIn = new BasicMLData(data.get(0));

            // one-hot encode the label for this file
            double[] out = new double[NUM_OUT];
            int index = Labeler.getLabel(f);
            // System.out.println(index + "" + data.get(0));
            out[index] = 1.0;
            System.out.println(java.util.Arrays.toString(out));

            MLData mldataOut = new BasicMLData(out);
            trainingSet.add(mldataIn, mldataOut);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 4 * NUM_IN));
    // network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 2 * NUM_IN));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, NUM_OUT));
    network.getStructure().finalizeStructure();
    network.reset();

    // train the neural network
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    System.out.println("Training Set: " + trainingSet.size());

    int epoch = 1;
    do {
        train.iteration();
        System.out.println("Epoch:" + epoch + " Error-->" + train.getError());
        epoch++;
    } while (train.getError() > 0.001);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
        final MLData output = network.compute(pair.getInput());
        System.out.println("actual-->" + Labeler.getWord(output)
                + ", ideal-->" + Labeler.getWord(pair.getIdeal()));
    }

    Encog.getInstance().shutdown();
}
/**
 * Save the specified object.
 *
 * @param object The object to save.
 * @param hd The XML handler to write to.
 */
public void save(final EncogPersistedObject object, final TransformerHandler hd) {
    try {
        final AttributesImpl atts = EncogPersistedCollection.createAttributes(object);
        final BasicNetwork network = (BasicNetwork) object;

        hd.startElement("", "", network.getClass().getSimpleName(), atts);
        hd.startElement("", "", "layers", atts);

        for (final Layer layer : network.getLayers()) {
            if (layer instanceof EncogPersistedObject) {
                final EncogPersistedObject epo = (EncogPersistedObject) layer;
                final Persistor persistor =
                        EncogPersistedCollection.createPersistor(layer.getClass().getSimpleName());
                persistor.save(epo, hd);
            }
        }

        hd.endElement("", "", "layers");
        hd.endElement("", "", network.getClass().getSimpleName());
    } catch (final SAXException e) {
        throw new NeuralNetworkError(e);
    }
}
private BasicNetwork getNetwork(ExampleSet exampleSet) throws OperatorException {
    BasicNetwork network = new BasicNetwork();

    // input layer
    network.addLayer(new FeedforwardLayer(exampleSet.getAttributes().size()));

    // hidden layers
    log("No hidden layers defined. Using default hidden layers.");
    int layerSize = getParameterAsInt(PARAMETER_DEFAULT_HIDDEN_LAYER_SIZE);
    if (layerSize <= 0) {
        layerSize = getDefaultLayerSize(exampleSet);
    }
    for (int p = 0; p < getParameterAsInt(PARAMETER_DEFAULT_NUMBER_OF_HIDDEN_LAYERS); p++) {
        network.addLayer(new FeedforwardLayer(layerSize));
    }

    // output layer
    if (exampleSet.getAttributes().getLabel().isNominal()) {
        network.addLayer(new FeedforwardLayer(new ActivationSigmoid(), 1));
    } else {
        network.addLayer(new FeedforwardLayer(new ActivationLinear(), 1));
    }

    network.reset(RandomGenerator.getRandomGenerator(
            getParameterAsBoolean(RandomGenerator.PARAMETER_USE_LOCAL_RANDOM_SEED),
            getParameterAsInt(RandomGenerator.PARAMETER_LOCAL_RANDOM_SEED)));

    return network;
}
public void trainAndSave() {
    System.out.println("Training XOR network to under 1% error rate.");

    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final Train train = new ResilientPropagation(network, trainingSet);
    do {
        train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    System.out.println("Network trained to error: " + e);

    System.out.println("Saving network");
    final EncogPersistedCollection encog = new EncogPersistedCollection(FILENAME);
    encog.create();
    encog.add("network", network);
}
/**
 * Load the specified object.
 *
 * @param networkNode The node to load from.
 * @return The loaded object.
 */
public EncogPersistedObject load(final Element networkNode) {
    final BasicNetwork network = new BasicNetwork();

    final String name = networkNode.getAttribute("name");
    final String description = networkNode.getAttribute("description");
    network.setName(name);
    network.setDescription(description);

    final Element layers = XMLUtil.findElement(networkNode, "layers");
    for (Node child = layers.getFirstChild(); child != null; child = child.getNextSibling()) {
        if (!(child instanceof Element)) {
            continue;
        }
        final Element node = (Element) child;
        final Persistor persistor = EncogPersistedCollection.createPersistor(node.getNodeName());
        if (persistor != null) {
            network.addLayer((Layer) persistor.load(node));
        }
    }

    return network;
}
/**
 * Create a simple feedforward neural network.
 *
 * @param input The number of input neurons.
 * @param hidden1 The number of hidden layer 1 neurons.
 * @param hidden2 The number of hidden layer 2 neurons.
 * @param output The number of output neurons.
 * @param tanh True to use the hyperbolic tangent activation function, false
 *             to use the sigmoid activation function.
 * @return The neural network.
 */
public static BasicNetwork simpleFeedForward(final int input, final int hidden1,
        final int hidden2, final int output, final boolean tanh) {
    final FeedForwardPattern pattern = new FeedForwardPattern();
    pattern.setInputNeurons(input);
    pattern.setOutputNeurons(output);

    if (tanh) {
        pattern.setActivationFunction(new ActivationTANH());
    } else {
        pattern.setActivationFunction(new ActivationSigmoid());
    }

    if (hidden1 > 0) {
        pattern.addHiddenLayer(hidden1);
    }
    if (hidden2 > 0) {
        pattern.addHiddenLayer(hidden2);
    }

    final BasicNetwork network = (BasicNetwork) pattern.generate();
    network.reset();
    return network;
}
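A minimal usage sketch, reusing the trainConsole helper that appears earlier in this section (the 2-3-1 topology and one-minute budget are illustrative):

// Build a 2-3-1 sigmoid network and train it briefly on XOR.
BasicNetwork net = EncogUtility.simpleFeedForward(2, 3, 0, 1, false);
MLDataSet xor = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
EncogUtility.trainConsole(net, xor, 1); // train for one minute
System.out.println("Error: " + net.calculateError(xor));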
/**
 * Process a partially connected synapse.
 *
 * @param network The network to process.
 * @param layer The layer to process.
 * @param array The array to process.
 * @param index The current index.
 * @return The index after this synapse has been read.
 */
private static int processSynapseLimited(final BasicNetwork network, final Layer layer,
        final double[] array, final int index) {
    int result = index;

    // process synapses
    for (final Synapse synapse : network.getStructure().getPreviousSynapses(layer)) {
        if (synapse.getMatrix() != null) {
            // process each weight matrix
            for (int x = 0; x < synapse.getToNeuronCount(); x++) {
                for (int y = 0; y < synapse.getFromNeuronCount(); y++) {
                    final double oldValue = synapse.getMatrix().get(y, x);
                    double value = array[result++];
                    if (Math.abs(oldValue) < network.getStructure().getConnectionLimit()) {
                        value = 0;
                    }
                    synapse.getMatrix().set(y, x, value);
                }
            }
        }
    }

    return result;
}
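In other words, the connection limit makes pruning sticky during decode: a weight whose previous magnitude is inside the limit is treated as disconnected, so whatever value arrives from the array is discarded. A tiny illustration (the limit value is made up):

double limit = 1.0e-10;   // illustrative connection limit
double oldValue = 0.0;    // previously pruned weight
double incoming = 0.42;   // candidate value from the array
double decoded = (Math.abs(oldValue) < limit) ? 0 : incoming; // stays pruned (0)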
@Override
protected BasicNetwork createNetwork() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 22));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 30));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 3));
    network.addLayer(new BasicLayer(null, false, 15));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
}