/** Utility method to switch from MLData to List<Double>. */
public static List<Double> doubleListFromNeuralOutput(MLData neuralOutput) {
    List<Double> result = new ArrayList<Double>(neuralOutput.size());
    for (double value : neuralOutput.getData()) {
        result.add(value);
    }
    return result;
}
/** Train the SOM network; intended to run on the background training thread. */
public void trainSOM() {
    try {
        final int inputNeuron = OCR.DOWNSAMPLE_HEIGHT * OCR.DOWNSAMPLE_WIDTH;
        final int outputNeuron = this.letterListModel.size();

        final MLDataSet trainingSet = new BasicMLDataSet();
        for (int t = 0; t < this.letterListModel.size(); t++) {
            final MLData item = new BasicMLData(inputNeuron);
            int idx = 0;
            final SampleData ds = (SampleData) this.letterListModel.getElementAt(t);
            for (int y = 0; y < ds.getHeight(); y++) {
                for (int x = 0; x < ds.getWidth(); x++) {
                    item.setData(idx++, ds.getData(x, y) ? .5 : -.5);
                }
            }
            trainingSet.add(new BasicMLDataPair(item, null));
        }

        this.net = new SOM(inputNeuron, outputNeuron);
        this.net.reset();

        SOMClusterCopyTraining train = new SOMClusterCopyTraining(this.net, trainingSet);
        train.iteration();

        JOptionPane.showMessageDialog(
            this, "Training has completed.", "Training", JOptionPane.PLAIN_MESSAGE);
    } catch (final Exception e) {
        e.printStackTrace();
        JOptionPane.showMessageDialog(this, "Error: " + e, "Training", JOptionPane.ERROR_MESSAGE);
    }
}
/**
 * Load a CSV file into a memory dataset.
 *
 * @param format The CSV format to use.
 * @param filename The filename to load.
 * @param headers True if there is a header line.
 * @param inputSize The input size. Input always comes first in a file.
 * @param idealSize The ideal size, 0 for unsupervised.
 * @return An MLDataSet that holds the contents of the CSV file.
 */
public static MLDataSet loadCSVTOMemory(
        CSVFormat format, String filename, boolean headers, int inputSize, int idealSize) {
    MLDataSet result = new BasicMLDataSet();
    ReadCSV csv = new ReadCSV(filename, headers, format);

    while (csv.next()) {
        MLData input = null;
        MLData ideal = null;
        int index = 0;

        input = new BasicMLData(inputSize);
        for (int i = 0; i < inputSize; i++) {
            double d = csv.getDouble(index++);
            input.setData(i, d);
        }

        if (idealSize > 0) {
            ideal = new BasicMLData(idealSize);
            for (int i = 0; i < idealSize; i++) {
                double d = csv.getDouble(index++);
                ideal.setData(i, d);
            }
        }

        MLDataPair pair = new BasicMLDataPair(input, ideal);
        result.add(pair);
    }

    csv.close();
    return result;
}
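/*
 * A minimal usage sketch for the loader above, assuming the usual Encog imports
 * (CSVFormat, MLDataSet, MLDataPair). The file name "iris.csv" and the column
 * counts (4 inputs, 1 ideal) are illustrative assumptions, not part of the
 * original code; any numeric CSV laid out as input-then-ideal columns would do.
 */
public static void loadCsvExample() {
    MLDataSet set = loadCSVTOMemory(CSVFormat.ENGLISH, "iris.csv", true, 4, 1);
    for (MLDataPair pair : set) {
        System.out.println(pair.getInput().size() + " inputs, "
            + pair.getIdeal().size() + " ideal values");
    }
}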
/**
 * Calculate the best matching unit (BMU). This is the output neuron that has the lowest
 * Euclidean distance to the input vector.
 *
 * @param input The input vector.
 * @return The output neuron number that is the BMU.
 */
public int calculateBMU(final MLData input) {
    int result = 0;

    if (input.size() > this.som.getInputCount()) {
        throw new NeuralNetworkError(
            "Can't train SOM with input size of " + som.getInputCount()
                + " with input data of count " + input.size());
    }

    // Track the lowest distance so far.
    double lowestDistance = Double.MAX_VALUE;

    for (int i = 0; i < this.som.getOutputCount(); i++) {
        final double distance = calculateEuclideanDistance(this.som.getWeights(), input, i);

        // Track the lowest distance, this is the BMU.
        if (distance < lowestDistance) {
            lowestDistance = distance;
            result = i;
        }
    }

    // Track the worst distance, this is the error for the entire network.
    if (lowestDistance > this.worstDistance) {
        this.worstDistance = lowestDistance;
    }

    return result;
}
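/*
 * Hedged sketch of how the BMU search above is typically exercised: ask a SOM which
 * output neuron lies closest to an input vector. The 4-input, 2-output shape and the
 * sample values are illustrative assumptions; classify() is assumed to perform the
 * equivalent lowest-Euclidean-distance search.
 */
public static void bmuExample() {
    SOM som = new SOM(4, 2);  // 4 inputs, 2 output neurons
    som.reset();              // random weights; normally the SOM is trained first
    MLData input = new BasicMLData(new double[] { .5, -.5, .5, -.5 });
    int bmu = som.classify(input);
    System.out.println("Best matching unit: neuron #" + bmu);
}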
/**
 * Called when the recognize button is pressed.
 *
 * @param event The event.
 */
void recognize_actionPerformed(final java.awt.event.ActionEvent event) {
    if (this.net == null) {
        JOptionPane.showMessageDialog(
            this, "I need to be trained first!", "Error", JOptionPane.ERROR_MESSAGE);
        return;
    }

    this.entry.downSample();

    final MLData input = new BasicMLData(5 * 7);
    int idx = 0;
    final SampleData ds = this.sample.getData();
    for (int y = 0; y < ds.getHeight(); y++) {
        for (int x = 0; x < ds.getWidth(); x++) {
            input.setData(idx++, ds.getData(x, y) ? .5 : -.5);
        }
    }

    final int best = this.net.classify(input);
    final char[] map = mapNeurons();
    JOptionPane.showMessageDialog(
        this,
        " " + map[best] + " (Neuron #" + best + " fired)",
        "That Letter Is",
        JOptionPane.PLAIN_MESSAGE);
    clear_actionPerformed(null);
}
/**
 * Process the file and cluster.
 *
 * @param outputFile The output file.
 * @param clusters The number of clusters.
 * @param theAnalyst The analyst to use.
 * @param iterations The number of iterations to use.
 */
public void process(
        final File outputFile,
        final int clusters,
        final EncogAnalyst theAnalyst,
        final int iterations) {
    final PrintWriter tw = this.prepareOutputFile(
        outputFile, theAnalyst.getScript().getNormalize().countActiveFields() - 1, 1);

    resetStatus();

    final KMeansClustering cluster = new KMeansClustering(clusters, this.data);
    cluster.iteration(iterations);

    int clusterNum = 0;
    for (final MLCluster cl : cluster.getClusters()) {
        for (final MLData item : cl.getData()) {
            final int clsIndex = item.size();
            final LoadedRow lr = new LoadedRow(this.getFormat(), item.getData(), 1);
            lr.getData()[clsIndex] = "" + clusterNum;
            writeRow(tw, lr);
        }
        clusterNum++;
    }

    reportDone(false);
    tw.close();
}
/**
 * Format neural data as a list of numbers.
 *
 * @param data The neural data to format.
 * @return The formatted neural data.
 */
public static String formatNeuralData(final MLData data) {
    final StringBuilder result = new StringBuilder();
    for (int i = 0; i < data.size(); i++) {
        if (i != 0) {
            result.append(',');
        }
        result.append(Format.formatDouble(data.getData(i), 4));
    }
    return result.toString();
}
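/*
 * Quick illustration of the formatter above; the values are arbitrary and the exact
 * output depends on Format.formatDouble's locale-sensitive formatting.
 */
public static void formatExample() {
    MLData data = new BasicMLData(new double[] { 1.0, 2.5, -0.25 });
    // Prints the three values to four decimal places, comma separated.
    System.out.println(formatNeuralData(data));
}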
/**
 * Determine the classes for the specified input.
 *
 * @param input The input.
 * @return An array of class indexes.
 */
public int[] determineClasses(MLData input) {
    int[] result = new int[input.size()];

    for (int i = 0; i < input.size(); i++) {
        BayesianEvent event = this.events.get(i);
        int classIndex = event.matchChoiceToRange(input.getData(i));
        result[i] = classIndex;
    }

    return result;
}
/**
 * Calculate the Euclidean distance for the specified output neuron and the input vector. This is
 * the square root of the sum of the squared differences between the weight and input vectors.
 *
 * @param matrix The matrix to get the weights from.
 * @param input The input vector.
 * @param outputNeuron The neuron we are calculating the distance for.
 * @return The Euclidean distance.
 */
public double calculateEuclideanDistance(
        final Matrix matrix, final MLData input, final int outputNeuron) {
    double result = 0;

    // Loop over all input data.
    for (int i = 0; i < input.size(); i++) {
        final double diff = input.getData(i) - matrix.get(outputNeuron, i);
        result += diff * diff;
    }
    return BoundMath.sqrt(result);
}
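/*
 * The distance computed above is the standard Euclidean distance between the input
 * vector and one row of the weight matrix. A minimal plain-array sketch of the same
 * formula, independent of the Matrix and MLData types:
 */
public static double euclideanDistance(double[] weights, double[] input) {
    double sum = 0;
    for (int i = 0; i < input.length; i++) {
        double diff = input[i] - weights[i];
        sum += diff * diff; // accumulate squared differences
    }
    return Math.sqrt(sum);  // square root of the sum of squares
}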
/**
 * Calculate the error of a regression method over a dataset.
 *
 * @param method The regression method to evaluate.
 * @param data The dataset to evaluate against.
 * @return The calculated error.
 */
public static double calculateRegressionError(MLRegression method, MLDataSet data) {
    final ErrorCalculation errorCalculation = new ErrorCalculation();
    if (method instanceof MLContext) {
        ((MLContext) method).clearContext();
    }

    for (final MLDataPair pair : data) {
        final MLData actual = method.compute(pair.getInput());
        errorCalculation.updateError(
            actual.getData(), pair.getIdeal().getData(), pair.getSignificance());
    }
    return errorCalculation.calculate();
}
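/*
 * Hedged usage sketch for the error helper above: measure any trained MLRegression
 * against a dataset. The parameter names are placeholders for whatever model and
 * data the caller already has.
 */
public static void reportRegressionError(MLRegression model, MLDataSet dataset) {
    double error = calculateRegressionError(model, dataset);
    System.out.println("Regression error: " + error);
}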
/** {@inheritDoc} */
@Override
public MLData compute(MLData input) {
    if (input.size() != this.inputCount) {
        throw new EncogError("Invalid input size, must be " + inputCount);
    }

    // Evaluate the polynomial: weights[0] is the constant term, and weights[i + 1]
    // multiplies the (i + 1)-th power of the corresponding input value.
    double[] sum = new double[1];
    sum[0] += this.weights[0];
    for (int i = 0; i < input.size(); i++) {
        sum[0] += this.weights[i + 1] * Math.pow(input.getData(i), i + 1);
    }
    return new BasicMLData(sum);
}
@Override
protected void putPrediction(MLData output, Map<StateParameter, Double> nextState) {
    int i = 0;
    for (StateParameter stateParameter : StateParameter.values()) {
        if (stateParameter != width && stateParameter != height) {
            nextState.put(stateParameter, output.getData(i++));
        }
    }
}
/**
 * The main method.
 *
 * @param args No arguments are used.
 */
public static void main(final String[] args) {
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final StochasticGradientDescent train = new StochasticGradientDescent(network, trainingSet);
    train.setUpdateRule(new RMSPropUpdate());

    int epoch = 1;
    do {
        train.iteration();
        System.out.println("Epoch #" + epoch + " Error:" + train.getError());
        epoch++;
    } while (train.getError() > 0.01);
    train.finishTraining();

    // test the neural network
    System.out.println("Neural Network Results:");
    for (MLDataPair pair : trainingSet) {
        final MLData output = network.compute(pair.getInput());
        System.out.println(
            pair.getInput().getData(0) + "," + pair.getInput().getData(1)
                + ", actual=" + output.getData(0)
                + ",ideal=" + pair.getIdeal().getData(0));
    }

    Encog.getInstance().shutdown();
}
/** {@inheritDoc} */
@Override
public final void iteration() {
    if (this.mustInit) {
        initWeights();
    }

    double worstDistance = Double.NEGATIVE_INFINITY;

    for (final MLDataPair pair : this.training) {
        final MLData out = this.network.computeInstar(pair.getInput());

        // determine winner
        final int winner = EngineArray.indexOfLargest(out.getData());

        // calculate the distance
        double distance = 0;
        for (int i = 0; i < pair.getInput().size(); i++) {
            final double diff = pair.getInput().getData(i)
                - this.network.getWeightsInputToInstar().get(i, winner);
            distance += diff * diff;
        }
        distance = BoundMath.sqrt(distance);

        if (distance > worstDistance) {
            worstDistance = distance;
        }

        // train
        for (int j = 0; j < this.network.getInputCount(); j++) {
            final double delta = this.learningRate
                * (pair.getInput().getData(j)
                    - this.network.getWeightsInputToInstar().get(j, winner));
            this.network.getWeightsInputToInstar().add(j, winner, delta);
        }
    }

    setError(worstDistance);
}
/** Convert a network input vector back into a 9-cell board representation. */
public double[] converteEntradaEmTabuleiro(MLData entrada, boolean mantemNormalizacao) {
    double[] entradas = entrada.getData();
    double[] tabuleiro = new double[9];

    for (int i = 0; i < 9; i++) {
        // The first 9 entries encode X marks, the next 9 encode O marks.
        if (entradas[i] > 0) {
            tabuleiro[i] = mantemNormalizacao ? entradas[i] : Caractere.X.getValor();
        } else if (entradas[i + 9] < 0) {
            tabuleiro[i] = mantemNormalizacao ? entradas[i + 9] : Caractere.O.getValor();
        }
    }

    return tabuleiro;
}
/**
 * Used to map neurons to actual letters.
 *
 * @return The current mapping between neurons and letters as an array.
 */
char[] mapNeurons() {
    final char[] map = new char[this.letterListModel.size()];

    for (int i = 0; i < map.length; i++) {
        map[i] = '?';
    }

    for (int i = 0; i < this.letterListModel.size(); i++) {
        final MLData input = new BasicMLData(5 * 7);
        int idx = 0;
        final SampleData ds = (SampleData) this.letterListModel.getElementAt(i);
        for (int y = 0; y < ds.getHeight(); y++) {
            for (int x = 0; x < ds.getWidth(); x++) {
                input.setData(idx++, ds.getData(x, y) ? .5 : -.5);
            }
        }

        final int best = this.net.classify(input);
        map[best] = ds.getLetter();
    }

    return map;
}
/** {@inheritDoc} */
@Override
public String denormalizeColumn(ColumnDefinition colDef, MLData data, int dataColumn) {
    double value = data.getData(dataColumn);
    final double result =
        ((colDef.getLow() - colDef.getHigh()) * value
                - this.normalizedHigh * colDef.getLow()
                + colDef.getHigh() * this.normalizedLow)
            / (this.normalizedLow - this.normalizedHigh);

    // Typically caused by a number that should not have been normalized
    // (i.e. the normalized or actual range is infinitely small).
    if (Double.isNaN(result)) {
        return "" + (((this.normalizedHigh - this.normalizedLow) / 2) + this.normalizedLow);
    }
    return "" + result;
}
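/*
 * The expression above is the algebraic inverse of range normalization. A minimal
 * round-trip sketch with plain doubles; the ranges and the value 42 are illustrative.
 */
public static void rangeRoundTrip() {
    double low = 0, high = 100;   // actual data range
    double nLow = -1, nHigh = 1;  // normalized range
    double value = 42;

    // forward: map [low, high] onto [nLow, nHigh]
    double normalized = (value - low) / (high - low) * (nHigh - nLow) + nLow;

    // inverse: the same formula used by denormalizeColumn(), written out directly
    double restored = ((low - high) * normalized - nHigh * low + high * nLow)
        / (nLow - nHigh);

    // prints roughly -0.16 followed by the restored original, 42.0
    System.out.println(normalized + " -> " + restored);
}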
/** Compute the probability of the current query, using the specified input as evidence. */
public double computeProbability(MLData input) {
    // copy the input to evidence
    int inputIndex = 0;
    for (int i = 0; i < this.events.size(); i++) {
        BayesianEvent event = this.events.get(i);
        EventState state = this.query.getEventState(event);
        if (state.getEventType() == EventType.Evidence) {
            state.setValue((int) input.getData(inputIndex++));
        }
    }

    // execute the query
    this.query.execute();

    return this.query.getProbability();
}
/** {@inheritDoc} */
@Override
public MLData compute(final MLData input) {
    final MLData output = new BasicMLData(getOutputCount());
    this.flat.compute(input.getData(), output.getData());
    return output;
}
/** Determine the winning neuron, that is, the index of the largest output value. */
public int winner(MLData output) {
    return EngineArray.maxIndex(output.getData());
}
/** Decode the specified genome into a NEAT network generated over the substrate. */
public MLMethod decode(final NEATPopulation pop, final Substrate substrate, final Genome genome) {
    // obtain the CPPN
    final NEATCODEC neatCodec = new NEATCODEC();
    final NEATNetwork cppn = (NEATNetwork) neatCodec.decode(genome);

    final List<NEATLink> linkList = new ArrayList<NEATLink>();

    final ActivationFunction[] afs = new ActivationFunction[substrate.getNodeCount()];
    final ActivationFunction af = new ActivationSteepenedSigmoid();
    // all activation functions are the same
    for (int i = 0; i < afs.length; i++) {
        afs[i] = af;
    }

    final double c = this.maxWeight / (1.0 - this.minWeight);
    final MLData input = new BasicMLData(cppn.getInputCount());

    // First create all of the non-bias links.
    for (final SubstrateLink link : substrate.getLinks()) {
        final SubstrateNode source = link.getSource();
        final SubstrateNode target = link.getTarget();
        int index = 0;
        for (final double d : source.getLocation()) {
            input.setData(index++, d);
        }
        for (final double d : target.getLocation()) {
            input.setData(index++, d);
        }

        final MLData output = cppn.compute(input);

        double weight = output.getData(0);
        if (Math.abs(weight) > this.minWeight) {
            weight = (Math.abs(weight) - this.minWeight) * c * Math.signum(weight);
            linkList.add(new NEATLink(source.getId(), target.getId(), weight));
        }
    }

    // now create the bias links
    input.clear();
    final int d = substrate.getDimensions();
    final List<SubstrateNode> biasedNodes = substrate.getBiasedNodes();
    for (final SubstrateNode target : biasedNodes) {
        for (int i = 0; i < d; i++) {
            input.setData(d + i, target.getLocation()[i]);
        }

        final MLData output = cppn.compute(input);

        double biasWeight = output.getData(1);
        if (Math.abs(biasWeight) > this.minWeight) {
            biasWeight = (Math.abs(biasWeight) - this.minWeight) * c * Math.signum(biasWeight);
            linkList.add(new NEATLink(0, target.getId(), biasWeight));
        }
    }

    // check for invalid neural network
    if (linkList.size() == 0) {
        return null;
    }

    Collections.sort(linkList);

    final NEATNetwork network =
        new NEATNetwork(substrate.getInputCount(), substrate.getOutputCount(), linkList, afs);

    network.setActivationCycles(substrate.getActivationCycles());
    return network;
}