/**
 * Create a NEAT population.
 *
 * @param architecture The architecture string to use.
 * @param input The input count.
 * @param output The output count.
 * @return The population.
 */
public MLMethod create(final String architecture, final int input,
		final int output) {

	// NEAT requires at least one input and one output neuron.
	if (input <= 0) {
		throw new EncogError("Must have at least one input for NEAT.");
	}
	if (output <= 0) {
		throw new EncogError("Must have at least one output for NEAT.");
	}

	final Map<String, String> params = ArchitectureParse.parseParams(architecture);
	final ParamsHolder paramsHolder = new ParamsHolder(params);

	// Optional tuning parameters; defaults apply when absent from the
	// architecture string.
	final int popSize = paramsHolder.getInt(
			MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000);
	final int activationCycles = paramsHolder.getInt(
			MLMethodFactory.PROPERTY_CYCLES, false,
			NEATPopulation.DEFAULT_CYCLES);
	final ActivationFunction activation = this.factory.create(
			paramsHolder.getString(MLMethodFactory.PROPERTY_AF, false,
					MLActivationFactory.AF_SSIGMOID));

	final NEATPopulation population = new NEATPopulation(input, output, popSize);
	population.reset();
	population.setActivationCycles(activationCycles);
	population.setNEATActivationFunction(activation);

	return population;
}
/**
 * Create a SOM neighborhood trainer. (Javadoc previously said "LMA
 * trainer", copy-pasted from another factory.)
 *
 * @param method The method to use; must be a {@link SOM}.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SOM)) {
		throw new EncogError(
				"Neighborhood training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);

	final double learningRate = holder.getDouble(
			MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.7);
	final String neighborhoodStr = holder.getString(
			MLTrainFactory.PROPERTY_NEIGHBORHOOD, false, "rbf");
	final String rbfTypeStr = holder.getString(
			MLTrainFactory.PROPERTY_RBF_TYPE, false, "gaussian");

	// Resolve the RBF type; unrecognized names fall back to Gaussian.
	RBFEnum t;
	if (rbfTypeStr.equalsIgnoreCase("Gaussian")) {
		t = RBFEnum.Gaussian;
	} else if (rbfTypeStr.equalsIgnoreCase("Multiquadric")) {
		t = RBFEnum.Multiquadric;
	} else if (rbfTypeStr.equalsIgnoreCase("InverseMultiquadric")) {
		t = RBFEnum.InverseMultiquadric;
	} else if (rbfTypeStr.equalsIgnoreCase("MexicanHat")) {
		t = RBFEnum.MexicanHat;
	} else {
		t = RBFEnum.Gaussian;
	}

	// Resolve the neighborhood function. An unknown name leaves nf null,
	// preserving the original behavior. FIX: the "single" test was a
	// stray standalone "if" after the chain; it is now part of the
	// else-if chain (equivalent here, since the branches are mutually
	// exclusive, but no longer a latent trap).
	NeighborhoodFunction nf = null;
	if (neighborhoodStr.equalsIgnoreCase("bubble")) {
		nf = new NeighborhoodBubble(1);
	} else if (neighborhoodStr.equalsIgnoreCase("rbf")) {
		// Multi-dimensional RBF neighborhood: dimensions are required.
		final String str = holder.getString(
				MLTrainFactory.PROPERTY_DIMENSIONS, true, null);
		final int[] size = NumberList.fromListInt(CSVFormat.EG_FORMAT, str);
		nf = new NeighborhoodRBF(size, t);
	} else if (neighborhoodStr.equalsIgnoreCase("rbf1d")) {
		nf = new NeighborhoodRBF1D(t);
	} else if (neighborhoodStr.equalsIgnoreCase("single")) {
		nf = new NeighborhoodSingle();
	}

	final BasicTrainSOM result = new BasicTrainSOM((SOM) method,
			learningRate, training, nf);

	// Auto-decay is configured only when an explicit iteration count is
	// supplied in the arguments.
	if (args.containsKey(MLTrainFactory.PROPERTY_ITERATIONS)) {
		final int plannedIterations = holder.getInt(
				MLTrainFactory.PROPERTY_ITERATIONS, false, 1000);
		final double startRate = holder.getDouble(
				MLTrainFactory.PROPERTY_START_LEARNING_RATE, false, 0.05);
		final double endRate = holder.getDouble(
				MLTrainFactory.PROPERTY_END_LEARNING_RATE, false, 0.05);
		final double startRadius = holder.getDouble(
				MLTrainFactory.PROPERTY_START_RADIUS, false, 10);
		final double endRadius = holder.getDouble(
				MLTrainFactory.PROPERTY_END_RADIUS, false, 1);
		result.setAutoDecay(plannedIterations, startRate, endRate,
				startRadius, endRadius);
	}

	return result;
}
/**
 * Create a Manhattan propagation trainer.
 *
 * @param method The method to use; must be a {@link BasicNetwork}.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public final MLTrain create(
		final MLMethod method,
		final MLDataSet training,
		final String argsStr) {

	// FIX: validate the method type up front, consistent with the other
	// trainer factories in this file. Previously an unsuitable method
	// surfaced as a raw ClassCastException instead of an EncogError.
	if (!(method instanceof BasicNetwork)) {
		throw new EncogError(
				"Manhattan training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);

	// Optional learning rate; defaults to 0.1 when not specified.
	final double learningRate = holder.getDouble(
			MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

	return new ManhattanPropagation((BasicNetwork) method, training,
			learningRate);
}
/**
 * Create a SVM trainer.
 *
 * @param method The method to use.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SVM)) {
		throw new EncogError(
				"SVM Train training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final SVM svm = (SVM) method;

	// Defaults: gamma scales inversely with the input count; C defaults
	// to 1. Both may be overridden via the argument string.
	final double defaultGamma = 1.0 / svm.getInputCount();
	final double defaultC = 1.0;

	final ParamsHolder holder = new ParamsHolder(
			ArchitectureParse.parseParams(argsStr));
	final double gamma = holder.getDouble(MLTrainFactory.PROPERTY_GAMMA,
			false, defaultGamma);
	final double c = holder.getDouble(MLTrainFactory.PROPERTY_C, false,
			defaultC);

	final SVMTrain trainer = new SVMTrain(svm, training);
	trainer.setGamma(gamma);
	trainer.setC(c);
	return trainer;
}
/**
 * Create a RBF network.
 *
 * @param architecture THe architecture string to use.
 * @param input The input count.
 * @param output The output count.
 * @return The RBF network.
 */
public MLMethod create(final String architecture, final int input,
		final int output) {

	// An RBF network architecture has exactly three layer specifications.
	final List<String> layers = ArchitectureParse.parseLayers(architecture);
	if (layers.size() != MAX_LAYERS) {
		throw new EncogError("RBF Networks must have exactly three elements, "
				+ "separated by ->.");
	}

	final ArchitectureLayer inputLayer = ArchitectureParse.parseLayer(
			layers.get(0), input);
	final ArchitectureLayer rbfLayer = ArchitectureParse.parseLayer(
			layers.get(1), -1);
	final ArchitectureLayer outputLayer = ArchitectureParse.parseLayer(
			layers.get(2), output);

	final String rbfName = rbfLayer.getName();

	// Map the hidden layer's name onto an RBF type; unlike the trainer
	// factories, an unknown name here is an error rather than a default.
	final RBFEnum rbfType;
	if (rbfName.equalsIgnoreCase("Gaussian")) {
		rbfType = RBFEnum.Gaussian;
	} else if (rbfName.equalsIgnoreCase("Multiquadric")) {
		rbfType = RBFEnum.Multiquadric;
	} else if (rbfName.equalsIgnoreCase("InverseMultiquadric")) {
		rbfType = RBFEnum.InverseMultiquadric;
	} else if (rbfName.equalsIgnoreCase("MexicanHat")) {
		rbfType = RBFEnum.MexicanHat;
	} else {
		throw new NeuralNetworkError("Unknown RBF: " + rbfName);
	}

	// "C" (required) is the number of RBF neurons in the hidden layer.
	final int rbfCount = new ParamsHolder(rbfLayer.getParams())
			.getInt("C", true, 0);

	return new RBFNetwork(inputLayer.getCount(), rbfCount,
			outputLayer.getCount(), rbfType);
}
/**
 * Create a SVM search trainer, which grid-searches the gamma/C space.
 *
 * @param method The method to use; must be a {@link SVM}.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SVM)) {
		throw new EncogError(
				"SVM Train training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	// FIX: removed a stray "new ParamsHolder(args);" whose result was
	// immediately discarded (dead object creation).
	final ParamsHolder holder = new ParamsHolder(args);

	// Search range and step size for gamma and for the C constant; all
	// are optional with library defaults.
	final double gammaStart = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA1, false,
			SVMSearchTrain.DEFAULT_GAMMA_BEGIN);
	final double cStart = holder.getDouble(SVMSearchFactory.PROPERTY_C1,
			false, SVMSearchTrain.DEFAULT_CONST_BEGIN);
	final double gammaStop = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA2, false,
			SVMSearchTrain.DEFAULT_GAMMA_END);
	final double cStop = holder.getDouble(SVMSearchFactory.PROPERTY_C2,
			false, SVMSearchTrain.DEFAULT_CONST_END);
	final double gammaStep = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA_STEP, false,
			SVMSearchTrain.DEFAULT_GAMMA_STEP);
	final double cStep = holder.getDouble(SVMSearchFactory.PROPERTY_C_STEP,
			false, SVMSearchTrain.DEFAULT_CONST_STEP);

	final SVMSearchTrain result = new SVMSearchTrain((SVM) method, training);
	result.setGammaBegin(gammaStart);
	result.setGammaEnd(gammaStop);
	result.setGammaStep(gammaStep);
	result.setConstBegin(cStart);
	result.setConstEnd(cStop);
	result.setConstStep(cStep);
	return result;
}