/**
 * Create a SOM neighborhood trainer.
 *
 * @param method The method to use.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SOM)) {
		throw new EncogError(
				"Neighborhood training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);

	final double learningRate = holder.getDouble(
			MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.7);
	final String neighborhoodStr = holder.getString(
			MLTrainFactory.PROPERTY_NEIGHBORHOOD, false, "rbf");
	final String rbfTypeStr = holder.getString(
			MLTrainFactory.PROPERTY_RBF_TYPE, false, "gaussian");

	// Map the requested RBF type onto the enum; default to Gaussian.
	RBFEnum t;
	if (rbfTypeStr.equalsIgnoreCase("Gaussian")) {
		t = RBFEnum.Gaussian;
	} else if (rbfTypeStr.equalsIgnoreCase("Multiquadric")) {
		t = RBFEnum.Multiquadric;
	} else if (rbfTypeStr.equalsIgnoreCase("InverseMultiquadric")) {
		t = RBFEnum.InverseMultiquadric;
	} else if (rbfTypeStr.equalsIgnoreCase("MexicanHat")) {
		t = RBFEnum.MexicanHat;
	} else {
		t = RBFEnum.Gaussian;
	}

	// Select the neighborhood function named in the arguments.
	NeighborhoodFunction nf = null;
	if (neighborhoodStr.equalsIgnoreCase("bubble")) {
		nf = new NeighborhoodBubble(1);
	} else if (neighborhoodStr.equalsIgnoreCase("rbf")) {
		final String str = holder.getString(
				MLTrainFactory.PROPERTY_DIMENSIONS, true, null);
		final int[] size = NumberList.fromListInt(CSVFormat.EG_FORMAT, str);
		nf = new NeighborhoodRBF(size, t);
	} else if (neighborhoodStr.equalsIgnoreCase("rbf1d")) {
		nf = new NeighborhoodRBF1D(t);
	} else if (neighborhoodStr.equalsIgnoreCase("single")) {
		nf = new NeighborhoodSingle();
	}

	final BasicTrainSOM result = new BasicTrainSOM((SOM) method,
			learningRate, training, nf);

	// If an iteration count is given, configure automatic decay of the
	// learning rate and neighborhood radius over that many iterations.
	if (args.containsKey(MLTrainFactory.PROPERTY_ITERATIONS)) {
		final int plannedIterations = holder.getInt(
				MLTrainFactory.PROPERTY_ITERATIONS, false, 1000);
		final double startRate = holder.getDouble(
				MLTrainFactory.PROPERTY_START_LEARNING_RATE, false, 0.05);
		final double endRate = holder.getDouble(
				MLTrainFactory.PROPERTY_END_LEARNING_RATE, false, 0.05);
		final double startRadius = holder.getDouble(
				MLTrainFactory.PROPERTY_START_RADIUS, false, 10);
		final double endRadius = holder.getDouble(
				MLTrainFactory.PROPERTY_END_RADIUS, false, 1);
		result.setAutoDecay(plannedIterations, startRate, endRate,
				startRadius, endRadius);
	}

	return result;
}
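/*
 * Usage sketch, not part of the factory: it assumes the create() method above
 * belongs to Encog's NeighborhoodSOMFactory (org.encog.ml.factory.train), that
 * ArchitectureParse.parseParams() accepts comma-separated KEY=value pairs as
 * the code implies, and that the Encog 3.x package layout applies. The
 * argument string is assembled from the MLTrainFactory constants the factory
 * itself reads, so no literal key names are guessed.
 */
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.factory.MLTrainFactory;
import org.encog.ml.factory.train.NeighborhoodSOMFactory;
import org.encog.ml.train.MLTrain;
import org.encog.neural.som.SOM;

public final class NeighborhoodSOMFactoryExample {
	public static void main(final String[] args) {
		// A SOM mapping 4 inputs onto 4 output neurons.
		final SOM som = new SOM(4, 4);

		// Unsupervised training data: inputs only, no ideal values.
		final double[][] input = {
				{0.0, 0.0, 1.0, 1.0},
				{1.0, 1.0, 0.0, 0.0}};
		final MLDataSet training = new BasicMLDataSet(input, null);

		// Build the argument string from the same constants the factory reads.
		final String trainArgs = MLTrainFactory.PROPERTY_LEARNING_RATE + "=0.4,"
				+ MLTrainFactory.PROPERTY_NEIGHBORHOOD + "=rbf1d,"
				+ MLTrainFactory.PROPERTY_ITERATIONS + "=200";

		final MLTrain train = new NeighborhoodSOMFactory().create(som,
				training, trainArgs);

		// The returned trainer is a BasicTrainSOM; calling its autoDecay()
		// once per pass would step the rate/radius toward the end values
		// configured via the iteration count above.
		for (int i = 0; i < 200; i++) {
			train.iteration();
		}
	}
}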
/**
 * Create a Manhattan trainer.
 *
 * @param method The method to use.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public final MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {
	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);

	final double learningRate = holder.getDouble(
			MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

	return new ManhattanPropagation((BasicNetwork) method, training,
			learningRate);
}
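/*
 * Usage sketch, assuming the create() method above is Encog's ManhattanFactory
 * (org.encog.ml.factory.train) and the same KEY=value argument format as the
 * other training factories. The network below is the standard Encog XOR setup.
 */
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.factory.MLTrainFactory;
import org.encog.ml.factory.train.ManhattanFactory;
import org.encog.ml.train.MLTrain;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;

public final class ManhattanFactoryExample {
	public static void main(final String[] args) {
		// 2-3-1 feedforward network for XOR.
		final BasicNetwork network = new BasicNetwork();
		network.addLayer(new BasicLayer(null, true, 2));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
		network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
		network.getStructure().finalizeStructure();
		network.reset();

		final double[][] input = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
		final double[][] ideal = {{0}, {1}, {1}, {0}};
		final MLDataSet training = new BasicMLDataSet(input, ideal);

		// Manhattan propagation applies a fixed-size weight update, so the
		// learning rate (the update magnitude) is kept small.
		final MLTrain train = new ManhattanFactory().create(network, training,
				MLTrainFactory.PROPERTY_LEARNING_RATE + "=0.001");

		for (int i = 0; i < 100; i++) {
			train.iteration();
		}
	}
}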
/**
 * Create an SVM trainer.
 *
 * @param method The method to use.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SVM)) {
		throw new EncogError(
				"SVM Train training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	// Default gamma scales with the input count; default C is 1.0.
	final double defaultGamma = 1.0 / ((SVM) method).getInputCount();
	final double defaultC = 1.0;

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);
	final double gamma = holder.getDouble(MLTrainFactory.PROPERTY_GAMMA,
			false, defaultGamma);
	final double c = holder.getDouble(MLTrainFactory.PROPERTY_C, false,
			defaultC);

	final SVMTrain result = new SVMTrain((SVM) method, training);
	result.setGamma(gamma);
	result.setC(c);
	return result;
}
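/*
 * Usage sketch, assuming the create() method above is Encog's SVMFactory
 * (org.encog.ml.factory.train) and the Encog 3.x SVM constructor
 * SVM(inputCount, SVMType, KernelType); verify both against your version.
 */
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.factory.MLTrainFactory;
import org.encog.ml.factory.train.SVMFactory;
import org.encog.ml.svm.KernelType;
import org.encog.ml.svm.SVM;
import org.encog.ml.svm.SVMType;
import org.encog.ml.train.MLTrain;

public final class SVMFactoryExample {
	public static void main(final String[] args) {
		// Epsilon-SVR with an RBF kernel over 2 inputs.
		final SVM svm = new SVM(2, SVMType.EpsilonSupportVectorRegression,
				KernelType.RadialBasisFunction);

		final double[][] input = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
		final double[][] ideal = {{0}, {1}, {1}, {0}};
		final MLDataSet training = new BasicMLDataSet(input, ideal);

		// Override the defaults (gamma = 1/inputCount, C = 1.0) explicitly.
		final MLTrain train = new SVMFactory().create(svm, training,
				MLTrainFactory.PROPERTY_GAMMA + "=0.5,"
						+ MLTrainFactory.PROPERTY_C + "=2");

		// SVMTrain fits the underlying model in a single iteration call.
		train.iteration();
	}
}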
/**
 * Create an SVM search trainer.
 *
 * @param method The method to use.
 * @param training The training data to use.
 * @param argsStr The arguments to use.
 * @return The newly created trainer.
 */
public MLTrain create(final MLMethod method, final MLDataSet training,
		final String argsStr) {

	if (!(method instanceof SVM)) {
		throw new EncogError(
				"SVM search training cannot be used on a method of type: "
						+ method.getClass().getName());
	}

	final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
	final ParamsHolder holder = new ParamsHolder(args);

	// Search bounds and step sizes for gamma and C; unspecified values fall
	// back to the SVMSearchTrain defaults.
	final double gammaStart = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA1, false,
			SVMSearchTrain.DEFAULT_GAMMA_BEGIN);
	final double cStart = holder.getDouble(SVMSearchFactory.PROPERTY_C1,
			false, SVMSearchTrain.DEFAULT_CONST_BEGIN);
	final double gammaStop = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA2, false,
			SVMSearchTrain.DEFAULT_GAMMA_END);
	final double cStop = holder.getDouble(SVMSearchFactory.PROPERTY_C2,
			false, SVMSearchTrain.DEFAULT_CONST_END);
	final double gammaStep = holder.getDouble(
			SVMSearchFactory.PROPERTY_GAMMA_STEP, false,
			SVMSearchTrain.DEFAULT_GAMMA_STEP);
	final double cStep = holder.getDouble(SVMSearchFactory.PROPERTY_C_STEP,
			false, SVMSearchTrain.DEFAULT_CONST_STEP);

	final SVMSearchTrain result = new SVMSearchTrain((SVM) method, training);
	result.setGammaBegin(gammaStart);
	result.setGammaEnd(gammaStop);
	result.setGammaStep(gammaStep);
	result.setConstBegin(cStart);
	result.setConstEnd(cStop);
	result.setConstStep(cStep);
	return result;
}
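/*
 * Usage sketch, assuming the create() method above is Encog's SVMSearchFactory.
 * An empty argument string is assumed to parse to no overrides, leaving the
 * search at the SVMSearchTrain defaults; explicit bounds could be passed in
 * the same KEY=value form using the SVMSearchFactory.PROPERTY_* keys.
 */
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.factory.train.SVMSearchFactory;
import org.encog.ml.svm.KernelType;
import org.encog.ml.svm.SVM;
import org.encog.ml.svm.SVMType;
import org.encog.ml.train.MLTrain;

public final class SVMSearchFactoryExample {
	public static void main(final String[] args) {
		final SVM svm = new SVM(2, SVMType.EpsilonSupportVectorRegression,
				KernelType.RadialBasisFunction);

		final double[][] input = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
		final double[][] ideal = {{0}, {1}, {1}, {0}};
		final MLDataSet training = new BasicMLDataSet(input, ideal);

		// Grid-search gamma and C over the default ranges; each iteration
		// advances the search, so run it several times and watch the error.
		final MLTrain train = new SVMSearchFactory().create(svm, training, "");

		for (int i = 0; i < 20; i++) {
			train.iteration();
			System.out.println("Iteration " + i + ", error: "
					+ train.getError());
		}
	}
}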