/**
 * Create a feed forward network.
 *
 * <p>The architecture string is a list of layer elements separated by "->"
 * (split by {@code ArchitectureParse.parseLayers}). Each element is either an
 * activation-function name (which sets the activation used by the FOLLOWING
 * layers) or a layer definition with a neuron count. A count of "?" takes a
 * default: the first "?" defaults to the input count, any later "?" to the
 * output count; at most two "?"s are allowed.
 *
 * @param architecture The architecture string to use.
 * @param input The input count.
 * @param output The output count.
 * @return The feedforward network.
 * @throws EncogError if input/output counts are not positive, more than two
 *         "?" defaults are used, or an element cannot be parsed.
 */
public final MLMethod create(final String architecture, final int input, final int output) {
    // Validate the requested network dimensions before parsing anything.
    if (input <= 0) {
        throw new EncogError("Must have at least one input for feedforward.");
    }
    if (output <= 0) {
        throw new EncogError("Must have at least one output for feedforward.");
    }

    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);

    // Activation applied to layers until an activation element overrides it.
    ActivationFunction af = new ActivationLinear();

    // Counts how many "?" defaults have been consumed (max two allowed).
    int questionPhase = 0;

    for (final String layerStr : layers) {
        int defaultCount;

        // determine default
        // First "?" means the input count; any later "?" means the output count.
        if (questionPhase == 0) {
            defaultCount = input;
        } else {
            defaultCount = output;
        }

        final ArchitectureLayer layer = ArchitectureParse.parseLayer(layerStr, defaultCount);
        final boolean bias = layer.isBias();

        // Normalize the element name; null becomes "" so lookups below are safe.
        String part = layer.getName();
        if (part != null) {
            part = part.trim();
        } else {
            part = "";
        }

        // If the element names an activation function, it is NOT a layer:
        // remember it and apply it to the layers that follow.
        ActivationFunction lookup = this.factory.create(part);
        if (lookup != null) {
            af = lookup;
        } else {
            // Otherwise the element must be a layer definition.
            if (layer.isUsedDefault()) {
                questionPhase++;
                if (questionPhase > 2) {
                    throw new EncogError("Only two ?'s may be used.");
                }
            }
            // A zero count here means the element was neither a known
            // activation nor a parsable layer.
            if (layer.getCount() == 0) {
                throw new EncogError(
                        "Unknown architecture element: " + architecture + ", can't parse: " + part);
            }
            result.addLayer(new BasicLayer(af, bias, layer.getCount()));
        }
    }

    // Finalize the layer structure and randomize the initial weights.
    result.getStructure().finalizeStructure();
    result.reset();
    return result;
}
/**
 * Create a RBF network.
 *
 * <p>The architecture string must contain exactly three "->"-separated
 * elements: input layer, RBF layer (whose name selects the radial basis
 * function and whose "C" parameter gives the RBF count), and output layer.
 *
 * @param architecture The architecture string to use.
 * @param input The input count.
 * @param output The output count.
 * @return The RBF network.
 * @throws EncogError if the architecture does not have exactly three elements.
 * @throws NeuralNetworkError if the RBF function name is missing or unknown.
 */
public MLMethod create(final String architecture, final int input, final int output) {
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    if (layers.size() != MAX_LAYERS) {
        throw new EncogError("RBF Networks must have exactly three elements, "
                + "separated by ->.");
    }

    final ArchitectureLayer inputLayer = ArchitectureParse.parseLayer(layers.get(0), input);
    final ArchitectureLayer rbfLayer = ArchitectureParse.parseLayer(layers.get(1), -1);
    final ArchitectureLayer outputLayer = ArchitectureParse.parseLayer(layers.get(2), output);

    final int inputCount = inputLayer.getCount();
    final int outputCount = outputLayer.getCount();

    // Literal-first equalsIgnoreCase is null-safe: a missing RBF name now
    // produces the "Unknown RBF" error instead of a NullPointerException.
    final String rbfName = rbfLayer.getName();
    RBFEnum t;
    if ("Gaussian".equalsIgnoreCase(rbfName)) {
        t = RBFEnum.Gaussian;
    } else if ("Multiquadric".equalsIgnoreCase(rbfName)) {
        t = RBFEnum.Multiquadric;
    } else if ("InverseMultiquadric".equalsIgnoreCase(rbfName)) {
        t = RBFEnum.InverseMultiquadric;
    } else if ("MexicanHat".equalsIgnoreCase(rbfName)) {
        t = RBFEnum.MexicanHat;
    } else {
        throw new NeuralNetworkError("Unknown RBF: " + rbfName);
    }

    // "C" is the required hidden RBF neuron count.
    final ParamsHolder holder = new ParamsHolder(rbfLayer.getParams());
    final int rbfCount = holder.getInt("C", true, 0);

    final RBFNetwork result = new RBFNetwork(inputCount, rbfCount, outputCount, t);
    return result;
}
/**
 * Create the SVM.
 *
 * <p>The architecture string must contain exactly three "->"-separated
 * elements. The middle element names the mode ("C" for classification, "R"
 * for regression) and may carry "TYPE" (NEW/OLD formulation, NEW default)
 * and "KERNEL" (rbf default) parameters.
 *
 * @param architecture The architecture string.
 * @param input The input count.
 * @param output The output count.
 * @return The newly created SVM.
 * @throws EncogError on a malformed architecture, unknown type/mode/kernel,
 *         or an output count other than 1.
 */
public final MLMethod create(final String architecture, final int input, final int output) {
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    if (layers.size() != MAX_LAYERS) {
        throw new EncogError("SVM's must have exactly three elements, separated by ->.");
    }

    final ArchitectureLayer inputLayer = ArchitectureParse.parseLayer(layers.get(0), input);
    final ArchitectureLayer paramsLayer = ArchitectureParse.parseLayer(layers.get(1), input);
    final ArchitectureLayer outputLayer = ArchitectureParse.parseLayer(layers.get(2), output);

    final String mode = paramsLayer.getName();
    final String kernelStr = paramsLayer.getParams().get("KERNEL");
    final String svmTypeStr = paramsLayer.getParams().get("TYPE");

    // Choose between the NEW (nu-based) and OLD libsvm formulations;
    // an absent TYPE parameter means NEW.
    final boolean useNew;
    if (svmTypeStr == null || svmTypeStr.equalsIgnoreCase("NEW")) {
        useNew = true;
    } else if (svmTypeStr.equalsIgnoreCase("OLD")) {
        useNew = false;
    } else {
        throw new EncogError("Unsupported type: " + svmTypeStr + ", must be NEW or OLD.");
    }

    // Map the mode letter plus formulation choice onto the SVM type.
    final SVMType svmType;
    if (mode.equalsIgnoreCase("C")) {
        svmType = useNew
                ? SVMType.NewSupportVectorClassification
                : SVMType.SupportVectorClassification;
    } else if (mode.equalsIgnoreCase("R")) {
        svmType = useNew
                ? SVMType.NewSupportVectorRegression
                : SVMType.EpsilonSupportVectorRegression;
    } else {
        throw new EncogError(
                "Unsupported mode: " + mode + ", must be C for classify or R for regression.");
    }

    // Resolve the kernel; an absent KERNEL parameter means rbf.
    final KernelType kernelType;
    if (kernelStr == null || "rbf".equalsIgnoreCase(kernelStr)) {
        kernelType = KernelType.RadialBasisFunction;
    } else if ("linear".equalsIgnoreCase(kernelStr)) {
        kernelType = KernelType.Linear;
    } else if ("poly".equalsIgnoreCase(kernelStr)) {
        kernelType = KernelType.Poly;
    } else if ("precomputed".equalsIgnoreCase(kernelStr)) {
        kernelType = KernelType.Precomputed;
    } else if ("sigmoid".equalsIgnoreCase(kernelStr)) {
        kernelType = KernelType.Sigmoid;
    } else {
        throw new EncogError(
                "Unsupported kernel: " + kernelStr
                + ", must be linear,poly,precomputed,rbf or sigmoid.");
    }

    final int inputCount = inputLayer.getCount();
    final int outputCount = outputLayer.getCount();
    if (outputCount != 1) {
        throw new EncogError("SVM can only have an output size of 1.");
    }

    final SVM result = new SVM(inputCount, svmType, kernelType);
    return result;
}