/**
   * Create a feedforward network.
   *
   * @param architecture The architecture string to use.
   * @param input The input count.
   * @param output The output count.
   * @return The feedforward network.
   */
  public final MLMethod create(final String architecture, final int input, final int output) {

    if (input <= 0) {
      throw new EncogError("Must have at least one input for feedforward.");
    }

    if (output <= 0) {
      throw new EncogError("Must have at least one output for feedforward.");
    }

    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    ActivationFunction af = new ActivationLinear();

    // Counts "?" placeholders: the first defaults to the input count, the
    // second to the output count; more than two is an error.
    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
      if (questionPhase == 0) {
        defaultCount = input;
      } else {
        defaultCount = output;
      }

      final ArchitectureLayer layer = ArchitectureParse.parseLayer(layerStr, defaultCount);
      final boolean bias = layer.isBias();

      String part = layer.getName();
      if (part != null) {
        part = part.trim();
      } else {
        part = "";
      }

      ActivationFunction lookup = this.factory.create(part);

      // A recognized activation-function name does not add a layer; it only
      // sets the function used by the layers that follow.
      if (lookup != null) {
        af = lookup;
      } else {
        if (layer.isUsedDefault()) {
          questionPhase++;
          if (questionPhase > 2) {
            throw new EncogError("Only two ?'s may be used.");
          }
        }

        if (layer.getCount() == 0) {
          throw new EncogError(
              "Unknown architecture element: " + architecture + ", can't parse: " + part);
        }

        result.addLayer(new BasicLayer(af, bias, layer.getCount()));
      }
    }

    result.getStructure().finalizeStructure();
    result.reset();

    return result;
  }
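
  // Usage sketch, not from the source: assuming this method lives on Encog's
  // FeedforwardFactory, the two "?" markers expand to the input and output
  // counts, so this builds a 2-4-1 network with sigmoid hidden and output layers.
  MLMethod net = new FeedforwardFactory().create("?:B->SIGMOID->4:B->SIGMOID->?", 2, 1);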
  /**
   * Parse parameters.
   *
   * @param line The line to parse.
   * @return The parsed values.
   */
  public static Map<String, String> parseParams(final String line) {
    final Map<String, String> result = new HashMap<String, String>();

    final SimpleParser parser = new SimpleParser(line);

    while (!parser.eol()) {
      final String name = ArchitectureParse.parseName(parser).toUpperCase();

      parser.eatWhiteSpace();
      if (!parser.lookAhead("=", false)) {
        throw new EncogError("Missing equals(=) operator.");
      }
      parser.advance();

      final String value = ArchitectureParse.parseValue(parser);

      result.put(name, value);

      if (!parser.parseThroughComma()) {
        break;
      }
    }

    return result;
  }
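
  // Usage sketch: names are upper-cased and pairs split on commas, so this
  // illustrative call yields {"C" -> "5", "KERNEL" -> "rbf"}.
  Map<String, String> params = ArchitectureParse.parseParams("c=5,kernel=rbf");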
  /**
   * Create a NEAT population.
   *
   * @param architecture The architecture string to use.
   * @param input The input count.
   * @param output The output count.
   * @return The population.
   */
  public MLMethod create(final String architecture, final int input, final int output) {

    if (input <= 0) {
      throw new EncogError("Must have at least one input for NEAT.");
    }

    if (output <= 0) {
      throw new EncogError("Must have at least one output for NEAT.");
    }

    final Map<String, String> args = ArchitectureParse.parseParams(architecture);
    final ParamsHolder holder = new ParamsHolder(args);

    final int populationSize = holder.getInt(MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000);

    final int cycles =
        holder.getInt(MLMethodFactory.PROPERTY_CYCLES, false, NEATPopulation.DEFAULT_CYCLES);

    ActivationFunction af =
        this.factory.create(
            holder.getString(MLMethodFactory.PROPERTY_AF, false, MLActivationFactory.AF_SSIGMOID));

    NEATPopulation pop = new NEATPopulation(input, output, populationSize);
    pop.reset();
    pop.setActivationCycles(cycles);
    pop.setNEATActivationFunction(af);

    return pop;
  }
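
  // Usage sketch: assuming Encog's NEATFactory hosts this method, an empty
  // argument string falls back to a population of 1000 and DEFAULT_CYCLES.
  NEATPopulation pop = (NEATPopulation) new NEATFactory().create("", 2, 1);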
  /**
   * Create a SOM neighborhood trainer.
   *
   * @param method The method to use.
   * @param training The training data to use.
   * @param argsStr The arguments to use.
   * @return The newly created trainer.
   */
  public MLTrain create(final MLMethod method, final MLDataSet training, final String argsStr) {

    if (!(method instanceof SOM)) {
      throw new EncogError(
          "Neighborhood training cannot be used on a method of type: "
              + method.getClass().getName());
    }

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);

    final double learningRate = holder.getDouble(MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.7);
    final String neighborhoodStr =
        holder.getString(MLTrainFactory.PROPERTY_NEIGHBORHOOD, false, "rbf");
    final String rbfTypeStr = holder.getString(MLTrainFactory.PROPERTY_RBF_TYPE, false, "gaussian");

    RBFEnum t;

    if (rbfTypeStr.equalsIgnoreCase("Gaussian")) {
      t = RBFEnum.Gaussian;
    } else if (rbfTypeStr.equalsIgnoreCase("Multiquadric")) {
      t = RBFEnum.Multiquadric;
    } else if (rbfTypeStr.equalsIgnoreCase("InverseMultiquadric")) {
      t = RBFEnum.InverseMultiquadric;
    } else if (rbfTypeStr.equalsIgnoreCase("MexicanHat")) {
      t = RBFEnum.MexicanHat;
    } else {
      t = RBFEnum.Gaussian;
    }

    NeighborhoodFunction nf = null;

    if (neighborhoodStr.equalsIgnoreCase("bubble")) {
      nf = new NeighborhoodBubble(1);
    } else if (neighborhoodStr.equalsIgnoreCase("rbf")) {
      final String str = holder.getString(MLTrainFactory.PROPERTY_DIMENSIONS, true, null);
      final int[] size = NumberList.fromListInt(CSVFormat.EG_FORMAT, str);
      nf = new NeighborhoodRBF(size, t);
    } else if (neighborhoodStr.equalsIgnoreCase("rbf1d")) {
      nf = new NeighborhoodRBF1D(t);
    }
    if (neighborhoodStr.equalsIgnoreCase("single")) {
      nf = new NeighborhoodSingle();
    }

    final BasicTrainSOM result = new BasicTrainSOM((SOM) method, learningRate, training, nf);

    if (args.containsKey(MLTrainFactory.PROPERTY_ITERATIONS)) {
      final int plannedIterations = holder.getInt(MLTrainFactory.PROPERTY_ITERATIONS, false, 1000);
      final double startRate =
          holder.getDouble(MLTrainFactory.PROPERTY_START_LEARNING_RATE, false, 0.05);
      final double endRate =
          holder.getDouble(MLTrainFactory.PROPERTY_END_LEARNING_RATE, false, 0.05);
      final double startRadius = holder.getDouble(MLTrainFactory.PROPERTY_START_RADIUS, false, 10);
      final double endRadius = holder.getDouble(MLTrainFactory.PROPERTY_END_RADIUS, false, 1);
      result.setAutoDecay(plannedIterations, startRate, endRate, startRadius, endRadius);
    }

    return result;
  }
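
  // Usage sketch; the host class name (NeighborhoodSOMFactory) and the tiny
  // data set are illustrative assumptions, not from the source.
  SOM som = new SOM(4, 2);
  MLDataSet data = new BasicMLDataSet(new double[][] {{0.0, 0.0, 1.0, 1.0}}, null);
  MLTrain train = new NeighborhoodSOMFactory()
      .create(som, data, MLTrainFactory.PROPERTY_NEIGHBORHOOD + "=rbf1d");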
  /**
   * Create a Manhattan trainer.
   *
   * @param method The method to use.
   * @param training The training data to use.
   * @param argsStr The arguments to use.
   * @return The newly created trainer.
   */
  public final MLTrain create(
      final MLMethod method, final MLDataSet training, final String argsStr) {

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);

    final double learningRate = holder.getDouble(MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

    return new ManhattanPropagation((BasicNetwork) method, training, learningRate);
  }
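
  // Usage sketch; ManhattanFactory, net, and data are illustrative assumptions.
  // With an empty argument string the learning rate defaults to 0.1.
  MLTrain manhattan = new ManhattanFactory().create(net, data, "");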
  /**
   * Parse a layer.
   *
   * @param line The line to parse.
   * @param defaultValue The default value.
   * @return The parsed ArchitectureLayer.
   */
  public static ArchitectureLayer parseLayer(final String line, final int defaultValue) {
    final ArchitectureLayer layer = new ArchitectureLayer();

    String check = line.trim().toUpperCase();

    // first check for bias
    if (check.endsWith(":B")) {
      check = check.substring(0, check.length() - 2);
      layer.setBias(true);
    }

    // see if simple number
    try {
      layer.setCount(Integer.parseInt(check));
      if (layer.getCount() < 0) {
        throw new EncogError("Count cannot be less than zero.");
      }
    } catch (final NumberFormatException f) {
      // Don't really care! Just checking to see if it's a number.
    }

    // see if it is a default
    if ("?".equals(check)) {
      if (defaultValue < 0) {
        throw new EncogError("Default (?) in an invalid location.");
      } else {
        layer.setCount(defaultValue);
        layer.setUsedDefault(true);
        return layer;
      }
    }

    // single item, no function
    final int startIndex = check.indexOf('(');
    final int endIndex = check.lastIndexOf(')');
    if (startIndex == -1) {
      layer.setName(check);
      return layer;
    }

    // function
    if (endIndex == -1) {
      throw new EncogError("Illegal parentheses.");
    }

    layer.setName(check.substring(0, startIndex).trim());

    final String paramStr = check.substring(startIndex + 1, endIndex);
    final Map<String, String> params = ArchitectureParse.parseParams(paramStr);
    layer.getParams().putAll(params);
    return layer;
  }
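
  // Usage sketch with illustrative inputs: a plain count with a bias marker,
  // then a named element with parameters.
  ArchitectureLayer a = ArchitectureParse.parseLayer("10:B", -1);
  // a.getCount() == 10, a.isBias() == true
  ArchitectureLayer b = ArchitectureParse.parseLayer("gaussian(c=4)", -1);
  // b.getName() -> "GAUSSIAN", b.getParams().get("C") -> "4"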
  /**
   * Create a RBF network.
   *
   * @param architecture The architecture string to use.
   * @param input The input count.
   * @param output The output count.
   * @return The RBF network.
   */
  public MLMethod create(final String architecture, final int input, final int output) {

    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    if (layers.size() != MAX_LAYERS) {
      throw new EncogError("RBF Networks must have exactly three elements, " + "separated by ->.");
    }

    final ArchitectureLayer inputLayer = ArchitectureParse.parseLayer(layers.get(0), input);
    final ArchitectureLayer rbfLayer = ArchitectureParse.parseLayer(layers.get(1), -1);
    final ArchitectureLayer outputLayer = ArchitectureParse.parseLayer(layers.get(2), output);

    final int inputCount = inputLayer.getCount();
    final int outputCount = outputLayer.getCount();

    RBFEnum t;

    if (rbfLayer.getName().equalsIgnoreCase("Gaussian")) {
      t = RBFEnum.Gaussian;
    } else if (rbfLayer.getName().equalsIgnoreCase("Multiquadric")) {
      t = RBFEnum.Multiquadric;
    } else if (rbfLayer.getName().equalsIgnoreCase("InverseMultiquadric")) {
      t = RBFEnum.InverseMultiquadric;
    } else if (rbfLayer.getName().equalsIgnoreCase("MexicanHat")) {
      t = RBFEnum.MexicanHat;
    } else {
      throw new NeuralNetworkError("Unknown RBF: " + rbfLayer.getName());
    }

    final ParamsHolder holder = new ParamsHolder(rbfLayer.getParams());

    final int rbfCount = holder.getInt("C", true, 0);

    final RBFNetwork result = new RBFNetwork(inputCount, rbfCount, outputCount, t);

    return result;
  }
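
  // Usage sketch (RBFNetworkFactory as the host class is an assumption):
  // two inputs, four Gaussian RBF neurons, one output.
  MLMethod rbf = new RBFNetworkFactory().create("?->GAUSSIAN(c=4)->?", 2, 1);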
  /**
   * Create a SVM trainer.
   *
   * @param method The method to use.
   * @param training The training data to use.
   * @param argsStr The arguments to use.
   * @return The newly created trainer.
   */
  public MLTrain create(final MLMethod method, final MLDataSet training, final String argsStr) {

    if (!(method instanceof SVM)) {
      throw new EncogError(
          "SVM training cannot be used on a method of type: " + method.getClass().getName());
    }

    final double defaultGamma = 1.0 / ((SVM) method).getInputCount();
    final double defaultC = 1.0;

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double gamma = holder.getDouble(MLTrainFactory.PROPERTY_GAMMA, false, defaultGamma);
    final double c = holder.getDouble(MLTrainFactory.PROPERTY_C, false, defaultC);

    final SVMTrain result = new SVMTrain((SVM) method, training);
    result.setGamma(gamma);
    result.setC(c);
    return result;
  }
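
  // Usage sketch; the host class (a trainer-side SVMFactory) and data set are
  // assumptions. Empty args fall back to gamma = 1/inputCount and C = 1.0.
  SVM svm = new SVM(4, SVMType.NewSupportVectorClassification, KernelType.RadialBasisFunction);
  MLTrain svmTrain = new SVMFactory().create(svm, data, "");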
  /**
   * Create a SVM search trainer.
   *
   * @param method The method to use.
   * @param training The training data to use.
   * @param argsStr The arguments to use.
   * @return The newly created trainer.
   */
  public MLTrain create(final MLMethod method, final MLDataSet training, final String argsStr) {

    if (!(method instanceof SVM)) {
      throw new EncogError(
          "SVM search training cannot be used on a method of type: "
              + method.getClass().getName());
    }

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);

    final ParamsHolder holder = new ParamsHolder(args);
    final double gammaStart =
        holder.getDouble(
            SVMSearchFactory.PROPERTY_GAMMA1, false, SVMSearchTrain.DEFAULT_GAMMA_BEGIN);
    final double cStart =
        holder.getDouble(SVMSearchFactory.PROPERTY_C1, false, SVMSearchTrain.DEFAULT_CONST_BEGIN);
    final double gammaStop =
        holder.getDouble(SVMSearchFactory.PROPERTY_GAMMA2, false, SVMSearchTrain.DEFAULT_GAMMA_END);
    final double cStop =
        holder.getDouble(SVMSearchFactory.PROPERTY_C2, false, SVMSearchTrain.DEFAULT_CONST_END);
    final double gammaStep =
        holder.getDouble(
            SVMSearchFactory.PROPERTY_GAMMA_STEP, false, SVMSearchTrain.DEFAULT_GAMMA_STEP);
    final double cStep =
        holder.getDouble(
            SVMSearchFactory.PROPERTY_C_STEP, false, SVMSearchTrain.DEFAULT_CONST_STEP);

    final SVMSearchTrain result = new SVMSearchTrain((SVM) method, training);

    result.setGammaBegin(gammaStart);
    result.setGammaEnd(gammaStop);
    result.setGammaStep(gammaStep);
    result.setConstBegin(cStart);
    result.setConstEnd(cStop);
    result.setConstStep(cStep);

    return result;
  }
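
  // Usage sketch (SVMSearchFactory as the host class is an assumption): empty
  // args sweep gamma and C across the DEFAULT_* begin/end/step ranges.
  MLTrain search = new SVMSearchFactory().create(svm, data, "");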
  /**
   * Create the SVM.
   *
   * @param architecture The architecture string.
   * @param input The input count.
   * @param output The output count.
   * @return The newly created SVM.
   */
  public final MLMethod create(final String architecture, final int input, final int output) {

    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    if (layers.size() != MAX_LAYERS) {
      throw new EncogError("SVM's must have exactly three elements, separated by ->.");
    }

    final ArchitectureLayer inputLayer = ArchitectureParse.parseLayer(layers.get(0), input);
    final ArchitectureLayer paramsLayer = ArchitectureParse.parseLayer(layers.get(1), input);
    final ArchitectureLayer outputLayer = ArchitectureParse.parseLayer(layers.get(2), output);

    final String name = paramsLayer.getName();
    final String kernelStr = paramsLayer.getParams().get("KERNEL");
    final String svmTypeStr = paramsLayer.getParams().get("TYPE");

    SVMType svmType = SVMType.NewSupportVectorClassification;
    KernelType kernelType = KernelType.RadialBasisFunction;

    boolean useNew;

    if (svmTypeStr == null || svmTypeStr.equalsIgnoreCase("NEW")) {
      useNew = true;
    } else if (svmTypeStr.equalsIgnoreCase("OLD")) {
      useNew = false;
    } else {
      throw new EncogError("Unsupported type: " + svmTypeStr + ", must be NEW or OLD.");
    }

    if (name.equalsIgnoreCase("C")) {
      if (useNew) {
        svmType = SVMType.NewSupportVectorClassification;
      } else {
        svmType = SVMType.SupportVectorClassification;
      }
    } else if (name.equalsIgnoreCase("R")) {
      if (useNew) {
        svmType = SVMType.NewSupportVectorRegression;
      } else {
        svmType = SVMType.EpsilonSupportVectorRegression;
      }
    } else {
      throw new EncogError(
          "Unsupported mode: " + name + ", must be C for classify or R for regression.");
    }

    if (kernelStr == null) {
      kernelType = KernelType.RadialBasisFunction;
    } else if ("linear".equalsIgnoreCase(kernelStr)) {
      kernelType = KernelType.Linear;
    } else if ("poly".equalsIgnoreCase(kernelStr)) {
      kernelType = KernelType.Poly;
    } else if ("precomputed".equalsIgnoreCase(kernelStr)) {
      kernelType = KernelType.Precomputed;
    } else if ("rbf".equalsIgnoreCase(kernelStr)) {
      kernelType = KernelType.RadialBasisFunction;
    } else if ("sigmoid".equalsIgnoreCase(kernelStr)) {
      kernelType = KernelType.Sigmoid;
    } else {
      throw new EncogError(
          "Unsupported kernel: " + kernelStr
              + ", must be linear, poly, precomputed, rbf or sigmoid.");
    }

    final int inputCount = inputLayer.getCount();
    final int outputCount = outputLayer.getCount();

    if (outputCount != 1) {
      throw new EncogError("SVM can only have an output size of 1.");
    }

    final SVM result = new SVM(inputCount, svmType, kernelType);

    return result;
  }
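
  // Usage sketch (a method-side SVMFactory is an assumed host class): a
  // new-style RBF-kernel classifier with four inputs and one output.
  MLMethod svm = new SVMFactory().create("?->C(KERNEL=rbf)->?", 4, 1);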