Example #1
File: Main.java  Project: alexrook/seisus
  public static void main(String[] args) {

    Main m = new Main();

    // Any command-line argument triggers version or usage output, then exits.
    if (args.length > 0) {
      if (args[0].toLowerCase().contains("v")) {
        m.showVersion();
        System.exit(0);
      }

      m.showUsage();

      System.exit(0);
    }

    // Main itself is passed to listFiles() as the file filter that selects the vector files
    // in the current directory.
    File curDir = new File(".");
    File[] vectors = curDir.listFiles(m);

    // listFiles() returns null if the directory cannot be read; expect exactly three vector files.
    if (vectors == null || vectors.length != 3) {
      m.showUsage();
      System.exit(1);
    }

    // Parse each vector file, choosing the parser's vector type from the file-name suffix.
    for (File file : vectors) {
      VectorParser parser = new VectorParser();
      String fileName = file.getName().toLowerCase();
      if (fileName.endsWith("x.txt")) {
        parser.setVectorType(VectorParser.VectorType.X);
      } else if (fileName.endsWith("y.txt")) {
        parser.setVectorType(VectorParser.VectorType.Y);
      } else if (fileName.endsWith("z.txt")) {
        parser.setVectorType(VectorParser.VectorType.Z);
      }
      try {
        parser.parse(file);
      } catch (IOException ex) {
        m.showError(ex);
        System.exit(1);
      }
    }

    try {
      // Write the result via the W singleton; the getInstace spelling is as in the project source.
      W.getInstace().write();
    } catch (IOException ex) {
      m.showError(ex);
      System.exit(1);
    }
  }
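
Note that curDir.listFiles(m) passes the Main instance itself as the file filter, so Main must implement java.io.FileFilter (or FilenameFilter); that method is not shown in this excerpt. A minimal sketch of what such an accept() could look like, assuming the FileFilter variant and the same x.txt/y.txt/z.txt naming convention used in the parsing loop:

  // Hypothetical sketch only; the real accept() in alexrook/seisus is not shown above.
  @Override
  public boolean accept(File pathname) {
    String name = pathname.getName().toLowerCase();
    return pathname.isFile()
        && (name.endsWith("x.txt") || name.endsWith("y.txt") || name.endsWith("z.txt"));
  }
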
  /**
   * Creates and returns a new instance of Multi Layer Perceptron
   *
   * @param layersStr space separated number of neurons in layers
   * @param transferFunctionType transfer function type for neurons
   * @param learningRule learning rule class to set on the created network
   * @param useBias whether the neurons use a bias input
   * @param connectIO whether to also connect the inputs directly to the outputs
   * @return instance of Multi Layer Perceptron
   */
  public static MultiLayerPerceptron createMLPerceptron(
      String layersStr,
      TransferFunctionType transferFunctionType,
      Class learningRule,
      boolean useBias,
      boolean connectIO) {
    ArrayList<Integer> layerSizes = VectorParser.parseInteger(layersStr);
    NeuronProperties neuronProperties = new NeuronProperties(transferFunctionType, useBias);
    MultiLayerPerceptron nnet = new MultiLayerPerceptron(layerSizes, neuronProperties);

    // set learning rule - TODO: use reflection here
    if (learningRule.getName().equals(BackPropagation.class.getName())) {
      nnet.setLearningRule(new BackPropagation());
    } else if (learningRule.getName().equals(MomentumBackpropagation.class.getName())) {
      nnet.setLearningRule(new MomentumBackpropagation());
    } else if (learningRule.getName().equals(DynamicBackPropagation.class.getName())) {
      nnet.setLearningRule(new DynamicBackPropagation());
    } else if (learningRule.getName().equals(ResilientPropagation.class.getName())) {
      nnet.setLearningRule(new ResilientPropagation());
    }

    // connect io
    if (connectIO) {
      nnet.connectInputsToOutputs();
    }

    return nnet;
  }
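
A possible call to this factory, assuming the Neuroph classes referenced above (TransferFunctionType, MomentumBackpropagation); the layer string "4 8 2" is only an illustrative topology, not something prescribed by the method. The two boolean flags correspond to useBias and connectIO, in that order:

  MultiLayerPerceptron nnet =
      createMLPerceptron(
          "4 8 2", TransferFunctionType.SIGMOID, MomentumBackpropagation.class, true, false);
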
  /**
   * Creates and returns a new instance of Multi Layer Perceptron
   *
   * @param layersStr space separated number of neurons in layers
   * @param transferFunctionType transfer function type for neurons
   * @return instance of Multi Layer Perceptron
   */
  public static MultiLayerPerceptron createMLPerceptron(
      String layersStr, TransferFunctionType transferFunctionType) {
    ArrayList<Integer> layerSizes = VectorParser.parseInteger(layersStr);
    MultiLayerPerceptron nnet = new MultiLayerPerceptron(layerSizes, transferFunctionType);
    return nnet;
  }
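
For completeness, a sketch of calling the two-argument overload; the 2-4-1 topology and the TANH transfer function are illustrative assumptions only:

  MultiLayerPerceptron smallNet = createMLPerceptron("2 4 1", TransferFunctionType.TANH);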