/**
   * The test main for independent component analysis.
   *
   * @param args ignored
   */
  public static void main(String[] args) {
    // Generate a simple two-dimensional source: a sine wave in the first
    // component and uniform noise in the second.
    Instance[] instances = new Instance[100];
    for (int i = 0; i < instances.length; i++) {
      double[] data = new double[2];
      data[0] = Math.sin(i / 2.0);
      data[1] = (Math.random() - .5) * 2;
      instances[i] = new Instance(data);
    }
    DataSet set = new DataSet(instances);
    System.out.println("Before randomizing");
    System.out.println(set);
    // Mix the two components.  The commented-out matrix is invertible; the
    // active one is rank 1, so both observed dimensions carry the same mixture.
    //        Matrix projection = new RectangularMatrix(new double[][]{ {.6, .6}, {.4, .6}});
    Matrix projection = new RectangularMatrix(new double[][] {{.1, .1}, {.1, .1}});

    for (int i = 0; i < set.size(); i++) {
      Instance instance = set.get(i);
      instance.setData(projection.times(instance.getData()));
    }
    System.out.println("Before ICA");
    System.out.println(set);
    IndependentComponentAnalysis filter = new IndependentComponentAnalysis(set, 1);
    filter.filter(set);
    System.out.println("After ICA");
    System.out.println(set);
  }
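One caveat worth noting: ICA can only unmix the sources when the mixing matrix is non-singular, and the active rank-1 matrix above collapses both observed dimensions onto the same mixture, while the commented-out alternative is invertible. A minimal sketch of a guard in plain Java (the helper name isInvertible2x2 is ours, not part of the snippet):

  /** Returns true when a 2x2 mixing matrix has a non-zero determinant. */
  private static boolean isInvertible2x2(double[][] m) {
    // For the active matrix above: .1 * .1 - .1 * .1 = 0, i.e. singular.
    double det = m[0][0] * m[1][1] - m[0][1] * m[1][0];
    return Math.abs(det) > 1e-12;
  }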
 /**
  * Tests out a feed-forward network on a 4-bit variant of the classic xor problem
  *
  * @param args ignored
  */
 public static void main(String[] args) {
   double[][][] data = {
     {{1, 1, 1, 1}, {0}},
     {{1, 0, 1, 0}, {1}},
     {{0, 1, 0, 1}, {1}},
     {{0, 0, 0, 0}, {0}}
   };
   Instance[] patterns = new Instance[data.length];
   for (int i = 0; i < patterns.length; i++) {
     patterns[i] = new Instance(data[i][0]);
     patterns[i].setLabel(new Instance(data[i][1]));
   }
   FeedForwardNeuralNetworkFactory factory = new FeedForwardNeuralNetworkFactory();
   FeedForwardNetwork network = factory.createClassificationNetwork(new int[] {4, 3, 1});
   ErrorMeasure measure = new SumOfSquaresError();
   DataSet set = new DataSet(patterns);
   NeuralNetworkOptimizationProblem nno =
       new NeuralNetworkOptimizationProblem(set, network, measure);
   OptimizationAlgorithm o = new RandomizedHillClimbing(nno);
   FixedIterationTrainer fit = new FixedIterationTrainer(o, 5000);
   fit.train();
   Instance opt = o.getOptimal();
   network.setWeights(opt.getData());
   for (int i = 0; i < patterns.length; i++) {
     network.setInputValues(patterns[i].getData());
     network.run();
     System.out.println("~~");
     System.out.println(patterns[i].getLabel());
     System.out.println(network.getOutputValues());
   }
 }
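The same xor patterns could also be fit with a gradient-based trainer instead of randomized hill climbing. A sketch, assuming ABAGAIL's func.nn.backprop package is available alongside the classes used above (the method name trainWithBackprop is ours):

 public static void trainWithBackprop(Instance[] patterns) {
   // Build a backprop-capable network of the same 4-3-1 shape.
   BackPropagationNetworkFactory factory = new BackPropagationNetworkFactory();
   BackPropagationNetwork network = factory.createClassificationNetwork(new int[] {4, 3, 1});
   DataSet set = new DataSet(patterns);
   // Train with resilient propagation for the same fixed iteration budget.
   FixedIterationTrainer fit =
       new FixedIterationTrainer(
           new BatchBackPropagationTrainer(
               set, network, new SumOfSquaresError(), new RPROPUpdateRule()),
           5000);
   fit.train();
 }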
Example #3
File: RCA.java  Project: grosueugen/ml-7641
 private static void saveAsArff(DataSet dataSet, String outputFile, List<String> labels) {
   writeArffHeader(dataSet, outputFile);
   Instance[] instances = dataSet.getInstances();
   for (Instance instance : instances) {
     Vector vector = instance.getData();
     for (int i = 0; i < vector.size(); i++) {
       MLAssignmentUtils.writeToFile(outputFile, String.valueOf(vector.get(i)) + ",", true);
     }
     MLAssignmentUtils.writeToFile(outputFile, getLabel(instance.getLabel(), labels) + "\n", true);
   }
 }
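A hedged usage sketch for saveAsArff; the output path and label names below are placeholders, and writeArffHeader / MLAssignmentUtils are project-local helpers assumed to be on the classpath:

  // Hypothetical call site: export a data set, one comma-separated row per
  // instance, with the class name appended as the last column.
  List<String> labels = java.util.Arrays.asList("positive", "negative"); // placeholder names
  saveAsArff(dataSet, "rca-output.arff", labels); // placeholder path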
Example #4
File: RCA.java  Project: grosueugen/ml-7641
 private static String getLabel(Instance label, List<String> labels) {
   Vector vector = label.getData();
   for (int i = 0; i < vector.size(); i++) {
     if (vector.get(i) == 1) {
       return labels.get(i);
     }
   }
    throw new RuntimeException("No label component with value 1 found");
 }
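getLabel assumes one-hot encoded labels: exactly one component of the label vector equals 1, and its index selects the class name. A minimal example of a call:

  // One-hot decoding: (0, 1, 0) with labels ["a", "b", "c"] resolves to "b".
  Instance label = new Instance(new double[] {0, 1, 0});
  String name = getLabel(label, java.util.Arrays.asList("a", "b", "c")); // "b"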
Example #5
  /** Randomly partitions the data set into foldCount equally sized folds. */
  public void filter(DataSet data) {
    int foldSize = data.size() / foldCount;
    Random rand = new Random();

    for (int currentFold = 0; currentFold < foldCount; currentFold++) {
      DataSet currentSet = new DataSet(new Instance[foldSize], data.getDescription());
      int i = 0;
      while (i < foldSize) {
        // Draw random positions; instances already claimed by a fold have been
        // nulled out in the source set and are skipped below.
        int position = rand.nextInt(data.size());
        Instance instance = data.get(position);
        if (instance != null && instance.getData() != null) {
          currentSet.set(i, instance);
          data.set(position, null);
          i++;
        }
      }
      this.folds.add(currentSet);
    }
  }
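A hedged usage sketch for the fold filter above; the enclosing class name KFoldSplitFilter, its constructor, and the getFolds() accessor are assumptions about the surrounding project rather than names shown in the snippet:

  // Hypothetical usage: partition a data set into 10 folds for cross-validation.
  // Note that filter() is destructive: it nulls out instances in the source set.
  KFoldSplitFilter splitter = new KFoldSplitFilter(10); // assumed constructor taking foldCount
  splitter.filter(dataSet);
  for (DataSet fold : splitter.getFolds()) { // assumed accessor for the folds field
    System.out.println("fold size: " + fold.size());
  }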
  public void run(int iterations) throws Exception {
    // 1) Construct data instances for training.  These will also be run
    //    through the network at the bottom to verify the output
    CSVDataSetReader reader = new CSVDataSetReader("data/letter_training_new.data");
    DataSet set = reader.read();
    LabelSplitFilter flt = new LabelSplitFilter();
    flt.filter(set);
    DataSetLabelBinarySeperator.seperateLabels(set);
    DataSetDescription desc = set.getDescription();
    DataSetDescription labelDesc = desc.getLabelDescription();

    // 2) Instantiate a network using the FeedForwardNeuralNetworkFactory.  This network
    //    will be our classifier.
    FeedForwardNeuralNetworkFactory factory = new FeedForwardNeuralNetworkFactory();
    // 2a) These numbers correspond to the number of nodes in each layer: one
    //     input node per attribute, a heuristically sized hidden layer, and
    //     one output node per discrete label value.
    FeedForwardNetwork network =
        factory.createClassificationNetwork(
            new int[] {
              desc.getAttributeCount(),
              factory.getOptimalHiddenLayerNodes(desc, labelDesc),
              labelDesc.getDiscreteRange()
            });

    // 3) Instantiate a measure, which is used to evaluate each possible set of weights.
    ErrorMeasure measure = new SumOfSquaresError();

    // 4) The DataSet, which adapts a set of instances to the optimization problem,
    //    was already constructed by the reader in step 1, so the template's
    //    "DataSet set = new DataSet(patterns)" step is skipped here.

    // 5) Instantiate an optimization problem, which is used to specify the dataset, evaluation
    //    function, mutator and crossover function (for Genetic Algorithms), and any other
    //    parameters used in optimization.
    NeuralNetworkOptimizationProblem nno =
        new NeuralNetworkOptimizationProblem(set, network, measure);

    // 6) Instantiate a specific OptimizationAlgorithm, which defines how we pick our next potential
    //    hypothesis.
    OptimizationAlgorithm o = new RandomizedHillClimbing(nno);

    // 7) Instantiate a trainer.  The FixedIterationTrainer takes another trainer (in this case,
    //    an OptimizationAlgorithm) and executes it a specified number of times.
    FixedIterationTrainer fit = new FixedIterationTrainer(o, iterations);

    // 8) Run the trainer.  This may take a little while, depending on the
    //    OptimizationAlgorithm, size of the data, and number of iterations.
    fit.train();

    // 9) Once training is done, get the optimal solution from the OptimizationAlgorithm.
    //    These are the optimal weights found for this network.
    Instance opt = o.getOptimal();
    network.setWeights(opt.getData());

    // 10) Run the training data through the network with the weights discovered through
    //     optimization, and print out the expected label and result of the classifier
    //     for each instance.
    int[] labels = {0, 1};
    TestMetric acc = new AccuracyTestMetric();
    TestMetric cm = new ConfusionMatrixTestMetric(labels);
    Tester t = new NeuralNetworkTester(network, acc, cm);
    t.test(set.getInstances());

    acc.printResults();
  }
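A minimal entry point for the walkthrough above; the enclosing class name LetterRecognitionTest is a placeholder, since the snippet does not show it:

  public static void main(String[] args) throws Exception {
    // 5000 iterations mirrors the fixed-iteration budget used in the xor example above.
    new LetterRecognitionTest().run(5000);
  }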