Example #1
  @Override
  public CategoricalResults classify(DataPoint data) {
    CategoricalResults cr = new CategoricalResults(predicting.getNumOfCategories());

    // Use a priority queue so that we always pick the two lowest-valued class labels, which makes
    // indexing into the oneVone array simple
    PriorityQueue<Integer> options = new PriorityQueue<Integer>(predicting.getNumOfCategories());
    for (int i = 0; i < cr.size(); i++) options.add(i);

    CategoricalResults subRes;
    int c1, c2;
    // Repeatedly pick the two lowest remaining candidate classes, classify with their pairwise
    // model, and eliminate the loser until only one winner remains
    while (options.size() > 1) {
      c1 = options.poll();
      c2 = options.poll();

      subRes = oneVone[c1][c2 - c1 - 1].classify(data);

      if (subRes.mostLikely() == 0) // c1 wins, c2 no longer a candidate
        options.add(c1);
      else // c2 wins, c1 no longer a candidate
        options.add(c2);
    }

    cr.setProb(options.peek(), 1.0);

    return cr;
  }
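
The c2 - c1 - 1 offset in the lookup above works because oneVone is a jagged array: row c1 holds only the classifiers that pair class c1 with the higher-numbered classes. Below is a minimal standalone sketch of that layout; the class count k and the String placeholders are illustrative only, not part of the class above.

import java.util.Arrays;

public class PairwiseIndexDemo {
  public static void main(String[] args) {
    int k = 4; // illustrative number of classes
    // Jagged layout: row c1 holds the pairs (c1, c2) with c2 > c1,
    // so pair (c1, c2) lives at pairs[c1][c2 - c1 - 1]
    String[][] pairs = new String[k - 1][];
    for (int c1 = 0; c1 < k - 1; c1++) {
      pairs[c1] = new String[k - c1 - 1];
      for (int c2 = c1 + 1; c2 < k; c2++)
        pairs[c1][c2 - c1 - 1] = c1 + " vs " + c2;
    }
    for (String[] row : pairs)
      System.out.println(Arrays.toString(row)); // [0 vs 1, 0 vs 2, 0 vs 3] ...
  }
}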
Example #2
  @Override
  public void trainC(ClassificationDataSet dataSet, ExecutorService threadPool) {
    final int models = baseClassifiers.size();
    final int C = dataSet.getClassSize();
    weightsPerModel = C == 2 ? 1 : C;
    ClassificationDataSet metaSet =
        new ClassificationDataSet(
            weightsPerModel * models, new CategoricalData[0], dataSet.getPredicting());

    List<ClassificationDataSet> dataFolds = dataSet.cvSet(folds);
    // iterate in the order of the folds so we get the right datum weights
    for (ClassificationDataSet cds : dataFolds)
      for (int i = 0; i < cds.getSampleSize(); i++)
        metaSet.addDataPoint(
            new DenseVector(weightsPerModel * models),
            cds.getDataPointCategory(i),
            cds.getDataPoint(i).getWeight());

    // create the meta training set
    for (int c = 0; c < baseClassifiers.size(); c++) {
      Classifier cl = baseClassifiers.get(c);
      int pos = 0;
      for (int f = 0; f < dataFolds.size(); f++) {
        ClassificationDataSet train = ClassificationDataSet.comineAllBut(dataFolds, f);
        ClassificationDataSet test = dataFolds.get(f);
        if (threadPool == null) cl.trainC(train);
        else cl.trainC(train, threadPool);
        // evaluate and mark each point in the held-out fold
        for (int i = 0; i < test.getSampleSize(); i++) {
          CategoricalResults pred = cl.classify(test.getDataPoint(i));
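          // C == 2: keep one signed score per model, P(class 0) mapped from [0, 1] to [-1, 1];
          // otherwise keep all C class probabilities in this model's column block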
          if (C == 2)
            metaSet.getDataPoint(pos).getNumericalValues().set(c, pred.getProb(0) * 2 - 1);
          else {
            Vec toSet = metaSet.getDataPoint(pos).getNumericalValues();
            for (int j = weightsPerModel * c; j < weightsPerModel * (c + 1); j++)
              toSet.set(j, pred.getProb(j - weightsPerModel * c));
          }

          pos++;
        }
      }
    }

    // train the meta model
    if (threadPool == null) aggregatingClassifier.trainC(metaSet);
    else aggregatingClassifier.trainC(metaSet, threadPool);

    // train the final classifiers, unless folds=1. In that case they are already trained
    if (folds != 1) {
      for (Classifier cl : baseClassifiers)
        if (threadPool == null) cl.trainC(dataSet);
        else cl.trainC(dataSet, threadPool);
    }
  }
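
The column arithmetic above (weightsPerModel * c through weightsPerModel * (c + 1)) gives each base classifier its own contiguous block of meta-features. Here is a small standalone sketch of that layout, using made-up probability values instead of real classifier output.

import java.util.Arrays;

public class MetaColumnLayoutDemo {
  public static void main(String[] args) {
    int models = 2, C = 3;                // illustrative counts
    int weightsPerModel = C == 2 ? 1 : C; // same rule as trainC above
    double[] metaRow = new double[weightsPerModel * models];
    // One made-up probability vector per base model
    double[][] fakeProbs = {{0.7, 0.2, 0.1}, {0.1, 0.3, 0.6}};
    for (int c = 0; c < models; c++)
      for (int j = weightsPerModel * c; j < weightsPerModel * (c + 1); j++)
        metaRow[j] = fakeProbs[c][j - weightsPerModel * c]; // same index math as trainC
    System.out.println(Arrays.toString(metaRow)); // [0.7, 0.2, 0.1, 0.1, 0.3, 0.6]
  }
}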
Example #3
  @Override
  public CategoricalResults classify(DataPoint data) {
    Vec w = new DenseVector(weightsPerModel * baseClassifiers.size());
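    // Rebuild the meta-feature vector with the same per-model column layout used during training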
    if (weightsPerModel == 1)
      for (int i = 0; i < baseClassifiers.size(); i++)
        w.set(i, baseClassifiers.get(i).classify(data).getProb(0) * 2 - 1);
    else {
      for (int i = 0; i < baseClassifiers.size(); i++) {
        CategoricalResults pred = baseClassifiers.get(i).classify(data);
        for (int j = 0; j < weightsPerModel; j++) w.set(i * weightsPerModel + j, pred.getProb(j));
      }
    }

    return aggregatingClassifier.classify(new DataPoint(w));
  }
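
In the two-class case both trainC and classify collapse each base model's output to a single signed score, P(class 0) * 2 - 1: +1 means certain of class 0 and -1 means certain of class 1. A tiny illustration with made-up probabilities:

public class SignedScoreDemo {
  public static void main(String[] args) {
    double[] probClass0 = {1.0, 0.75, 0.5, 0.0}; // made-up P(class 0) values
    for (double p : probClass0)
      // prints 1.0, 0.5, 0.0, -1.0: the signed meta-feature used when C == 2
      System.out.println(p + " -> " + (p * 2 - 1));
  }
}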