Example #1
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @exception Exception if distribution can't be computed
   */
  @Override
  public double[] distributionForInstance(Instance instance) throws Exception {

    double[] probs = new double[instance.numClasses()];
    int attIndex;

    for (int j = 0; j < instance.numClasses(); j++) {
      probs[j] = 1;
      Enumeration<Attribute> enumAtts = instance.enumerateAttributes();
      attIndex = 0;
      while (enumAtts.hasMoreElements()) {
        Attribute attribute = enumAtts.nextElement();
        if (!instance.isMissing(attribute)) {
          if (attribute.isNominal()) {
            probs[j] *= m_Counts[j][attIndex][(int) instance.value(attribute)];
          } else {
            probs[j] *=
                normalDens(instance.value(attribute), m_Means[j][attIndex], m_Devs[j][attIndex]);
          }
        }
        attIndex++;
      }
      probs[j] *= m_Priors[j];
    }

    // Normalize probabilities
    Utils.normalize(probs);

    return probs;
  }
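
For context, a method like the one above is normally driven through the standard Weka Classifier API: build the model on a training set, then request the per-class probabilities for each test instance. The following is a minimal, hypothetical usage sketch; the ARFF file name and the choice of NaiveBayes are placeholders, not part of the example above.

// Minimal usage sketch (assumed dataset path and classifier; adjust to your setup).
import java.io.FileReader;
import weka.classifiers.Classifier;
import weka.classifiers.bayes.NaiveBayes;
import weka.core.Instance;
import weka.core.Instances;

public class DistributionDemo {
  public static void main(String[] args) throws Exception {
    Instances data = new Instances(new FileReader("iris.arff")); // hypothetical dataset path
    data.setClassIndex(data.numAttributes() - 1);                // class is the last attribute

    Classifier model = new NaiveBayes();
    model.buildClassifier(data);

    Instance first = data.instance(0);
    double[] probs = model.distributionForInstance(first);       // same contract as Example #1
    for (int j = 0; j < probs.length; j++) {
      System.out.printf("P(%s) = %.4f%n", data.classAttribute().value(j), probs[j]);
    }
  }
}
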
Example #2
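  /**
   * Returns the combined, weighted votes of the ensemble members for the given instance.
   * Members with a non-positive weight end the loop; members whose error is below
   * errorRatio also contribute to the confidence pool used for active learning.
   *
   * @param inst the instance to get the votes for
   * @return the combined class votes of the ensemble
   */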
  public double[] getVotesForInstance(Instance inst) {
    DoubleVector combinedVote = new DoubleVector();
    DoubleVector confidenceVec = new DoubleVector();

    int success = 0;
    double[] ensembleVotes = new double[inst.numClasses()];
    double[] ensMemberWeights = new double[this.ensemble.length];
    boolean[] ensMemberFlags = new boolean[this.ensemble.length];
    double confidence = 0.0;

    for (int i = 0; i < this.ensemble.length; i++) {
      if (!ensMemberFlags[i]) {
        ensMemberWeights[i] = getEnsembleMemberWeight(i);
      }
      if (ensMemberWeights[i] > 0.0) {
        DoubleVector vote = new DoubleVector(this.ensemble[i].getVotesForInstance(inst));
        if (vote.sumOfValues() > 0.0) {
          vote.normalize();
          vote.scaleValues(ensMemberWeights[i]);
          combinedVote.addValues(vote);
          if (getEnsembleMemberError(i) < errorRatio) {
            // Count this member as a confident voter and accumulate its votes per class.
            success++;
            confidenceVec.addValues(vote);
            ensembleVotes[confidenceVec.maxIndex()] +=
                confidenceVec.getValue(confidenceVec.maxIndex());
          }
        }
      } else {
        break;
      }
    }

    confidenceVec = (DoubleVector) combinedVote.copy();
    confidenceVec.normalize();
    confidence = confidenceVec.getValue(confidenceVec.maxIndex());
    // Reconfigure the activeLearningRatio.
    // For the confidence measure, add the instance to the pool; to keep the confidence value
    // between 0 and 1, divide by the success value.
    if (confidence > confidenceThreshold) {
      double qbcEntropy =
          QBC.queryByCommitee(ensembleVotes, inst.numClasses(), success, ensemble.length);
      qbcEntropy = Math.pow(qbcEntropy, 2); // square the entropy (the original discarded this result)
      System.out.println("QBC Entropy: " + qbcEntropy);
      double activeLearningRatio = getActiveLearningRatio(qbcEntropy, combinedVote);
      inst.setClassValue(combinedVote.maxIndex()); // Set the class value of the instance
      instConfPool.addVotedInstance(
          inst, combinedVote.getValue(combinedVote.maxIndex()), activeLearningRatio);
      instConfCount++;
    }

    return combinedVote.getArrayRef();
  }
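
The heart of the combination step above is easy to isolate: normalize each member's raw votes so they sum to one, scale them by the member's weight, and add them into the running total. Below is a standalone sketch of just that arithmetic, using plain arrays instead of the DoubleVector type used above; the original breaks out of the loop at the first non-positive weight, while this sketch simply skips such members. All names and numbers here are illustrative.

// Weighted soft-vote combination in isolation; a sketch, not the original ensemble code.
public class WeightedVoteSketch {

  static double[] combineVotes(double[][] memberVotes, double[] memberWeights, int numClasses) {
    double[] combined = new double[numClasses];
    for (int i = 0; i < memberVotes.length; i++) {
      double sum = 0.0;
      for (double v : memberVotes[i]) sum += v;
      if (memberWeights[i] <= 0.0 || sum <= 0.0) continue; // skip zero-weight members and empty votes
      for (int c = 0; c < numClasses; c++) {
        combined[c] += memberWeights[i] * (memberVotes[i][c] / sum); // normalize, then weight
      }
    }
    return combined;
  }

  public static void main(String[] args) {
    double[][] votes = {{3, 1, 0}, {0, 2, 2}}; // two members, three classes (made-up numbers)
    double[] weights = {0.7, 0.3};
    System.out.println(java.util.Arrays.toString(combineVotes(votes, weights, 3)));
  }
}
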
Example #3
  /**
   * Use <code> classifyInstance </code> from <code> OSDLCore </code> and assign probability one to
   * the chosen label. The implementation is heavily based on the same method in the <code>
   *  Classifier </code> class.
   *
   * @param instance the instance to be classified
   * @return an array containing a single '1' at the index that <code> classifyInstance </code>
   *     returns.
   */
  public double[] distributionForInstance(Instance instance) {

    // based on the code from the Classifier class
    double[] dist = new double[instance.numClasses()];
    double classification = 0;
    switch (instance.classAttribute().type()) {
      case Attribute.NOMINAL:
        try {
          classification = classifyInstance(instance);
        } catch (Exception e) {
          System.out.println("There was a problem with classifyInstance");
          System.out.println(e.getMessage());
          e.printStackTrace();
        }
        if (Utils.isMissingValue(classification)) {
          return dist;
        }
        dist[(int) Math.round(classification)] = 1.0;
        return dist;

      case Attribute.NUMERIC:
        try {
          dist[0] = classifyInstance(instance);
        } catch (Exception e) {
          System.out.println("There was a problem with classifyIntance");
          System.out.println(e.getMessage());
          e.printStackTrace();
        }
        return dist;

      default:
        return dist;
    }
  }
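  /**
   * Classifies the given instance by choosing the class index with the highest probability
   * according to <code> getProbs </code>.
   *
   * @param instance the instance to be classified
   * @return the index of the most probable class, as a double
   * @throws Exception if the probabilities cannot be computed
   */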
  public double classifyInstance(Instance instance) throws Exception {
    double maxProb = -1;
    double currentProb;
    int maxIndex = 0;
    for (int j = 0; j < instance.numClasses(); j++) {
      currentProb = getProbs(j, instance, 1);
      if (Utils.gr(currentProb, maxProb)) {
        maxIndex = j;
        maxProb = currentProb;
      }
    }
    return (double) maxIndex;
  }
  /**
   * Classifies an instance.
   *
   * @param instance the instance to classify
   * @return the classification
   * @throws Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    double maxProb = -1;
    int maxIndex = 0;

    // classify by maximum probability
    double[] probs = distributionForInstance(instance);
    for (int j = 0; j < instance.numClasses(); j++) {
      if (Utils.gr(probs[j], maxProb)) {
        maxIndex = j;
        maxProb = probs[j];
      }
    }
    return (double) maxIndex;
  }
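
Both classifyInstance variants above implement the same argmax-over-classes loop. In Weka the selection can also be written with weka.core.Utils.maxIndex, which returns the index of the first maximum and therefore matches (up to Weka's small-value tolerance in Utils.gr) the strict comparison used above. A sketch, assuming it lives in the same class so distributionForInstance is available:

  // Equivalent argmax written with Utils.maxIndex (illustrative sketch, not the original code).
  public double classifyInstance(Instance instance) throws Exception {
    double[] probs = distributionForInstance(instance);
    return Utils.maxIndex(probs); // index of the largest probability
  }
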
Example #6
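    /**
     * Returns the base classifier's distribution, sharpened to probability one for the
     * predicted label when that label is contained in <code> sureClasses </code>, and
     * flattened to a uniform distribution otherwise.
     *
     * @param instance the instance to be classified
     * @return the adjusted class probability distribution
     * @throws Exception if the base classifier cannot compute a distribution
     */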
    @Override
    public double[] distributionForInstance(Instance instance) throws Exception {
      double[] distribution = getClassifier().distributionForInstance(instance);
      int maxIndex = 0;
      for (int i = 0; i < distribution.length; i++) {
        if (distribution[maxIndex] < distribution[i]) maxIndex = i;
      }
      final String maxLabel = instance.classAttribute().value(maxIndex);
      if (sureClasses.contains(maxLabel)) {
        Arrays.fill(distribution, 0.0);
        distribution[maxIndex] = 1.0;
        System.err.println("INFO: Hacked confidence of '" + maxLabel + "'.");
      } else {
        Arrays.fill(distribution, 1.0d / instance.numClasses());
      }

      return distribution;
    }
Example #7
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @exception Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {
    if (instance.classAttribute().isNumeric()) {
      throw new UnsupportedClassTypeException("Decorate can't handle a numeric class!");
    }
    double[] sums = new double[instance.numClasses()], newProbs;
    Classifier curr;

    for (int i = 0; i < m_Committee.size(); i++) {
      curr = (Classifier) m_Committee.get(i);
      newProbs = curr.distributionForInstance(instance);
      for (int j = 0; j < newProbs.length; j++) sums[j] += newProbs[j];
    }
    if (Utils.eq(Utils.sum(sums), 0)) {
      return sums;
    } else {
      Utils.normalize(sums);
      return sums;
    }
  }
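
The zero-sum guard at the end matters because Utils.normalize divides every entry by the array sum and cannot handle a zero sum; returning the untouched zero vector is the "unclassified" fallback when every committee member abstains. The same averaging logic, written as a standalone sketch over a hypothetical committee list:

// Committee averaging in isolation (illustrative sketch, not the Decorate source).
import java.util.List;
import weka.classifiers.Classifier;
import weka.core.Instance;
import weka.core.Utils;

public class CommitteeAverageSketch {
  static double[] averageCommittee(List<Classifier> committee, Instance instance) throws Exception {
    double[] sums = new double[instance.numClasses()];
    for (Classifier member : committee) {
      double[] memberProbs = member.distributionForInstance(instance);
      for (int j = 0; j < memberProbs.length; j++) {
        sums[j] += memberProbs[j];
      }
    }
    if (!Utils.eq(Utils.sum(sums), 0)) {
      Utils.normalize(sums); // divides by the total; skipped when every member abstained
    }
    return sums;
  }
}
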
Example #8
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if instance could not be classified successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    double[] sums = new double[instance.numClasses()];
    for (int i = -1; i < m_NumIterationsPerformed; i++) {
      double prob = 1, shrinkage = m_Shrinkage;
      if (i == -1) {
        prob = m_ZeroR.distributionForInstance(instance)[0];
        shrinkage = 1.0;
      } else {
        prob = m_Classifiers[i].distributionForInstance(instance)[0];

        // Make sure that probabilities are never 0 or 1 using ad-hoc smoothing
        prob = (m_SumOfWeights * prob + 1) / (m_SumOfWeights + 2);
      }
      sums[0] += shrinkage * 0.5 * (Math.log(prob) - Math.log(1 - prob));
    }
    sums[1] = -sums[0];
    return Utils.logs2probs(sums);
  }
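
In the two-class case above, sums ends up holding {F, -F}, where F is the accumulated half-log-odds, and Utils.logs2probs exponentiates and renormalizes those values, which reduces to the logistic function. A tiny numeric check of that equivalence; the value of F is made up:

// Checks that the two-class log-odds-to-probability step equals the logistic function.
public class Logs2ProbsSketch {
  public static void main(String[] args) {
    double f = 0.7;                                      // hypothetical accumulated half-log-odds
    double[] sums = {f, -f};
    // exp(F) / (exp(F) + exp(-F)) == 1 / (1 + exp(-2F)); Utils.logs2probs(sums)[0] gives the same value.
    double p0Softmax = Math.exp(sums[0]) / (Math.exp(sums[0]) + Math.exp(sums[1]));
    double p0Logistic = 1.0 / (1.0 + Math.exp(-2.0 * f));
    System.out.println(p0Softmax + " vs " + p0Logistic); // both are approximately 0.8022
  }
}
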
  /**
   * Predicts the class memberships for a given instance. If an instance is unclassified, the
   * returned array elements must be all zero. If the class is numeric, the array must consist of
   * only one element, which contains the predicted value. Note that a classifier MUST implement
   * either this or classifyInstance().
   *
   * @param instance the instance to be classified
   * @return an array containing the estimated membership probabilities of the test instance in each
   *     class or the numeric prediction
   * @exception Exception if distribution could not be computed successfully
   */
  @Override
  public double[] distributionForInstance(Instance instance) throws Exception {

    double[] dist = new double[instance.numClasses()];
    switch (instance.classAttribute().type()) {
      case Attribute.NOMINAL:
        double classification = classifyInstance(instance);
        if (Utils.isMissingValue(classification)) {
          return dist;
        } else {
          dist[(int) classification] = 1.0;
        }
        return dist;
      case Attribute.NUMERIC:
      case Attribute.DATE:
        dist[0] = classifyInstance(instance);
        return dist;
      default:
        return dist;
    }
  }
  /**
   * Internal function for determining the class distribution for an instance; will be overridden by
   * derived classes. <br>
   * Here we just retrieve the class value for the given instance that was calculated during
   * building our classifier. It just returns "1" for the class that was predicted, no real
   * distribution. Note: the ReplaceMissingValues filter is applied to a copy of the given instance.
   *
   * @param instance the instance to get the distribution for
   * @return the distribution for the given instance
   * @see Classifier#distributionForInstance(Instance)
   * @see ReplaceMissingValues
   * @throws Exception if something goes wrong
   */
  protected double[] getDistribution(Instance instance) throws Exception {
    double[] result;
    int i;

    result = new double[instance.numClasses()];
    for (i = 0; i < result.length; i++) result[i] = 0.0;

    i = Arrays.binarySearch(m_LabeledTestset, instance, new InstanceComparator(false));
    if (i >= 0) {
      result[(int) m_LabeledTestset[i].classValue()] = 1.0;
    } else {
      CollectiveHelper.writeToTempFile("train.txt", m_TrainsetNew.toString());
      CollectiveHelper.writeToTempFile("test.txt", m_TestsetNew.toString());
      // binarySearch returns -(insertionPoint) - 1 when the instance is not found
      int insertionPoint = -i - 1;
      throw new Exception(
          "Cannot find test instance: " + instance + "\n -> pos=" + i
              + (insertionPoint < m_LabeledTestset.length
                  ? " = " + m_LabeledTestset[insertionPoint]
                  : " (insertion point is past the end of the labeled test set)"));
    }

    return result;
  }
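
The Arrays.binarySearch lookup above only finds the instance if m_LabeledTestset was sorted beforehand with the same InstanceComparator. A short sketch of that pairing; the array and query names are hypothetical, and the required imports are java.util.Arrays, weka.core.Instance and weka.core.InstanceComparator:

  // Sort and look up with the same comparator (illustrative fragment, not the original code).
  InstanceComparator comp = new InstanceComparator(false);   // false: ignore the class attribute
  Arrays.sort(labeledCopy, comp);                            // labeledCopy is a hypothetical Instance[]
  int pos = Arrays.binarySearch(labeledCopy, query, comp);   // query is a hypothetical Instance
  if (pos >= 0) {
    double predictedClass = labeledCopy[pos].classValue();   // found: reuse the stored class value
  } else {
    int insertionPoint = -pos - 1;                           // not found: binarySearch encodes the gap
  }
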
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @throws Exception if class is numeric
   */
  public double[] distributionForInstance(Instance instance) throws Exception {
    if (m_GroovyObject != null) {
      return m_GroovyObject.distributionForInstance(instance);
    } else {
      return new double[instance.numClasses()];
    }
  }