Example 1
  /**
   * Inserts a classifier into the population. First it checks whether the population already
   * contains a classifier with the same action and condition; if so, it increments that
   * classifier's numerosity instead of adding a new one. Then it checks that the number of
   * micro classifiers does not exceed the maximum population size; if it does, one classifier
   * is deleted from the population by calling deleteClFromPopulation. If the action set passed
   * as a parameter is not null, its counters are kept up to date as well.
   *
   * @param cl is the classifier that has to be inserted into the population.
   * @param ASet is the action set, whose counters are updated if it contains the classifier.
   */
  public void insertInPopulation(Classifier cl, Population ASet) {
    boolean found = false;
    int i = 0;

    while (i < macroClSum && !found) {
      if (set[i].equals(cl)) {
        set[i].increaseNumerosity(cl.getNumerosity());
        microClSum += cl.getNumerosity();
        if (ASet != null) {
          if (ASet.isThereClassifier(set[i]) >= 0) ASet.microClSum += cl.getNumerosity();
        }
        found = true;
      }
      i++;
    }
    if (!found) {
      addClassifier(cl);
    }

    // Here, the classifier has been added to the population
    if (microClSum > Config.popSize) { // If we have inserted too many classifiers, delete one.
      deleteClFromPopulation(ASet);
    }
  } // end insertInPopulation
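A minimal, self-contained sketch of the macro/micro bookkeeping described above, under simplified assumptions: the classifier is reduced to a condition string, an action and a numerosity, and the names MiniClassifier, MiniPopulation and POP_SIZE are illustrative, not part of the original code base.

import java.util.ArrayList;
import java.util.List;

class MiniClassifier {
  final String condition; // e.g. "01#1", with '#' as the don't-care symbol
  final int action;
  int numerosity = 1;

  MiniClassifier(String condition, int action) {
    this.condition = condition;
    this.action = action;
  }

  boolean sameConditionAndAction(MiniClassifier other) {
    return condition.equals(other.condition) && action == other.action;
  }
}

class MiniPopulation {
  static final int POP_SIZE = 400; // plays the role of Config.popSize
  final List<MiniClassifier> set = new ArrayList<>(); // macroclassifiers
  int microClSum = 0; // sum of all numerosities (microclassifiers)

  void insert(MiniClassifier cl) {
    boolean found = false;
    for (MiniClassifier existing : set) {
      if (existing.sameConditionAndAction(cl)) {
        existing.numerosity += cl.numerosity; // absorbed as extra numerosity
        found = true;
        break;
      }
    }
    if (!found) {
      set.add(cl); // a genuinely new macroclassifier
    }
    microClSum += cl.numerosity;
    if (microClSum > POP_SIZE) {
      // the real code deletes one (micro)classifier here; see deleteClFromPopulation below
    }
  }
}

Keeping one macroclassifier per distinct (condition, action) pair with a numerosity counter is what bounds the population by microclassifiers rather than by array slots.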
Example 2
 /**
  * It creates a new Population with only the sufficiently experienced classifiers.
  *
  * @param maxReward is the maximum reward of the environment.
  * @return a Population containing only the sufficiently experienced classifiers
  */
 public Population deleteNotExpClassifiers(double maxReward) {
   // We create a population of size Config.popSize instead of macroClSum because, if it is
   // used during training, this population can grow (new classifiers can be added).
   Population pExp = new Population(Config.popSize);
   for (int i = 0; i < macroClSum; i++) {
     if (set[i].couldReduce(maxReward)) {
       pExp.addClassifier(set[i]);
     }
   }
   return pExp;
 } // end deleteNotExpClassifiers
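A hedged sketch of this kind of filtering, assuming a hypothetical experience counter and threshold in place of the couldReduce(maxReward) test used by the real code; the record and method names are illustrative only.

import java.util.List;
import java.util.stream.Collectors;

class ExperiencedFilterSketch {
  record Rule(String condition, int action, int experience) {}

  /** Returns only the rules whose experience reaches the threshold; the input list is untouched. */
  static List<Rule> keepExperienced(List<Rule> rules, int minExperience) {
    return rules.stream()
        .filter(r -> r.experience() >= minExperience) // stands in for couldReduce(maxReward)
        .collect(Collectors.toList());
  }
}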
Example 3
  /**
   * This constructor creates the match set of the population. It applies covering to the
   * uncovered actions while fewer than theta_mna actions are covered, using the actionCovered
   * array to keep track of which actions are represented.
   *
   * @param envState is the state of the input (the match set takes the classifiers that match it).
   * @param pop is the population of the system (it contains all the classifiers).
   * @param tStamp is the current time stamp. It is needed to create the new classifiers.
   * @param isExploreExecution indicates whether the current step is an explore or an exploit
   *     trial, because the covering operator is only applied in explore trials.
   */
  public Population(double[] envState, Population pop, int tStamp, boolean isExploreExecution) {
    int pos = 0;
    // A population of size parent.numerosity + numberOfActions is needed,
    // because in the worst case, it will have to cover all the actions.
    set = new Classifier[pop.macroClSum + Config.numberOfActions];
    microClSum = 0;
    macroClSum = 0;
    parentRef = pop;
    specify = new Specify();

    boolean[] actionCovered = new boolean[Config.numberOfActions];
    for (pos = 0; pos < actionCovered.length; pos++) {
      actionCovered[pos] = false;
    }

    for (pos = 0; pos < pop.getMacroClSum(); pos++) {
      if (pop.set[pos].match(envState)) {
        addClassifier(pop.set[pos]);
        actionCovered[pop.set[pos].getAction()] = true;
      }
    }

    if (isExploreExecution) {
      Covering cov = new Covering();
      cov.coverActions(pop, this, envState, tStamp, actionCovered);
    }
  } // end Population
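A condensed sketch of the same two phases (matching, then covering the actions that no matching classifier proposes), assuming ternary string conditions with '#' as the don't-care symbol. For brevity it covers every missing action with a fully general rule, whereas the real code applies covering only in explore trials, while fewer than theta_mna actions are covered, and generalizes the current state with random don't-care symbols; all names here are illustrative.

import java.util.ArrayList;
import java.util.List;

class MatchSetSketch {
  static final int NUMBER_OF_ACTIONS = 2; // plays the role of Config.numberOfActions

  record Rule(String condition, int action) {}

  static boolean matches(String condition, String state) {
    for (int i = 0; i < condition.length(); i++) {
      char c = condition.charAt(i);
      if (c != '#' && c != state.charAt(i)) return false;
    }
    return true;
  }

  /** Builds the match set and covers every action that is not yet represented in it. */
  static List<Rule> buildMatchSet(List<Rule> population, String state) {
    List<Rule> matchSet = new ArrayList<>();
    boolean[] actionCovered = new boolean[NUMBER_OF_ACTIONS];
    for (Rule r : population) {
      if (matches(r.condition(), state)) {
        matchSet.add(r);
        actionCovered[r.action()] = true;
      }
    }
    for (int a = 0; a < NUMBER_OF_ACTIONS; a++) {
      if (!actionCovered[a]) {
        // simplified covering: a fully general condition for the missing action
        Rule covering = new Rule("#".repeat(state.length()), a);
        matchSet.add(covering);
        population.add(covering); // the covering classifier is inserted into the population too
      }
    }
    return matchSet;
  }
}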
Example 4
  /** This method applies action set subsumption. */
  public void doActionSetSubsumption() {
    int i, pos = 0;
    Classifier cl = null;

    for (i = 0; i < macroClSum; i++) {
      if (set[i].couldSubsume()) {
        if (cl == null
            || set[i].numberOfDontCareSymbols() > cl.numberOfDontCareSymbols()
            || (set[i].numberOfDontCareSymbols() == cl.numberOfDontCareSymbols()
                && Config.rand() < 0.5)) {
          cl = set[i];
          pos = i;
        }
      }
    }
    if (cl != null) {
      for (i = 0; i < macroClSum; i++) {
        if (cl != set[i] && cl.isMoreGeneral(set[i])) {
          cl.increaseNumerosity(set[i].getNumerosity());
          // Now, the classifier has to be removed from the actionSet and the population.
          // It's deleted from the action set.
          Classifier clDeleted = set[i];
          deleteClassifier(i);

          // And now, it's deleted from the population
          Population p = parentRef;
          while (p.parentRef != null) {
            p = p.parentRef;
          }

          // The classifier is searched for in the initial population.
          pos = p.isThereClassifier(clDeleted);

          if (pos >= 0) p.deleteClassifier(pos);
        }
      }
    }
  } // end doActionSetSubsumption
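A reduced sketch of the two steps above, on ternary string conditions: first pick the subsumer candidate with the most don't-care symbols (random tie-break), then let it absorb the numerosity of every rule it is more general than. The class and field names are illustrative, and experiencedAndAccurate simply stands in for couldSubsume().

import java.util.List;
import java.util.Random;

class SubsumptionSketch {
  static final Random RAND = new Random();

  static class Rule {
    final String condition; // e.g. "0#1#"
    int numerosity = 1;
    boolean experiencedAndAccurate; // stands in for couldSubsume()

    Rule(String condition, boolean experiencedAndAccurate) {
      this.condition = condition;
      this.experiencedAndAccurate = experiencedAndAccurate;
    }

    long dontCares() {
      return condition.chars().filter(c -> c == '#').count();
    }

    /** True if this condition matches everything the other condition matches. */
    boolean isMoreGeneral(Rule other) {
      for (int i = 0; i < condition.length(); i++) {
        char c = condition.charAt(i);
        if (c != '#' && c != other.condition.charAt(i)) return false;
      }
      return true;
    }
  }

  /** Picks the most general eligible rule and lets it subsume the rules it covers. */
  static void actionSetSubsumption(List<Rule> actionSet) {
    Rule subsumer = null;
    for (Rule r : actionSet) {
      if (r.experiencedAndAccurate
          && (subsumer == null
              || r.dontCares() > subsumer.dontCares()
              || (r.dontCares() == subsumer.dontCares() && RAND.nextBoolean()))) {
        subsumer = r;
      }
    }
    if (subsumer == null) return;
    for (int i = actionSet.size() - 1; i >= 0; i--) {
      Rule r = actionSet.get(i);
      if (r != subsumer && subsumer.isMoreGeneral(r)) {
        subsumer.numerosity += r.numerosity; // the subsumed rule is absorbed...
        actionSet.remove(i);                 // ...and removed (the real code also removes it from the population)
      }
    }
  }
}

Since all classifiers in an action set advocate the same action, the generality test only needs to compare conditions.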
Example 5
  /**
   * It creates the D population defined by Wilson (2002): a population with the minimum number
   * of classifiers needed to cover all the input examples.
   *
   * @param env is the environment that supplies the examples to be covered.
   * @return the D population that has been created
   */
  public Population createMCompPopulation(Environment env) {
    int moreMatches = 0, maxMatched = 0;

    // We create a population of size Config.popSize instead of macroClSum because, if it is
    // used during training, this population can grow (new classifiers can be added).
    Population Mcomp = new Population(Config.popSize);

    while (env.getNumberOfExamples() > 0 && macroClSum > 0) {
      moreMatches = 0;
      maxMatched = set[0].getNumberMatches();
      for (int i = 1; i < macroClSum; i++) {
        if (set[i].getNumberMatches() > maxMatched) {
          maxMatched = set[i].getNumberMatches();
          moreMatches = i;
        }
      }

      Mcomp.addClassifier(set[moreMatches]);
      env.deleteMatchedExamples(set[moreMatches]);
      deleteClassifier(moreMatches);
    }
    return Mcomp;
  } // end createMCompPopulation
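A compact sketch of the greedy loop above: repeatedly pick the rule that matches the most remaining examples, add it to the compact population, and discard both that rule and the examples it matches. The data types and the match test are simplified assumptions, not the original Environment/Classifier API.

import java.util.ArrayList;
import java.util.List;

class GreedyCompactionSketch {
  record Rule(String condition) { // '#' is the don't-care symbol
    boolean matches(String example) {
      for (int i = 0; i < condition.length(); i++) {
        char c = condition.charAt(i);
        if (c != '#' && c != example.charAt(i)) return false;
      }
      return true;
    }
  }

  /** Greedy set cover: keeps only as many rules as are needed to match every example. */
  static List<Rule> compact(List<Rule> rules, List<String> examples) {
    List<Rule> compactPop = new ArrayList<>();
    List<Rule> remaining = new ArrayList<>(rules);
    List<String> uncovered = new ArrayList<>(examples);
    while (!uncovered.isEmpty() && !remaining.isEmpty()) {
      Rule best = remaining.get(0);
      int bestCount = -1;
      for (Rule r : remaining) {
        int count = (int) uncovered.stream().filter(r::matches).count();
        if (count > bestCount) {
          bestCount = count;
          best = r;
        }
      }
      final Rule chosen = best;
      compactPop.add(chosen);              // Mcomp.addClassifier
      uncovered.removeIf(chosen::matches); // env.deleteMatchedExamples
      remaining.remove(chosen);            // deleteClassifier
    }
    return compactPop;
  }
}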
Example 6
  /**
   * Deletes one classifier from the population. After that, if the population passed as a
   * parameter is not null, it looks for the deleted classifier there; if the classifier is in
   * that second population, it is deleted from it too.
   *
   * @param aSet is the population in which the deleted classifier has to be searched for.
   * @return the Classifier that has been deleted.
   */
  public Classifier deleteClFromPopulation(Population aSet) {
    // A classifier has been deleted from the population
    Classifier clDeleted = deleteClassifier();

    // Now, this classifier has to be deleted from the action set too (if it exists there).
    if (aSet != null) {
      int pos = aSet.isThereClassifier(clDeleted); // It is searched in the action set.
      if (pos >= 0) { // It has to be deleted from the action set too.
        aSet.microClSum--;

        // If the classifier now has 0 numerosity, it has to be completely removed from the
        // action set as well.
        if (clDeleted.getNumerosity() == 0) {
          aSet.macroClSum--; // Decrements the number of macroclassifiers
          aSet.set[pos] = aSet.set[aSet.macroClSum]; // Moves the last classifier to the deleted one
          aSet.set[aSet.macroClSum] = null; // Puts the last classifier to null.
        }
      }
    }
    return clDeleted;
  } // end deleteClFromPopulation
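The array bookkeeping in the inner branch (decrement the counter, move the last classifier into the freed slot, null out the old last slot) is the usual constant-time removal for an unordered array. A standalone sketch of that idiom, with illustrative names and no capacity checks:

class SwapRemoveSketch {
  Object[] set = new Object[16]; // the macroclassifier array
  int macroClSum = 0;            // number of used slots

  void add(Object cl) {
    set[macroClSum++] = cl;
  }

  /** Removes the element at pos in O(1) by overwriting it with the last used slot. */
  void removeAt(int pos) {
    macroClSum--;               // one macroclassifier less
    set[pos] = set[macroClSum]; // move the last classifier into the freed position
    set[macroClSum] = null;     // clear the now-unused last slot
  }
}

The order of the array is not preserved, which is fine here because the population is an unordered set.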
Example 7
  /** This is the new reduction algorithm (experimental phase). */
  private void reductInTrain() {
    double averageUseful = 0.0, stdUseful = 0.0;
    int i = 0;
    int numSum = 0;
    // A reference to the population is obtained.
    Population pop = this.getParentRef().getParentRef();

    while (i < macroClSum) {
      if (!set[i].couldComp()) {
        numSum += set[i].getNumerosity();
        set[i].setNumerosity(0);
        // The classifier is removed from the [A]
        set[i] = set[macroClSum - 1];
        macroClSum--;
        /////
        numAplicacions++;
      } else {
        averageUseful += set[i].getUsefulTimes();
        stdUseful += (set[i].getUsefulTimes() * set[i].getUsefulTimes());
        i++;
      }
    }

    if (macroClSum > 0) {
      if (macroClSum > 1)
        stdUseful =
            Math.sqrt(
                (stdUseful - ((averageUseful * averageUseful) / macroClSum)) / (macroClSum - 1));
      averageUseful = averageUseful / (double) macroClSum;

      // With the "thres" parameter you can control the compactation pressure (add or substract the
      // stdUseful)
      int thres = (int) (averageUseful - stdUseful);
      i = 0;
      averageUseful = 0;
      while (i < macroClSum) {
        if (set[i].getUsefulTimes() < thres && set[i].getPrediction() > Config.Preduct) {
          numSum += set[i].getNumerosity();
          set[i].setNumerosity(0);
          set[i] = set[macroClSum - 1];
          macroClSum--;
          /////
          numAplicacions++;
        } else {
          // We accumulate the contribution of each classifier to distribute the numerosity at the end.
          averageUseful += set[i].getUsefulTimes();
          i++;
        }
      }

      // The numerosity of the deleted classifiers is redistributed among the remaining classifiers.
      int addNum = 0;
      int discount = 0;
      for (i = 0; i < macroClSum - 1; i++) {
        addNum = (int) (((double) set[i].getUsefulTimes() / averageUseful) * (double) numSum);
        set[i].increaseNumerosity(addNum);
        discount += addNum;
      }

      if (macroClSum > 0) set[macroClSum - 1].increaseNumerosity(numSum - discount);

    } else {
      microClSum -= numSum;
      pop.microClSum -= numSum;
    }

    pop.deleteClWithZeroNum();
  }
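The threshold in the first half of the method comes from one-pass statistics: the loop accumulates the sum and the sum of squares of usefulTimes, and the sample standard deviation is then sqrt((sumSq - sum*sum/n) / (n - 1)). A self-contained sketch of just that computation, with illustrative names:

class UsefulnessThresholdSketch {
  /** threshold = mean(usefulTimes) - sampleStdDev(usefulTimes), as accumulated in reductInTrain. */
  static int usefulnessThreshold(int[] usefulTimes) {
    int n = usefulTimes.length;
    if (n == 0) return 0;
    double sum = 0.0, sumSq = 0.0;
    for (int u : usefulTimes) {
      sum += u;
      sumSq += (double) u * u;
    }
    double mean = sum / n;
    double std = (n > 1) ? Math.sqrt((sumSq - (sum * sum) / n) / (n - 1)) : 0.0;
    // In the original, rules below this threshold (and with prediction above Config.Preduct)
    // are removed and their numerosity is later redistributed.
    return (int) (mean - std);
  }
}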