Example #1
  /**
   * It generates the classification output for a given dataset, writes it to a file,
   * and fills the confusion matrix.
   *
   * @param dataset myDataset the input dataset
   * @param filename String the name of the output file
   * @param test boolean whether the test partition is being classified
   * @return the accuracy averaged over the classes that appear in the dataset
   */
  private double doOutput(myDataset dataset, String filename, boolean test) {
    String output = "";
    confusionMatrix = new int[dataset.getnClasses()][dataset.getnClasses()];

    output = dataset.copyHeader(); // we insert the header in the output file
    // We write the output for each example
    for (int i = 0; i < dataset.getnData(); i++) {
      String clReal = dataset.getOutputAsString(i);
      String clPred = classificationOutput(i, test);
      confusionMatrix[dataset.getOutputAsInteger(i)][dataset.numericClass(clPred)]++;
      output += clReal + " " + clPred + "\n";
    }
    // Accuracy averaged over the classes that actually contain examples
    double acc = 0;
    int nClasses = 0;
    for (int i = 0; i < confusionMatrix.length; i++) {
      int count = 0;
      for (int j = 0; j < confusionMatrix[i].length; j++) {
        count += confusionMatrix[i][j];
      }
      if (count > 0) {
        acc += 1.0 * confusionMatrix[i][i] / count;
        nClasses++;
      }
    }
    Files.writeFile(filename, output);
    return acc / nClasses;
    // return 1.0*hits/dataset.size();
  }
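The value returned above is the accuracy averaged per class (the mean of the diagonal ratios of the confusion matrix), not the plain hit rate that the commented-out line would compute. A minimal sketch of the difference, using a hypothetical confusion matrix for an imbalanced two-class problem:

// Hypothetical confusion matrix (rows = real class, columns = predicted class)
int[][] cm = {
  {90, 10}, // class 0: 100 examples, 90 classified correctly
  { 5,  5}  // class 1:  10 examples,  5 classified correctly
};
double overallAccuracy  = (90.0 + 5.0) / 110.0;         // ~0.864, what 1.0*hits/dataset.size() would give
double perClassAccuracy = (90.0 / 100 + 5.0 / 10) / 2;  // 0.70, what doOutput(...) returns

Averaging per class keeps a dominant majority class from hiding poor performance on the minority classes.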
Example #2
  /** It computes, for each rule, how many training examples it classifies correctly (RightN)
   *  and incorrectly (WrongN) when the rules are ranked by matching degree */
  private void calculateRightNWrongN() {
    int i, j, nData, evaClass;
    boolean stop;
    Rule rule;

    nData = this.train.size();

    for (j = 0; j < this.ruleBase.size(); j++) {
      rule = this.ruleBase.get(j);
      rule.setIni();
    }
    this.WrongNDefault = 0;

    for (i = 0; i < nData; i++) {
      for (j = 0; j < this.ruleBase.size(); j++) {
        rule = this.ruleBase.get(j);
        rule.matching(train.getExample(i));
      }

      this.sort();

      stop = false;
      for (j = 0; j < this.ruleBase.size() && !stop; j++) {
        rule = this.ruleBase.get(j);
        if (train.getOutputAsInteger(i) == rule.getClas()) {
          rule.incrRightN();
          stop = true;
        } else rule.incrWrongN();
      }
    }
  }
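Because the rules are sorted by matching degree before the inner loop, each training example is charged against the ranked rule list: every rule ranked above the first rule that predicts the correct class gets its WrongN counter incremented, that first correct rule gets RightN, and the remaining rules are left untouched for that example (if no rule predicts the correct class, all rules receive a WrongN increment).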
Example #3
  /** This function calculates the number of misclassified training examples and stores it as the fitness */
  private void rateError() {
    int n_errores, Prediction;

    n_errores = 0;
    for (int j = 0; j < train.size(); j++) {
      Prediction = this.FRM_WR(train.getExample(j));
      if (train.getOutputAsInteger(j) != Prediction) n_errores++;
    }

    this.fitness = n_errores;
  }
Example #4
  /** Function to evaluate the whole rule base on the training dataset, storing the number of correctly classified examples as the fitness */
  public void evalua() {
    int n_clasificados, Prediction;

    n_clasificados = 0;
    for (int j = 0; j < train.size(); j++) {
      Prediction = this.FRM_WR(train.getExample(j));
      if (train.getOutputAsInteger(j) == Prediction) n_clasificados++;
    }

    this.fitness = n_clasificados;
  }
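rateError() and evalua() are complementary fitness measures over the same training set: the former stores the number of misclassified examples and the latter the number of correctly classified ones, so for a fixed rule base n_clasificados + n_errores == train.size() and the training accuracy is n_clasificados / (double) train.size().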
Example #5
File: DMEL.java  Project: RubelAhmed57/KEEL
  /** It launches the algorithm */
  public void execute() {

    int i, j, k, l;
    int t;
    int ele;
    double prob[];
    double aux;
    double NUmax = 1.5; // used for linear ranking
    double NUmin = 0.5; // used for linear ranking
    double pos1, pos2;
    int sel1, sel2;
    int data[][];
    int infoAttr[];
    int classData[];
    Vector<Rule> contenedor = new Vector<Rule>();
    Vector<Rule> conjR = new Vector<Rule>();
    Rule tmpRule;
    Condition tmpCondition[] = new Condition[1];
    RuleSet population[];
    RuleSet hijo1, hijo2;

    if (somethingWrong) { // We do not execute the program
      System.err.println("An error was found, the data-set has numerical values.");
      System.err.println("Aborting the program");
      // We should not use the statement: System.exit(-1);
    } else {
      Randomize.setSeed(seed);

      nClasses = train.getnClasses();

      /*Build the nominal data information*/
      infoAttr = new int[train.getnInputs()];
      for (i = 0; i < infoAttr.length; i++) {
        infoAttr[i] = train.numberValues(i);
      }

      data = new int[train.getnData()][train.getnInputs()];
      for (i = 0; i < data.length; i++) {
        for (j = 0; j < data[i].length; j++) {
          if (train.isMissing(i, j)) data[i][j] = -1;
          else data[i][j] = train.valueExample(i, j);
        }
      }

      classData = new int[train.getnData()];
      for (i = 0; i < classData.length; i++) {
        classData[i] = train.getOutputAsInteger(i);
      }

      /* Find the first-order rules that turn out to be interesting */

      for (i = 0; i < nClasses; i++) {
        for (j = 0; j < infoAttr.length; j++) {
          for (k = 0; k < infoAttr[j]; k++) {
            tmpCondition[0] = new Condition(j, k);
            tmpRule = new Rule(tmpCondition);
            // An adjusted residual greater than 1.96 is significant at the 5% level
            if (Math.abs(computeAdjustedResidual(data, classData, tmpRule, i)) > 1.96) {
              if (!contenedor.contains(tmpRule)) {
                contenedor.add(tmpRule);
                conjR.add(tmpRule);
              }
            }
          }
        }
      }

      // Construct the Baker selection roulette
      prob = new double[popSize];
      for (j = 0; j < popSize; j++) {
        aux = (double) (NUmax - NUmin) * ((double) j / (popSize - 1));
        prob[j] = (double) (1.0 / (popSize)) * (NUmax - aux);
      }
      for (j = 1; j < popSize; j++) prob[j] = prob[j] + prob[j - 1];

      /*Steady-State Genetic Algorithm*/
      ele = 2;
      population = new RuleSet[popSize];
      while (conjR.size() >= 2) {
        t = 0;

        System.out.println("Producing rules of level " + ele);

        for (i = 0; i < population.length; i++) {
          population[i] = new RuleSet(conjR);
          population[i].computeFitness(data, classData, infoAttr, contenedor, nClasses);
        }

        Arrays.sort(population);

        while (t < numGenerations && !population[0].equals(population[popSize - 1])) {
          System.out.println("Generation " + t);
          t++;

          /*Baker's selection*/
          pos1 = Randomize.Rand();
          pos2 = Randomize.Rand();
          for (l = 0; l < popSize && prob[l] < pos1; l++) ;
          sel1 = l;
          for (l = 0; l < popSize && prob[l] < pos2; l++) ;
          sel2 = l;

          hijo1 = new RuleSet(population[sel1]);
          hijo2 = new RuleSet(population[sel2]);

          if (Randomize.Rand() < pCross) {
            RuleSet.crossover1(hijo1, hijo2);
          } else {
            RuleSet.crossover2(hijo1, hijo2);
          }

          RuleSet.mutation(hijo1, conjR, pMut, data, classData, infoAttr, contenedor, nClasses);
          RuleSet.mutation(hijo2, conjR, pMut, data, classData, infoAttr, contenedor, nClasses);

          hijo1.computeFitness(data, classData, infoAttr, contenedor, nClasses);
          hijo2.computeFitness(data, classData, infoAttr, contenedor, nClasses);

          population[popSize - 2] = new RuleSet(hijo1);
          population[popSize - 1] = new RuleSet(hijo2);

          Arrays.sort(population);
        }

        /*Decode function*/
        ele++;
        conjR.removeAllElements();
        System.out.println(
            "Fitness of the best chromosome in rule level " + ele + ": " + population[0].fitness);
        for (i = 0; i < population[0].getRuleSet().length; i++) {
          if (Math.abs(computeAdjustedResidual(data, classData, population[0].getRule(i), i))
              > 1.96) {
            if (validarRegla(population[0].getRule(i))
                && !contenedor.contains(population[0].getRule(i))) {
              contenedor.add(population[0].getRule(i));
              conjR.add(population[0].getRule(i));
            }
          }
        }
      }

      // Finally we should fill the training and test output files
      doOutput(this.val, this.outputTr, data, classData, infoAttr, contenedor, nClasses);
      doOutput(this.test, this.outputTst, data, classData, infoAttr, contenedor, nClasses);

      /* Print the rules obtained */
      for (i = contenedor.size() - 1; i >= 0; i--) {
        if (reglaPositiva(
            this.train, data, classData, infoAttr, nClasses, contenedor.elementAt(i))) {
          Fichero.AnadirtoFichero(outputRule, contenedor.elementAt(i).toString(train));
          Fichero.AnadirtoFichero(
              outputRule,
              " -> "
                  + consecuente(
                      this.train, data, classData, infoAttr, nClasses, contenedor.elementAt(i))
                  + "\n");
        }
      }
      System.out.println("Algorithm Finished");
    }
  }
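The roulette built before the steady-state loop implements linear ranking: position j of the population, sorted from best to worst, gets a slot of width (1.0 / popSize) * (NUmax - (NUmax - NUmin) * j / (popSize - 1)), and the slots are accumulated so that a single uniform draw can be mapped to an index. A minimal sketch of the values this produces, assuming an illustrative popSize of 4 and the NUmax = 1.5, NUmin = 0.5 used above:

double NUmax = 1.5, NUmin = 0.5; // same linear-ranking bounds as in execute()
int popSize = 4;                 // illustrative population size (assumption)
double[] prob = new double[popSize];
for (int j = 0; j < popSize; j++) {
  double aux = (NUmax - NUmin) * ((double) j / (popSize - 1));
  prob[j] = (1.0 / popSize) * (NUmax - aux);   // slot widths: 0.375, 0.292, 0.208, 0.125 (sum = 1.0)
}
for (int j = 1; j < popSize; j++) prob[j] += prob[j - 1]; // cumulative: 0.375, 0.667, 0.875, 1.000
// A uniform draw u selects the first index whose cumulative value is at least u,
// so better-ranked rule sets (lower indices after Arrays.sort) are chosen more often.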