Example #1
  private void calculateRightNWrongN() {
    int i, j, nData;
    boolean stop;
    Rule rule;

    nData = this.train.size();

    for (j = 0; j < this.ruleBase.size(); j++) {
      rule = this.ruleBase.get(j);
      rule.setIni();
    }
    this.WrongNDefault = 0;

    for (i = 0; i < nData; i++) {
      for (j = 0; j < this.ruleBase.size(); j++) {
        rule = this.ruleBase.get(j);
        rule.matching(train.getExample(i));
      }

      this.sort();

      stop = false;
      for (j = 0; j < this.ruleBase.size() && !stop; j++) {
        rule = this.ruleBase.get(j);
        if (train.getOutputAsInteger(i) == rule.getClas()) {
          rule.incrRightN();
          stop = true;
        } else rule.incrWrongN();
      }
    }
  }
  private double getAUC(myDataset data) {
    double auc = 0;
    int totalClasses = 0;
    for (int i = 0; i < this.confusionMatrix.length; i++) {
      if (data.numberInstances(i) > 0) {
        totalClasses++;
        double tp = 1.0 * confusionMatrix[i][i] / data.numberInstances(i);
        // System.err.print("TP["+i+"]: "+tp);
        for (int j = 0; j < this.confusionMatrix[i].length; j++) {
          if ((j != i) && (data.numberInstances(j) > 0)) {
            double fp = 1.0 * this.confusionMatrix[j][i] / data.numberInstances(j);
            // System.err.print(", FP["+j+"]: "+fp);
            double auc_j = (tp - fp + 1) / 2.0;
            // System.err.print(", AUC["+j+"]: "+auc_j);
            auc += auc_j;
          }
        }
      }
      // System.err.println("");
    }

    // Average over the totalClasses * (totalClasses - 1) ordered class pairs added above
    return auc / (totalClasses * (totalClasses - 1));
  }
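
A note on what getAUC computes, derived only from the code above: for every ordered pair of classes (i, j) that both have instances, a one-point AUC estimate is formed from the confusion matrix CM and the estimates are averaged over all ordered pairs, in the spirit of the Hand-and-Till multi-class M measure but computed from crisp predictions rather than rankings:

$$
\mathrm{AUC} = \frac{1}{C\,(C-1)} \sum_{i \neq j} \frac{1 + \mathrm{TPR}_i - \mathrm{FPR}_{j \to i}}{2},
\qquad
\mathrm{TPR}_i = \frac{CM_{ii}}{n_i}, \quad
\mathrm{FPR}_{j \to i} = \frac{CM_{ji}}{n_j}
$$

where C is the number of classes with at least one instance and n_k is the number of instances of class k.
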
Example #3
  private String classificationOutput(
      myDataset dataset,
      int ex,
      int data[][],
      int classData[],
      int infoAttr[],
      Vector<Rule> contenedor,
      int nClases) {

    int j, k, l;
    boolean match;
    double tmp1, tmp2;
    int pos = 0, classPredicted;
    double Waip;
    int ejemplo[] = new int[data[0].length];

    for (j = 0; j < ejemplo.length; j++) {
      if (dataset.isMissing(ex, j)) ejemplo[j] = -1;
      else ejemplo[j] = dataset.valueExample(ex, j);
    }

    classPredicted = -1;
    Waip = 0;

    /* Search for a match of the example, traversing the container from the end */
    for (j = contenedor.size() - 1; j >= 0; j--) {
      match = true;
      for (k = 0; k < contenedor.elementAt(j).getRule().length && match; k++) {
        if (ejemplo[contenedor.elementAt(j).getiCondition(k).getAttribute()]
            != contenedor.elementAt(j).getiCondition(k).getValue()) {
          match = false;
        }
      }
      if (match) {
        tmp1 = Double.NEGATIVE_INFINITY;
        for (l = 0; l < nClases; l++) {
          tmp2 = 0;
          for (k = 0; k < contenedor.elementAt(j).getRule().length; k++) {
            tmp2 +=
                RuleSet.computeWeightEvidence(
                    data, classData, contenedor.elementAt(j).getiCondition(k), l, infoAttr);
          }
          if (tmp2 > tmp1) {
            tmp1 = tmp2;
            pos = l;
          }
        }
        if (tmp1 > Waip) {
          classPredicted = pos;
          Waip = tmp1;
        }
      }
    }
    if (classPredicted == -1) return "Unclassified";

    return dataset.getOutputValue(classPredicted);
  }
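
Written as a decision rule (using only what the code above shows, with W denoting the value returned by RuleSet.computeWeightEvidence), each covering rule R_j votes for the class that maximizes its accumulated weight of evidence:

$$
c(R_j) = \operatorname*{arg\,max}_{0 \le l < nClases} \; \sum_{k} W\bigl(\mathrm{cond}_k(R_j),\, l\bigr)
$$

and the example receives the class of the covering rule with the largest winning score, provided that score is positive (Waip starts at 0); otherwise the method returns "Unclassified".
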
Example #4
  /**
   * It prints the whole rule base as a String
   *
   * @return The whole rule base
   */
  public String printString() {
    int i, j, ant;
    String[] names = train.names();
    String[] clases = train.clases();
    String cadena = new String("");

    ant = 0;

    for (i = 0; i < this.ruleBase.size(); i++) {
      Rule r = this.ruleBase.get(i);
      cadena += (i + 1) + ": ";
      for (j = 0; j < n_variables && r.antecedente[j] < 0; j++) ; // skip to the first used antecedent
      if (j < n_variables && r.antecedente[j] >= 0) {
        cadena += names[j] + " IS " + r.dataBase.print(j, r.antecedente[j]);
        ant++;
      }
      for (j++; j < n_variables - 1; j++) {
        if (r.antecedente[j] >= 0) {
          cadena += " AND " + names[j] + " IS " + r.dataBase.print(j, r.antecedente[j]);
          ant++;
        }
      }
      if (j < n_variables && r.antecedente[j] >= 0) {
        cadena +=
            " AND "
                + names[j]
                + " IS "
                + r.dataBase.print(j, r.antecedente[j])
                + ": "
                + clases[r.clase]
                + "\n";
        ant++;
      } else cadena += ": " + clases[r.clase] + "\n";
    }

    cadena += (i + 1) + ": Default IS " + clases[0] + "\n"; // Default class

    cadena += "\n\n";
    cadena += "@Dsupp and Dconf:\n\n";
    for (i = 0; i < this.ruleBase.size(); i++) {
      Rule rule = this.ruleBase.get(i);
      cadena += (i + 1) + ": ";
      cadena += "Dsupp: " + rule.getSupport() + " AND Dconf: " + rule.getConfidence() + "\n";
    }

    cadena =
        "@Number of rules: "
            + (this.ruleBase.size() + 1)
            + " Number of Antecedents by rule: "
            + ant * 1.0 / (this.ruleBase.size() + 1.0)
            + "\n\n"
            + cadena;

    return (cadena);
  }
Example #5
  /** Function to evaluate the whole rule base by using the training dataset */
  public void evalua() {
    int n_clasificados, Prediction;

    n_clasificados = 0;
    for (int j = 0; j < train.size(); j++) {
      Prediction = this.FRM_WR(train.getExample(j));
      if (train.getOutputAsInteger(j) == Prediction) n_clasificados++;
    }

    this.fitness = n_clasificados;
  }
 private String printConfusionMatrix() {
   String salida = new String("");
   for (int i = 0; i < train.getnClasses(); i++) {
     salida += "\n";
     for (int j = 0; j < train.getnClasses(); j++) {
       salida += confusionMatrix[i][j] + "\t";
     }
   }
   salida += "\n";
   return salida;
 }
Example #7
  /** This function calculates the number of errors */
  private void rateError() {
    int n_errores, Prediction;

    n_errores = 0;
    for (int j = 0; j < train.size(); j++) {
      Prediction = this.FRM_WR(train.getExample(j));
      if (train.getOutputAsInteger(j) != Prediction) n_errores++;
    }

    this.fitness = n_errores;
  }
  private void generateModel() {
    String salida = new String("");
    double max_auc = 0;
    ArrayList<String> solutions = this.getAllSolutions();
    models = new ArrayList<Farchd>();

    int nEjemplos = train.getnData();
    if (this.instances == this.MAJ) {
      nEjemplos = train.getMajority();
    }
    boolean[] variables = new boolean[train.getnInputs()];
    boolean[] ejemplos = new boolean[nEjemplos];
    this.weightsAUC = new double[solutions.size() / 2]; // There are 2 solutions per entry: FS and IS

    for (int i = 0, j = 0; i < solutions.size(); i += 2, j++) {
      int vars, ejs;
      vars = ejs = 0;
      variables = decode(solutions.get(i));
      ejemplos = decode(solutions.get(i + 1));
      for (int l = 0; l < variables.length; l++) {
        // variables[j] = solution[j];
        if (variables[l]) vars++;
      }
      for (int l = 0; l < ejemplos.length; l++) {
        if (ejemplos[l]) ejs++;
      }
      try {
        Farchd model = new Farchd(train, val, test, variables, ejemplos);

        /** ******** */
        // double fit = model.getAUCTr();
        double auc_tr = model.execute(true);
        double auc_tst = model.getAUCTst();
        if (auc_tr > max_auc) {
          max_auc = auc_tr;
          indexBest = j;
        }
        this.weightsAUC[j] = auc_tr;

        salida +=
            "Solution[" + j + "]:\t" + vars + "\t" + ejs + "\t" + auc_tr + "\t" + auc_tst + "\n";

        /** ******** */
        models.add(model);
      } catch (Exception e) {
        System.err.println("Liada maxima al generar modelo ");
        e.printStackTrace(System.err);
        System.exit(-1);
      }
    }
    System.out.print(salida);
    Files.writeFile(header + "_AUC.txt", salida);
  }
Example #9
  /**
   * It reads the data from the input files (training, validation and test) and parses all the
   * parameters from the parameters array.
   *
   * @param parameters parseParameters It contains the input files, output files and parameters
   */
  public Algorithm(parseParameters parameters) {

    train = new myDataset();
    val = new myDataset();
    test = new myDataset();
    try {
      System.out.println("\nReading the training set: " + parameters.getTrainingInputFile());
      train.readRegressionSet(parameters.getTrainingInputFile(), true);
      System.out.println("\nReading the validation set: " + parameters.getValidationInputFile());
      val.readRegressionSet(parameters.getValidationInputFile(), false);
      System.out.println("\nReading the test set: " + parameters.getTestInputFile());
      test.readRegressionSet(parameters.getTestInputFile(), false);
    } catch (IOException e) {
      System.err.println("There was a problem while reading the input data-sets: " + e);
      somethingWrong = true;
    }

    // We may check if there are some numerical attributes, because our algorithm may not handle
    // them:
    // somethingWrong = somethingWrong || train.hasNumericalAttributes();
    // somethingWrong = somethingWrong || train.hasMissingAttributes();

    outputTr = parameters.getTrainingOutputFile();
    outputTst = parameters.getTestOutputFile();
    outputBC = parameters.getOutputFile(0);

    // Now we parse the parameters, for example:
    semilla = Long.parseLong(parameters.getParameter(0));
    // ...
    tamPoblacion = Integer.parseInt(parameters.getParameter(1));
    numGeneraciones = Integer.parseInt(parameters.getParameter(2));
    numGenMigration = Integer.parseInt(parameters.getParameter(3));
    Nr = Integer.parseInt(parameters.getParameter(4));
    Nf = Integer.parseInt(parameters.getParameter(5));
    K = Integer.parseInt(parameters.getParameter(6));
    probMut = Double.parseDouble(parameters.getParameter(7));

    entradas = train.getnInputs();

    Poblacion = new ArrayList<Individual>(tamPoblacion);
    for (int i = 0; i < tamPoblacion; i++) {
      Individual indi = new Individual(entradas);
      Poblacion.add(indi);
    }

    Poblacion2 = new ArrayList<Individual>(tamPoblacion);
    Hijos = new ArrayList<Individual>(tamPoblacion / 2);
    SistemaDifuso = new ArrayList<Individual>(Nr);
    BestSistemaDifuso = new ArrayList<Individual>(Nr);

    vectorNr = new int[Nr];
  }
Example #10
  /**
   * It generates the output file from a given dataset and stores it in a file
   *
   * @param dataset myDataset input dataset
   * @param filename String the name of the file
   */
  private void doOutput(myDataset dataset, String filename) {
    int i;
    double fuerza;
    String output = new String("");

    output = dataset.copyHeader(); // we insert the header in the output file

    for (i = 0; i < dataset.getnData(); i++) {
      fuerza = Output_fuzzy_system(dataset.getExample(i));
      output += (dataset.getOutputAsReal(i) + " " + fuerza + " " + "\n");
    }

    Files.writeFile(filename, output);
  }
Example #11
 /**
  * It generates the output file from a given dataset and stores it in a file
  *
  * @param dataset myDataset input dataset
  * @param filename String the name of the file
  * @param lanzar Chc the algorithm class
  */
 private void doOutput(myDataset dataset, String filename, Chc lanzar) {
   String output = new String("");
   output = dataset.copyHeader(); // we insert the header in the output file
   // We write the output for each example
   for (int i = 0; i < dataset.getnData(); i++) {
     // for regression:
     output +=
         dataset.getOutputAsReal(i)
             + " "
             + (double) this.regressionOutput(dataset.getExample(i), lanzar)
             + "\n";
   }
   Fichero.escribeFichero(filename, output);
 }
Example #12
  private String consecuente(
      myDataset dataset, int data[][], int classData[], int infoAttr[], int nClases, Rule rule) {

    int k, l;
    double tmp1, tmp2;
    int pos = 0, classPredicted;
    double Waip;

    classPredicted = -1;
    Waip = 0;

    tmp1 = Double.NEGATIVE_INFINITY;
    for (l = 0; l < nClases; l++) {
      tmp2 = 0;
      for (k = 0; k < rule.getRule().length; k++) {
        tmp2 += RuleSet.computeWeightEvidence(data, classData, rule.getiCondition(k), l, infoAttr);
      }
      if (tmp2 > tmp1) {
        tmp1 = tmp2;
        pos = l;
      }
    }
    classPredicted = pos;
    Waip = tmp1;

    return dataset.getOutputValue(classPredicted) + " [" + Double.toString(Waip) + "]";
  }
Example #13
  String getOutputTies(double[] max) {
    /*
     * Tie-breaking step 1: Find out which classes gain the maximum score
     */
    double maxValue = max[maxIndex(max)];
    double[] ties = new double[max.length];
    for (int i = 0; i < max.length; i++) {
      if (max[i] == maxValue) {
        ties[i] = aprioriClassDistribution[i];
      }
    }

    max = new double[max.length];
    max[maxIndex(ties)] = 1;

    /*
     * Tie-breaking step 2: Check whether the tying classes have the same a
     * priori class probability and count these classes.
     */
    int tieValues = 0;
    maxValue = ties[maxIndex(ties)];
    for (int i = 0; i < ties.length; i++) {
      if (ties[i] == maxValue) {
        tieValues++;
      }
    }

    /*
     * Tie-breaking step 3: If the tying classes have the same a priori
     * probabilities, then use randomization to determine the winner among
     * these classes
     */
    if (tieValues > 1) {
      tieValues = 0;
      maxValue = ties[maxIndex(ties)];
      int[] stillTying = new int[ties.length];

      for (int i = 0; i < max.length; i++) {
        if (ties[i] == maxValue) {
          stillTying[tieValues] = i;
          tieValues++;
        }
      }
      return train.getOutputValue(stillTying[Randomize.RandintClosed(0, tieValues - 1)]);
    }
    return train.getOutputValue(maxIndex(max));
  }
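
A minimal, self-contained sketch of the same tie-breaking policy, written with java.util.Random in place of the project's Randomize utility; the class and method names below are hypothetical and only illustrate the three documented steps (top score, then a-priori probability, then randomization):

import java.util.Random;

public class TieBreakSketch {
  private static final Random RNG = new Random();

  /**
   * Index of the winning class among the scores in max: keep the classes with the
   * highest score, prefer the largest a-priori probability among them, and break
   * any remaining tie uniformly at random.
   */
  static int breakTies(double[] max, double[] apriori) {
    double bestScore = Double.NEGATIVE_INFINITY;
    for (double v : max) bestScore = Math.max(bestScore, v);

    double bestPrior = Double.NEGATIVE_INFINITY;
    int count = 0;
    int[] candidates = new int[max.length];
    for (int i = 0; i < max.length; i++) {
      if (max[i] == bestScore) {        // step 1: top-score classes only
        if (apriori[i] > bestPrior) {   // step 2: a strictly larger prior resets the pool
          bestPrior = apriori[i];
          count = 0;
        }
        if (apriori[i] == bestPrior) {  // classes still tied on the prior stay in the pool
          candidates[count++] = i;
        }
      }
    }
    // step 3: random among remaining ties (deterministic when only one candidate remains)
    return candidates[RNG.nextInt(count)];
  }
}
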
Example #14
  /**
   * It generates the output file from a given dataset, stores it in a file and computes the
   * average accuracy per class
   *
   * @param dataset myDataset input dataset
   * @param filename String the name of the file
   * @param test boolean true when the dataset is the test set
   * @return The average accuracy per class
   */
  private double doOutput(myDataset dataset, String filename, boolean test) {
    String output = new String("");
    confusionMatrix = new int[dataset.getnClasses()][dataset.getnClasses()];

    output = dataset.copyHeader(); // we insert the header in the output file
    // We write the output for each example
    for (int i = 0; i < dataset.getnData(); i++) {
      String clReal = dataset.getOutputAsString(i);
      String clPred = classificationOutput(i, test);
      confusionMatrix[dataset.getOutputAsInteger(i)][dataset.numericClass(clPred)]++;
      output += clReal + " " + clPred + "\n";
    }
    double acc = 0;
    int nClasses = 0;
    for (int i = 0; i < confusionMatrix.length; i++) {
      int count = 0;
      for (int j = 0; j < confusionMatrix[i].length; j++) {
        count += confusionMatrix[i][j];
      }
      if (count > 0) {
        acc += 1.0 * confusionMatrix[i][i] / count;
        nClasses++;
      }
    }
    Files.writeFile(filename, output);
    return acc / nClasses;
    // return 1.0*hits/dataset.size();
  }
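
The value returned by this doOutput is the mean per-class recall (average accuracy, sometimes called balanced accuracy), computed from the confusion matrix CM filled in the loop above:

$$
\mathrm{AvgAcc} = \frac{1}{C} \sum_{i=1}^{C} \frac{CM_{ii}}{\sum_{j} CM_{ij}}
$$

where the sum runs only over the C classes that actually appear in the dataset (rows of CM with a positive count).
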
Example #15
  /** It launches the algorithm */
  public void execute() {
    if (this.somethingWrong) { // We do not execute the program
      System.err.println("An error was found, either the data-set has missing values.");
      System.err.println(
          "Please remove the examples with missing data or apply a MV preprocessing.");
      System.err.println("Aborting the program");
      // We should not use the statement: System.exit(-1);
    } else {
      // We do here the algorithm's operations

      int nClasses = train.getnClasses();
      aprioriClassDistribution = new double[nClasses];
      for (int i = 0; i < nClasses; i++) {
        aprioriClassDistribution[i] = 1.0 * val.numberInstances(i) / val.size();
      }

      if (model) { // the model has not been generated to a file beforehand
        NSGA2 search =
            new NSGA2(
                train, seed, populationSize, maxTrials, crossover, mutation, instances, fitness);
        try {
          search.execute();
        } catch (Exception e) {
          e.printStackTrace(System.err);
        }
      }

      // Finally we should fill the training and test output files

      this.generateModel();

      double avgTr = this.doOutput(val, this.outputTr, false);
      double aucTr = getAUC(val);
      double avgTst = this.doOutput(test, this.outputTst, true);
      double aucTst = getAUC(test);
      System.out.print("AUC Train: " + aucTr);
      System.out.println("; AvgAcc Train: " + avgTr);
      System.out.print("AUC Test: " + aucTst);
      System.out.println("; AvgAcc Test: " + avgTst);

      totalTime = System.currentTimeMillis() - startTime;
      System.out.println("Algorithm Finished: " + totalTime);
    }
  }
Example #16
  /**
   * It evaluates the performance of the fuzzy system. The Mean Square Error (MSE) on the training
   * set is used
   *
   * @return double The fitness value (the inverse of the training MSE)
   */
  public double Evaluate_fuzzy_system() {
    int i;
    double result, suma, fuerza;

    suma = 0.0;
    for (i = 0; i < train.getnData(); i++) {
      fuerza = Output_fuzzy_system(train.getExample(i));
      suma += Math.pow(train.getOutputAsReal(i) - fuerza, 2.0);
    }

    result = suma / train.getnData();

    /* We want to have a maximization problem so, we invert the error */
    if (result != 0.0) {
      result = 1.0 / result;
    }

    return (result);
  }
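
In formula form, the fitness maximized by Evaluate_fuzzy_system is the inverse of the training MSE (a zero MSE is returned as is):

$$
\mathrm{fitness} = \frac{1}{\mathrm{MSE}}, \qquad
\mathrm{MSE} = \frac{1}{N} \sum_{p=1}^{N} \bigl(y_p - \hat{y}_p\bigr)^2
$$

where y_p is the real output of training example p, \hat{y}_p is Output_fuzzy_system applied to that example, and N is the number of training examples.
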
Example #17
  /** It launches the algorithm */
  public void execute() {
    if (somethingWrong) { // We do not execute the program
      System.err.println("An error was found, either the data-set has missing values.");
      System.err.println(
          "Please remove the examples with missing data or apply a MV preprocessing.");
      System.err.println("Aborting the program");
      // We should not use the statement: System.exit(-1);
    } else {
      // We do here the algorithm's operations

      nClasses = train.getnClasses();

      dataBase =
          new DataBase(
              train.getnInputs(), train.getRanges(), train.varNames(), train.getNominals());
      Population pobl =
          new Population(
              train,
              dataBase,
              populationSize,
              nRules,
              crossProb,
              ruleWeight,
              combinationType,
              inferenceType,
              p_DC,
              michProb);
      pobl.Generation(this.nGenerations);

      dataBase.writeFile(this.fileDB);
      ruleBase = pobl.bestRB();
      ruleBase.writeFile(this.fileBR);

      // Finally we should fill the training and test output files
      double accTra = doOutput(this.val, this.outputTr);
      double accTst = doOutput(this.test, this.outputTst);

      System.out.println("Accuracy obtained in training: " + accTra);
      System.out.println("Accuracy obtained in test: " + accTst);
      System.out.println("Algorithm Finished");
    }
  }
Example #18
  /**
   * It evaluates the performance of the best evolved fuzzy system on test data. The Mean Square
   * Error (MSE) is used
   *
   * @return double The MSE on the test data
   */
  public double Evaluate_best_fuzzy_system_in_test() {
    int i;
    double result, suma, fuerza;

    SistemaDifuso.clear();
    for (i = 0; i < Nr; i++) {
      Individual indi = new Individual(BestSistemaDifuso.get(i));
      SistemaDifuso.add(indi);
    }

    suma = 0.0;
    for (i = 0; i < test.getnData(); i++) {
      fuerza = Output_fuzzy_system(test.getExample(i));
      suma += Math.pow(test.getOutputAsReal(i) - fuerza, 2.0);
    }

    result = suma / test.getnData();

    return (result);
  }
Example #19
  /**
   * It reads the data from the input files (training, validation and test) and parses all the
   * parameters from the parameters array.
   *
   * @param parameters parseParameters It contains the input files, output files and parameters
   */
  public Algorithm(parseParameters parameters) {

    train = new myDataset();
    val = new myDataset();
    test = new myDataset();
    try {
      System.out.println("\nReading the training set: " + parameters.getTrainingInputFile());
      // train.readClassificationSet(parameters.getTrainingInputFile(), true);
      train.readRegressionSet(parameters.getTrainingInputFile(), true);

      System.out.println("\nReading the validation set: " + parameters.getValidationInputFile());
      val.readRegressionSet(parameters.getValidationInputFile(), false);

      // val.readClassificationSet(parameters.getValidationInputFile(), false);
      System.out.println("\nReading the test set: " + parameters.getTestInputFile());
      test.readRegressionSet(parameters.getTestInputFile(), false);

      // test.readClassificationSet(parameters.getTestInputFile(), false);
    } catch (IOException e) {
      System.err.println("There was a problem while reading the input data-sets: " + e);
      somethingWrong = true;
    }

    // We may check if there are some numerical attributes, because our algorithm may not handle
    // them:
    // somethingWrong = somethingWrong || train.hasNumericalAttributes();
    // somethingWrong = somethingWrong || train.hasMissingAttributes();

    outputTr = parameters.getTrainingOutputFile();
    outputTst = parameters.getTestOutputFile();

    // Now we parse the parameters, for example:

    seed = Long.parseLong(parameters.getParameter(0));
    iterations = Integer.parseInt(parameters.getParameter(1));
    tam_poblacion = Integer.parseInt(parameters.getParameter(2));
    num_bits_gen = Integer.parseInt(parameters.getParameter(3));

    // ...

  }
Example #20
  /**
   * It calculates the output of the fuzzy system for a given example
   *
   * @param ejemplo double [] A given example
   * @return double The output of the fuzzy system for the given example
   */
  double Output_fuzzy_system(double[] ejemplo) {
    int i;
    double result, suma1, suma2, omega, y;

    suma1 = suma2 = 0.0;
    for (i = 0; i < Nr; i++) {
      omega = Matching_degree(SistemaDifuso.get(i), ejemplo);
      y = Output_value(SistemaDifuso.get(i), ejemplo);
      suma1 += (omega * y);
      suma2 += omega;
    }

    if (suma2 != 0.0) {
      result = suma1 / suma2;
    } else {
      //                result = 0.0;
      result = ((train.getMax(entradas) - train.getMin(entradas)) / 2.0);
    }

    return (result);
  }
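
The returned value is the usual weighted-average (TSK-style) aggregation over the Nr rules, with the fallback used by the code when no rule fires at all:

$$
\hat{y}(x) =
\begin{cases}
\dfrac{\sum_{i=1}^{N_r} \omega_i(x)\, y_i(x)}{\sum_{i=1}^{N_r} \omega_i(x)} & \text{if } \sum_i \omega_i(x) \neq 0 \\[1ex]
\dfrac{\max(Y) - \min(Y)}{2} & \text{otherwise}
\end{cases}
$$

where \omega_i is the Matching_degree of rule i and y_i its Output_value; note that the fallback is half the width of the output range, exactly as computed from train.getMax(entradas) and train.getMin(entradas).
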
Example #21
 /**
  * It returns the algorithm classification output given an input example
  *
  * @param example double[] The input example
  * @return String the output generated by the algorithm
  */
 private String classificationOutput(double[] example) {
   String output = new String("?");
   /**
    * Here we should include the algorithm directives to generate the classification output from
    * the input example
    */
   int clas = ruleBase.FRM(example);
   if (clas >= 0) {
     output = train.getOutputValue(clas);
   }
   return output;
 }
Example #22
 /**
  * It generates the output file from a given dataset and stores it in a file
  *
  * @param dataset myDataset input dataset
  * @param filename String the name of the file
  * @param data containing integer identifiers of nominal values
  * @param classData containing integer identifiers of classes
  * @param infoAttr containing number of values for each attribute
  * @param contenedor containing all the interesting rules
  * @param nClases indicates the number of classes
  */
 private void doOutput(
     myDataset dataset,
     String filename,
     int data[][],
     int classData[],
     int infoAttr[],
     Vector<Rule> contenedor,
     int nClases) {
   String output = new String("");
   output = dataset.copyHeader(); // we insert the header in the output file
   // We write the output for each example
   for (int i = 0; i < dataset.getnData(); i++) {
     // for classification:
     output +=
         dataset.getOutputAsString(i)
             + " "
             + this.classificationOutput(
                  dataset, i, data, classData, infoAttr, contenedor, nClases)
             + "\n";
   }
   Fichero.escribeFichero(filename, output);
 }
Example #23
  /** It applies the migration stage */
  void Migration() {
    int i, j, num;
    double u;

    /* First, the individuals in the population are ordered according to their fitness */
    Collections.sort(Poblacion);

    /* The second half (the worst one) is randomly initialized again */
    for (j = (tamPoblacion / 2); j < tamPoblacion; j++) {
      /* First, the antecedent */
      for (i = 0; i < entradas; i++) {
        Poblacion.get(j).antecedente[i].m =
            Randomize.RanddoubleClosed(train.getMin(i), train.getMax(i));
        Poblacion.get(j).antecedente[i].sigma = Randomize.RandintClosed(1, 4);
      }

      /* Secondly, the consequent */
      do {
        num = 0;

        for (i = 0; i < entradas; i++) {
          u = Randomize.RandClosed();
          /* The term is used in the consequent */
          if (u < 0.5) {
            Poblacion.get(j).consecuente[i] = Randomize.RanddoubleClosed(-1.0, 1.0);
            //                    Poblacion.get(j).consecuente[entradas] =
            // Randomize.RanddoubleClosed(-45.0, 45.0);
            if (Poblacion.get(j).consecuente[i] != 0.0) {
              num++;
            }
          }
          /* The term is NOT used in the consequent */
          else {
            Poblacion.get(j).consecuente[i] = 0.0;
          }
        }

        u = Randomize.RandClosed();
        /* The term is used in the consequent */
        if (u < 0.5) {
          Poblacion.get(j).consecuente[entradas] =
              Randomize.RanddoubleClosed(
                  -1.0 * ((train.getMax(entradas) - train.getMin(entradas)) / 2.0),
                  ((train.getMax(entradas) - train.getMin(entradas)) / 2.0));
          //                    Poblacion.get(j).consecuente[entradas] =
          // Randomize.RanddoubleClosed(-45.0, 45.0);
          //                Poblacion.get(j).consecuente[entradas] =
          // Randomize.RanddoubleClosed(train.getMin(entradas), train.getMax(entradas));
          if (Poblacion.get(j).consecuente[entradas] != 0.0) {
            num++;
          }
        }
        /* The term is NOT used in the consequent */
        else {
          Poblacion.get(j).consecuente[entradas] = 0.0;
        }
      } while (num == 0);
    }
  }
Example #24
  /**
   * It calculates the matching degree between the antecedent of the rule and a given example
   *
   * @param indiv Individual The individual representing a fuzzy rule
   * @param ejemplo double [] A given example
   * @return double The matching degree between the example and the antecedent of the rule
   */
  double Matching_degree(Individual indiv, double[] ejemplo) {
    int i, sig;
    double result, suma, numerador, denominador, sigma, ancho_intervalo;

    suma = 0.0;
    for (i = 0; i < entradas; i++) {
      ancho_intervalo = train.getMax(i) - train.getMin(i);

      sigma = -1.0;
      sig = indiv.antecedente[i].sigma;
      switch (sig) {
        case 1:
          sigma = 0.3;
          break;
        case 2:
          sigma = 0.4;
          break;
        case 3:
          sigma = 0.5;
          break;
        case 4:
          sigma = 0.6;
          break;
      }

      sigma *= ancho_intervalo;

      numerador = Math.pow((ejemplo[i] - indiv.antecedente[i].m), 2.0);
      denominador = Math.pow(sigma, 2.0);
      suma += (numerador / denominador);
    }

    suma *= -1.0;
    result = Math.exp(suma);

    return (result);
  }
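
The matching degree above is the product of unnormalized Gaussian memberships over the input variables, which the code computes by summing the squared normalized distances before a single exponential:

$$
\omega(x) = \exp\!\left(- \sum_{i=1}^{n} \frac{(x_i - m_i)^2}{\sigma_i^2}\right)
= \prod_{i=1}^{n} \exp\!\left(- \frac{(x_i - m_i)^2}{\sigma_i^2}\right),
\qquad \sigma_i = s_i\,\bigl(\max_i - \min_i\bigr),\ s_i \in \{0.3, 0.4, 0.5, 0.6\}
$$

where m_i is the Gaussian center of the antecedent for variable i and s_i is selected by the integer code stored in indiv.antecedente[i].sigma.
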
Example #25
  /**
   * It reads the data from the input files (training, validation and test) and parses all the
   * parameters from the parameters array.
   *
   * @param parameters parseParameters It contains the input files, output files and parameters
   */
  public DMEL(parseParameters parameters) {

    train = new myDataset();
    val = new myDataset();
    test = new myDataset();
    try {
      System.out.println("\nReading the training set: " + parameters.getTrainingInputFile());
      train.readClassificationSet(parameters.getTrainingInputFile(), true);
      System.out.println("\nReading the validation set: " + parameters.getValidationInputFile());
      val.readClassificationSet(parameters.getValidationInputFile(), false);
      System.out.println("\nReading the test set: " + parameters.getTestInputFile());
      test.readClassificationSet(parameters.getTestInputFile(), false);
    } catch (IOException e) {
      System.err.println("There was a problem while reading the input data-sets: " + e);
      somethingWrong = true;
    }

    // We may check if there are some numerical attributes, because our algorithm may not handle
    // them:
    somethingWrong = somethingWrong || train.hasRealAttributes();

    outputTr = parameters.getTrainingOutputFile();
    outputTst = parameters.getTestOutputFile();
    outputRule = parameters.getOutputFile(0);

    // Now we parse the parameters, for example:

    seed = Long.parseLong(parameters.getParameter(0));

    popSize = Integer.parseInt(parameters.getParameter(1));
    pCross = Double.parseDouble(parameters.getParameter(2));
    pMut = Double.parseDouble(parameters.getParameter(3));
    numGenerations = Integer.parseInt(parameters.getParameter(4));

    // ...
  }
Example #26
  /** It initializes each individual in the population */
  void initializePopulation() {
    int i, j, num;
    double u;

    for (j = 0; j < tamPoblacion; j++) {
      /* First, the antecedent */
      for (i = 0; i < entradas; i++) {
        Poblacion.get(j).antecedente[i].m =
            Randomize.RanddoubleClosed(train.getMin(i), train.getMax(i));
        Poblacion.get(j).antecedente[i].sigma = Randomize.RandintClosed(1, 4);
      }

      /* Secondly, the consequent */
      do {
        num = 0;

        for (i = 0; i < entradas; i++) {
          u = Randomize.RandClosed();
          /* The term is used in the consequent */
          if (u < 0.5) {
            Poblacion.get(j).consecuente[i] = Randomize.RanddoubleClosed(-1.0, 1.0);
            //                        Poblacion.get(j).consecuente[i] =
            // Randomize.RanddoubleClosed(train.getMin(i) - 45.0, train.getMax(i) + 45.0);
            //                        Poblacion.get(j).consecuente[i] =
            // Randomize.RanddoubleClosed(-45.0, 45.0);
            if (Poblacion.get(j).consecuente[i] != 0.0) {
              num++;
            }
          }
          /* The term is NOT used in the consequent */
          else {
            Poblacion.get(j).consecuente[i] = 0.0;
          }
        }

        u = Randomize.RandClosed();
        /* The term is used in the consequent */
        if (u < 0.5) {
          //                    Poblacion.get(j).consecuente[entradas] = Randomize.RanddoubleClosed(
          // - 45.0, 45.0);
          Poblacion.get(j).consecuente[entradas] =
              Randomize.RanddoubleClosed(
                  -1.0 * ((train.getMax(entradas) - train.getMin(entradas)) / 2.0),
                  ((train.getMax(entradas) - train.getMin(entradas)) / 2.0));
          if (Poblacion.get(j).consecuente[entradas] != 0.0) {
            num++;
          }
        }
        /* The term is NOT used in the consequent */
        else {
          Poblacion.get(j).consecuente[entradas] = 0.0;
        }
      } while (num == 0);
    }
  }
Example #27
 /**
  * It generates the output file from a given dataset and stores it in a file
  *
  * @param dataset myDataset input dataset
  * @param filename String the name of the file
  * @return The classification accuracy
  */
 private double doOutput(myDataset dataset, String filename) {
   String output = new String("");
   int hits = 0;
   output = dataset.copyHeader(); // we insert the header in the output file
   // We write the output for each example
   for (int i = 0; i < dataset.getnData(); i++) {
     // for classification:
     String classOut = this.classificationOutput(dataset.getExample(i));
     output += dataset.getOutputAsString(i) + " " + classOut + "\n";
     if (dataset.getOutputAsString(i).equalsIgnoreCase(classOut)) {
       hits++;
     }
   }
   Files.writeFile(filename, output);
   return (1.0 * hits / dataset.size());
 }
Example #28
  /**
   * It prints the current population as a String
   *
   * @return String The current population as a String
   */
  public String Print_Population() {
    int i, j, sig;
    double sigma, ancho_intervalo;
    boolean anterior_nulo;
    String output = new String("");

    output += "Rule Base with " + Nr + " rules\n\n";

    for (i = 0; i < Nr; i++) {
      output += "Rule " + (i + 1) + ": IF ";
      for (j = 0; j < entradas; j++) {
        ancho_intervalo = train.getMax(j) - train.getMin(j);

        sigma = -1.0;
        sig = BestSistemaDifuso.get(i).antecedente[j].sigma;
        switch (sig) {
          case 1:
            sigma = 0.3;
            break;
          case 2:
            sigma = 0.4;
            break;
          case 3:
            sigma = 0.5;
            break;
          case 4:
            sigma = 0.6;
            break;
        }

        sigma *= ancho_intervalo;

        output +=
            "X("
                + (j + 1)
                + ") is Gaussian("
                + BestSistemaDifuso.get(i).antecedente[j].m
                + ", "
                + sigma
                + ")";
        if (j != (entradas - 1)) {
          output += " and ";
        }
      }

      output += " THEN Y = ";

      anterior_nulo = true;
      if (BestSistemaDifuso.get(i).consecuente[entradas] != 0.0) {
        anterior_nulo = false;
        output += "(" + BestSistemaDifuso.get(i).consecuente[entradas] + ")";
      }

      for (j = 0; j < entradas; j++) {
        if (BestSistemaDifuso.get(i).consecuente[j] != 0.0) {
          if (anterior_nulo == false) {
            output += " + ";
          }

          anterior_nulo = false;
          output += "(" + BestSistemaDifuso.get(i).consecuente[j] + " * X(" + (j + 1) + "))";
        }
      }

      output += "\n\n";
    }

    return (output);
  }
Example #29
  /**
   * It reads the data from the input files (training, validation and test) and parses all the
   * parameters from the parameters array.
   *
   * @param parameters parseParameters It contains the input files, output files and parameters
   */
  public Fuzzy_Ish(parseParameters parameters) {

    train = new myDataset();
    val = new myDataset();
    test = new myDataset();
    try {
      System.out.println("\nReading the training set: " + parameters.getTrainingInputFile());
      train.readClassificationSet(parameters.getTrainingInputFile(), true);
      System.out.println("\nReading the validation set: " + parameters.getValidationInputFile());
      val.readClassificationSet(parameters.getValidationInputFile(), false);
      System.out.println("\nReading the test set: " + parameters.getTestInputFile());
      test.readClassificationSet(parameters.getTestInputFile(), false);
    } catch (IOException e) {
      System.err.println("There was a problem while reading the input data-sets: " + e);
      somethingWrong = true;
    }

    // We may check if there are some numerical attributes, because our algorithm may not handle
    // them:
    // somethingWrong = somethingWrong || train.hasNumericalAttributes();
    // somethingWrong = somethingWrong || train.hasMissingAttributes();

    outputTr = parameters.getTrainingOutputFile();
    outputTst = parameters.getTestOutputFile();

    fileDB = parameters.getOutputFile(0);
    fileBR = parameters.getOutputFile(1);

    // Now we parse the parameters
    long seed = Long.parseLong(parameters.getParameter(0));
    // String aux = parameters.getParameter(1); //Computation of the compatibility degree
    combinationType = PRODUCT;
    /*if (aux.compareToIgnoreCase("minimum") == 0) {
      combinationType = MINIMUM;
    }
    aux = parameters.getParameter(2);*/
    ruleWeight = PCF_IV;
    /*if (aux.compareToIgnoreCase("Certainty_Factor") == 0) {
      ruleWeight = CF;
    }
    else if (aux.compareToIgnoreCase("Mansoory_Rule_Weight_System") == 0) {
      ruleWeight = MCF;
    }
    else if (aux.compareToIgnoreCase("Average_Penalized_Certainty_Factor") == 0) {
      ruleWeight = PCF_II;
    }
    aux = parameters.getParameter(3);*/
    inferenceType = WINNING_RULE;
    /*if (aux.compareToIgnoreCase("Additive_Combination") == 0) {
      inferenceType = ADDITIVE_COMBINATION;
    }*/

    nRules = Integer.parseInt(parameters.getParameter(1));
    if (nRules == 0) {
      if (train.getnInputs() < 10) {
        nRules = 5 * train.getnInputs(); // heuristic
      } else {
        nRules = 50;
      }
    }
    while (nRules % 10 != 0) {
      nRules++;
    } // In order to have no problems with "n_replace"
    if (nRules > train.getnData()) {
      nRules = train.getnInputs() / 10;
      nRules *= 10;
    }

    populationSize = Integer.parseInt(parameters.getParameter(2));
    while (populationSize % 10 != 0) {
      populationSize++;
    }

    crossProb = Double.parseDouble(parameters.getParameter(3));
    this.nGenerations = Integer.parseInt(parameters.getParameter(4));
    p_DC = Double.parseDouble(parameters.getParameter(5));
    michProb = Double.parseDouble(parameters.getParameter(6));

    Randomize.setSeed(seed);
  }
Example #30
  /** It applies the mutation genetic operator */
  void Mutation() {
    int i, j, aux1, num;
    double u, u2;

    for (j = 0; j < tamPoblacion; j++) {
      /* First, the antecedent */
      for (i = 0; i < entradas; i++) {
        u = Randomize.RandClosed();
        if (u < probMut) {
          Poblacion.get(j).antecedente[i].m =
              Randomize.RanddoubleClosed(train.getMin(i), train.getMax(i));
        }

        u = Randomize.RandClosed();
        if (u < probMut) {
          aux1 = Poblacion.get(j).antecedente[i].sigma;
          do {
            Poblacion.get(j).antecedente[i].sigma = Randomize.RandintClosed(1, 4);
          } while (aux1 == Poblacion.get(j).antecedente[i].sigma);
        }
      }

      /* Secondly, the consequent */
      num = 0;
      for (i = 0; i <= entradas; i++) {
        if (Poblacion.get(j).consecuente[i] != 0.0) {
          num++;
        }
      }

      for (i = 0; i < entradas; i++) {
        u = Randomize.RandClosed();
        if (u < probMut) {
          u2 = Randomize.RandClosed();
          if (u2 < 0.5) {
            Poblacion.get(j).consecuente[i] = Randomize.RanddoubleClosed(-1.0, 1.0);
          }
          /* The term is NOT used in the consequent */
          else {
            if (num != 1) {
              Poblacion.get(j).consecuente[i] = 0.0;
              num--;
            }
          }
        }
      }

      u = Randomize.RandClosed();
      if (u < probMut) {
        u2 = Randomize.RandClosed();
        if (u2 < 0.5) {
          Poblacion.get(j).consecuente[entradas] =
              Randomize.RanddoubleClosed(
                  -1.0 * ((train.getMax(entradas) - train.getMin(entradas)) / 2.0),
                  ((train.getMax(entradas) - train.getMin(entradas)) / 2.0));
          //                        Poblacion.get(j).consecuente[entradas] =
          // Randomize.RanddoubleClosed(-45.0, 45.0);
        }
        /* The term is NOT used in the consequent */
        else {
          if (num != 1) {
            Poblacion.get(j).consecuente[entradas] = 0.0;
            num--;
          }
        }
      }
    }
  }