/**
   * It reads the solutions file (header + ".var") and collects every non-empty solution as a pair
   * of bit strings: one for feature selection (FS) and one for instance selection (IS)
   *
   * @return the list of FS/IS bit strings of all non-empty solutions
   */
  private ArrayList<String> getAllSolutions() {
    ArrayList<String> solutions = new ArrayList<String>();
    // This procedure can be updated in order to select any other desirable solution
    Files function = new Files();
    String funcionStr = function.readFile(header + ".var");
    StringTokenizer lines = new StringTokenizer(funcionStr, "\n");

    while (lines.hasMoreTokens()) {
      StringTokenizer token = new StringTokenizer(lines.nextToken(), " ");
      String solutionFS = token.nextToken();
      solutionFS = solutionFS.replace("\t", "");
      if (solutionFS.contains("1")) {
        String solutionIS = token.nextToken();
        solutionIS = solutionIS.replace("\t", "");
        if (solutionIS.contains("1")) {
          solutions.add(solutionFS); // FS bit string, e.g. 111010110101
          solutions.add(solutionIS); // IS bit string, e.g. 111010110101
        } else {
          System.err.println("Skipping empty solution (IS)");
        }
      } else {
        System.err.println("Skipping empty solution (FS)");
      }
    }
    return solutions;
  }
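All of the snippets on this page rely on KEEL's Files I/O helper (readFile, writeFile, addToFile). The sketch below is a minimal, hypothetical re-implementation of that helper, assuming plain text files read with the platform default charset; the real KEEL class may differ in error handling and encoding. Since the methods are static, both the static calls used in the other examples and the instance call above (new Files().readFile(...)) compile.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class Files {

  /** Reads a whole text file into a String (hypothetical sketch). */
  public static String readFile(String fileName) {
    StringBuilder content = new StringBuilder();
    try (BufferedReader reader = new BufferedReader(new FileReader(fileName))) {
      String line;
      while ((line = reader.readLine()) != null) {
        content.append(line).append('\n');
      }
    } catch (IOException e) {
      throw new RuntimeException("Could not read " + fileName, e);
    }
    return content.toString();
  }

  /** Creates (or overwrites) a text file with the given content. */
  public static void writeFile(String fileName, String content) {
    write(fileName, content, false);
  }

  /** Appends the given content to an existing text file. */
  public static void addToFile(String fileName, String content) {
    write(fileName, content, true);
  }

  private static void write(String fileName, String content, boolean append) {
    try (PrintWriter out = new PrintWriter(new FileWriter(fileName, append))) {
      out.print(content);
    } catch (IOException e) {
      throw new RuntimeException("Could not write " + fileName, e);
    }
  }
}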
Example #2
  /**
   * Saves the network weights to a file
   *
   * @param file_name Output file name
   * @param header header of the data set to which the network has been fitted
   */
  protected void printNetworkToFile(String file_name, String header) {
    // write the header to the file
    Files.writeFile(file_name, header);

    Files.addToFile(file_name, "Number of neurons: " + nSel + "\n");
    for (int i = 0; i < nSel; i++) {
      Files.addToFile(file_name, "\nNeuron " + i + "\n");
      for (int j = 0; j < conjS[i].length; j++) {
        Files.addToFile(file_name, Double.toString(conjS[i][j]) + " ");
      }
      Files.addToFile(file_name, " Class = " + clasesS[i] + "\n");
    }
  }
  /**
   * It generates the output file from a given dataset and stores it in a file
   *
   * @param dataset myDataset input dataset
   * @param filename String the name of the file
   * @param test boolean true if the dataset corresponds to the test partition
   * @return the classification accuracy, averaged over the classes that appear in the dataset
   */
  private double doOutput(myDataset dataset, String filename, boolean test) {
    String output = new String("");
    confusionMatrix = new int[dataset.getnClasses()][dataset.getnClasses()];

    output = dataset.copyHeader(); // we insert the header in the output file
    // We write the output for each example
    for (int i = 0; i < dataset.getnData(); i++) {
      String clReal = dataset.getOutputAsString(i);
      String clPred = classificationOutput(i, test);
      confusionMatrix[dataset.getOutputAsInteger(i)][dataset.numericClass(clPred)]++;
      output += clReal + " " + clPred + "\n";
    }
    double acc = 0;
    int nClasses = 0;
    for (int i = 0; i < confusionMatrix.length; i++) {
      int count = 0;
      for (int j = 0; j < confusionMatrix[i].length; j++) {
        count += confusionMatrix[i][j];
      }
      if (count > 0) {
        acc += 1.0 * confusionMatrix[i][i] / count;
        nClasses++;
      }
    }
    Files.writeFile(filename, output);
    return acc / nClasses;
    // return 1.0*hits/dataset.size();
  }
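Note that the value returned above is not the raw hit rate (that variant is left commented out at the end) but the per-class accuracy averaged over the classes that actually occur, i.e. the mean recall. A small self-contained sketch of the same computation on a hypothetical 2x2 confusion matrix:

public class AverageAccuracyDemo {
  public static void main(String[] args) {
    // rows = real class, columns = predicted class (hypothetical counts)
    int[][] confusion = {{40, 10},   // class 0: 40 correct out of 50 -> 0.80
                         {5, 45}};   // class 1: 45 correct out of 50 -> 0.90
    double acc = 0;
    int nClasses = 0;
    for (int i = 0; i < confusion.length; i++) {
      int count = 0;
      for (int j = 0; j < confusion[i].length; j++) count += confusion[i][j];
      if (count > 0) { // skip classes with no examples
        acc += 1.0 * confusion[i][i] / count;
        nClasses++;
      }
    }
    System.out.println(acc / nClasses); // prints the mean per-class accuracy, 0.85 (up to rounding)
  }
}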
Example #4
  /**
   * In this method, all possible post-hoc statistical tests between the results of more than three
   * algorithms are executed, according to the configuration file
   *
   * @param data A matrix of double with the results of the algorithms to be compared
   * @param algorithms A vector of String with the names of the algorithms
   */
  public static void doMultiple(double data[][], String algorithms[]) {

    String outputFileName = Configuration.getPath();

    String outputString = new String("");
    outputString = header();

    outputString += runMultiple(data, algorithms);

    Files.writeFile(outputFileName, outputString);
  } // end-method
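  /**
   * Builds one Farchd model per FS/IS solution pair returned by getAllSolutions(), stores the
   * training AUC of each model, keeps the index of the model with the best training AUC and
   * writes a per-solution summary to the file header + "_AUC.txt"
   */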
  private void generateModel() {
    String salida = new String("");
    double max_auc = 0;
    ArrayList<String> solutions = this.getAllSolutions();
    models = new ArrayList<Farchd>();

    int nEjemplos = train.getnData();
    if (this.instances == this.MAJ) {
      nEjemplos = train.getMajority();
    }
    boolean[] variables = new boolean[train.getnInputs()];
    boolean[] ejemplos = new boolean[nEjemplos];
    this.weightsAUC = new double[solutions.size() / 2]; // each solution is a pair: FS and IS

    for (int i = 0, j = 0; i < solutions.size(); i += 2, j++) {
      int vars, ejs;
      vars = ejs = 0;
      variables = decode(solutions.get(i));
      ejemplos = decode(solutions.get(i + 1));
      for (int l = 0; l < variables.length; l++) {
        // variables[j] = solution[j];
        if (variables[l]) vars++;
      }
      for (int l = 0; l < ejemplos.length; l++) {
        if (ejemplos[l]) ejs++;
      }
      try {
        Farchd model = new Farchd(train, val, test, variables, ejemplos);

        /** ******** */
        // double fit = model.getAUCTr();
        double auc_tr = model.execute(true);
        double auc_tst = model.getAUCTst();
        if (auc_tr > max_auc) {
          max_auc = auc_tr;
          indexBest = j;
        }
        this.weightsAUC[j] = auc_tr;

        salida +=
            "Solution[" + j + "]:\t" + vars + "\t" + ejs + "\t" + auc_tr + "\t" + auc_tst + "\n";

        /** ******** */
        models.add(model);
      } catch (Exception e) {
        System.err.println("Liada maxima al generar modelo ");
        e.printStackTrace(System.err);
        System.exit(-1);
      }
    }
    System.out.print(salida);
    Files.writeFile(header + "_AUC.txt", salida);
  }
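generateModel() above delegates chromosome decoding to a decode() helper that is not shown. A minimal sketch, assuming each solution string carries one '0'/'1' character per variable or example:

  /** Decodes a chromosome string such as "111010110101" into a boolean selection mask (sketch). */
  private boolean[] decode(String chromosome) {
    boolean[] mask = new boolean[chromosome.length()];
    for (int k = 0; k < chromosome.length(); k++) {
      mask[k] = (chromosome.charAt(k) == '1'); // '1' means the variable/example is selected
    }
    return mask;
  }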
Example #6
  /**
   * Saves the ensemble to the file file_name
   *
   * @param file_name File name
   * @param header header of the data set to which the ensemble has been fitted
   */
  public void SaveEnsemble(String file_name, String header) {
    String name;

    // header of KEEL dataset
    Files.writeFile(file_name, header);
    // Save networks
    for (int i = 0; i < Nnetworks; i++) {
      Files.addToFile(file_name, "\n>>>>>>>>>>>>>>Network " + i + "\n");
      nets[i].SaveNetwork(file_name, header, true);
    }

    Files.addToFile(file_name, "\n\n********* Output weights:\n");
    // Save weights
    for (int i = 0; i < Noutputs; i++) {
      Files.addToFile(file_name, "Output: " + i + "\n");
      for (int j = 0; j < Nnetworks; j++) {
        Files.addToFile(file_name, Double.toString(weights[i][j]) + " ");
      }
    }
  }
Example #7
  /**
   * It generates the output file from a given dataset and stores it in a file
   *
   * @param dataset myDataset input dataset
   * @param filename String the name of the file
   */
  private void doOutput(myDataset dataset, String filename) {
    int i;
    double fuerza;
    String output = new String("");

    output = dataset.copyHeader(); // we insert the header in the output file

    for (i = 0; i < dataset.getnData(); i++) {
      fuerza = Output_fuzzy_system(dataset.getExample(i));
      output += (dataset.getOutputAsReal(i) + " " + fuerza + " " + "\n");
    }

    Files.writeFile(filename, output);
  }
Example #8
    /**
     * Constructor of the Class Parametros
     *
     * @param nombreFileParametros is the pathname of input parameter file
     */
    Parametros(String nombreFileParametros) {

      try {
        int i;
        String fichero, linea, tok;
        StringTokenizer lineasFile, tokens;

        /* read the parameter file using Files class */
        fichero = Files.readFile(nombreFileParametros);
        fichero += "\n";

        /* remove all \r characters. This is necessary for correct behaviour on both Windows and UNIX */
        fichero = fichero.replace('\r', ' ');

        /* extract the different tokens (lines) of the file */
        lineasFile = new StringTokenizer(fichero, "\n");

        i = 0;
        while (lineasFile.hasMoreTokens()) {

          linea = lineasFile.nextToken();
          i++;
          tokens = new StringTokenizer(linea, " ,\t");
          if (tokens.hasMoreTokens()) {

            tok = tokens.nextToken();
            if (tok.equalsIgnoreCase("algorithm")) nameAlgorithm = getParamString(tokens);
            else if (tok.equalsIgnoreCase("inputdata")) getInputFiles(tokens);
            else if (tok.equalsIgnoreCase("outputdata")) getOutputFiles(tokens);
            else if (tok.equalsIgnoreCase("seed")) seed = getParamLong(tokens);
            else throw new java.io.IOException("Syntax error on line " + i + ": [" + tok + "]\n");
          }
        }

      } catch (java.io.FileNotFoundException e) {
        System.err.println(e + "Parameter file");
      } catch (java.io.IOException e) {
        System.err.println(e + "Aborting program");
        System.exit(-1);
      }

      /* show the parameters read on the standard output */
      String contents = "-- Parameters echo --- \n";
      contents += "Algorithm name: " + nameAlgorithm + "\n";
      contents += "Input Train File: " + trainFileNameInput + "\n";
      contents += "Input Test File: " + testFileNameInput + "\n";
      contents += "Output Train File: " + trainFileNameOutput + "\n";
      contents += "Output Test File: " + testFileNameOutput + "\n";
      System.out.println(contents);
    }
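The constructor above delegates to helpers such as getParamString, getParamLong, getInputFiles and getOutputFiles, which are not shown. Assuming each configuration line has the form "key = value" and the same tokenizer (delimiters: space, comma, tab), the two value readers could look roughly like this (hypothetical sketch, not KEEL's actual code):

    /** Skips an optional "=" token and returns the parameter value as a String (sketch). */
    private String getParamString(StringTokenizer tokens) {
      String value = tokens.nextToken(); // either "=" or the value itself
      if (value.equals("=")) {
        value = tokens.nextToken();      // the value follows the "=" sign
      }
      return value;
    }

    /** Same idea for numeric parameters such as the seed. */
    private long getParamLong(StringTokenizer tokens) {
      return Long.parseLong(getParamString(tokens));
    }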
Example #9
  /**
   * Prints the neuron to a file, or to the standard output if the file name is empty.
   *
   * @param _fileName Name of the file.
   */
  public void printRbf(String _fileName) {
    int i;

    if (_fileName != "") {
      Files.addToFile(_fileName, "   Radius " + radius + "\n");
    } else {
      System.out.println("   Radius " + radius);
    }
    for (i = 0; i < nInput; i++) {
      if (_fileName != "") {
        Files.addToFile(_fileName, "   Center " + centre[i] + "\n");
      } else {
        System.out.println("   Center " + centre[i]);
      }
    }

    for (i = 0; i < nOutput; i++) {
      if (_fileName != "") {
        Files.addToFile(_fileName, "   Weigth " + getWeight(i) + "\n");
      } else {
        System.out.println("   Weigth " + getWeight(i));
      }
    }
  }
Example #10
  /** Method interface for the Automatic Branch and Bound (ABB) algorithm */
  public void ejecutar() {
    String resultado;
    int i, numFeatures;
    Date d;

    d = new Date();
    resultado =
        "RESULTS generated at "
            + String.valueOf((Date) d)
            + " \n--------------------------------------------------\n";
    resultado += "Algorithm Name: " + params.nameAlgorithm + "\n";

    /* call of ABB algorithm */
    runABB();

    resultado += "\nPARTITION Filename: " + params.trainFileNameInput + "\n---------------\n\n";
    resultado += "Features selected: \n";

    for (i = numFeatures = 0; i < features.length; i++)
      if (features[i] == true) {
        resultado += Attributes.getInputAttribute(i).getName() + " - ";
        numFeatures++;
      }

    resultado +=
        "\n\n"
            + String.valueOf(numFeatures)
            + " features of "
            + Attributes.getInputNumAttributes()
            + "\n\n";

    resultado +=
        "Error in test (using train for prediction): "
            + String.valueOf(data.validacionCruzada(features))
            + "\n";
    resultado +=
        "Error in test (using test for prediction): "
            + String.valueOf(data.LVOTest(features))
            + "\n";

    resultado += "---------------\n";

    System.out.println("Experiment completed successfully");

    /* writes the results file and generates the new training and test datasets
       containing only the selected features */
    Files.writeFile(params.extraFileNameOutput, resultado);
    data.generarFicherosSalida(params.trainFileNameOutput, params.testFileNameOutput, features);
  }
Example #11
 /**
  * It generates the output file from a given dataset and stores it in a file
  *
  * @param dataset myDataset input dataset
  * @param filename String the name of the file
  * @return The classification accuracy
  */
 private double doOutput(myDataset dataset, String filename) {
   String output = new String("");
   int hits = 0;
   output = dataset.copyHeader(); // we insert the header in the output file
   // We write the output for each example
   for (int i = 0; i < dataset.getnData(); i++) {
     // for classification:
     String classOut = this.classificationOutput(dataset.getExample(i));
     output += dataset.getOutputAsString(i) + " " + classOut + "\n";
     if (dataset.getOutputAsString(i).equalsIgnoreCase(classOut)) {
       hits++;
     }
   }
   Files.writeFile(filename, output);
   return (1.0 * hits / dataset.size());
 }
Example #12
  /**
   * Reads the parameters of the algorithm.
   *
   * @param script Configuration script
   */
  @Override
  protected void readParameters(String script) {

    String file;
    String line;
    StringTokenizer fileLines, tokens;

    file = Files.readFile(script);
    fileLines = new StringTokenizer(file, "\n\r");

    // Discard in/out files definition
    fileLines.nextToken();
    fileLines.nextToken();
    fileLines.nextToken();

    // Getting the seed
    line = fileLines.nextToken();
    tokens = new StringTokenizer(line, "=");
    tokens.nextToken();
    seed = Long.parseLong(tokens.nextToken().substring(1));

    // Getting the K parameter
    line = fileLines.nextToken();
    tokens = new StringTokenizer(line, "=");
    tokens.nextToken();
    K = Integer.parseInt(tokens.nextToken().substring(1));

    // Getting the M parameter
    line = fileLines.nextToken();
    tokens = new StringTokenizer(line, "=");
    tokens.nextToken();
    M = Double.parseDouble(tokens.nextToken().substring(1));

    // Getting the Max Iterations parameter
    line = fileLines.nextToken();
    tokens = new StringTokenizer(line, "=");
    tokens.nextToken();
    maxIterations = Integer.parseInt(tokens.nextToken().substring(1));

    // Getting the delta parameter
    line = fileLines.nextToken();
    tokens = new StringTokenizer(line, "=");
    tokens.nextToken();
    delta = Double.parseDouble(tokens.nextToken().substring(1));
  } // end-method
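Every parameter above is read with the same tokenize-on-"=" pattern, where substring(1) drops the space that follows the "=" sign in KEEL configuration scripts. A small hypothetical helper that factors the pattern out (a sketch, not part of the original class):

  /** Returns the value of a "name = value" configuration line (hypothetical helper). */
  private static String parameterValue(String line) {
    StringTokenizer tokens = new StringTokenizer(line, "=");
    tokens.nextToken();               // discard the parameter name
    return tokens.nextToken().trim(); // the value, with the leading space removed
  }

  // Usage sketch: seed = Long.parseLong(parameterValue(fileLines.nextToken()));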
Example #13
  /**
   * Defined to manage the semantics of the linguistic variables. It generates the semantics of the
   * linguistic variables using a partition of symmetric triangular fuzzy sets. The cut points are
   * stored at the 0.5 level of the fuzzy sets, to be considered in the computation of the
   * information gain. It also writes the semantics of the linguistic variables to the specified
   * file
   *
   * @param nFile Name of the file where the semantics are written
   */
  public void InitSemantics(String nFile) {
    int v, etq;
    float marca, valor, p_corte;
    float auxX0, auxX1, auxX3, auxY;
    String contents;

    contents = "\n--------------------------------------------\n";
    contents += "|  Semantics for the continuous variables  |\n";
    contents += "--------------------------------------------\n";

    for (v = 0; v < num_vars; v++) {
      if (var[v].getContinuous() == true) {
        marca = (var[v].getMax() - var[v].getMin()) / ((float) (var[v].getNLabels() - 1));
        p_corte = var[v].getMin() + marca / 2;
        contents += "Fuzzy sets parameters for variable " + var[v].getName() + ":\n";
        for (etq = 0; etq < var[v].getNLabels(); etq++) {
          valor = var[v].getMin() + marca * (etq - 1);
          auxX0 = Round(valor, var[v].getMax());
          valor = var[v].getMin() + marca * etq;
          auxX1 = Round(valor, var[v].getMax());
          valor = var[v].getMin() + marca * (etq + 1);
          auxX3 = Round(valor, var[v].getMax());
          auxY = 1;
          BaseDatos[v][etq].setVal(auxX0, auxX1, auxX3, auxY);
          p_corte += marca;
          contents +=
              "\tLabel "
                  + etq
                  + ": "
                  + BaseDatos[v][etq].getX0()
                  + " "
                  + BaseDatos[v][etq].getX1()
                  + " "
                  + BaseDatos[v][etq].getX3()
                  + "\n";
        }
      }
    }
    contents += "\n";
    if (nFile != "") Files.addToFile(nFile, contents);
  }
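For a continuous variable with range [min, max] and L labels, the code above uses a step marca = (max - min) / (L - 1) and gives label etq the triangle with vertices min + marca*(etq-1), min + marca*etq and min + marca*(etq+1). A tiny standalone sketch that prints those vertices for a hypothetical variable in [0, 10] with 5 labels (without the Round() clamping used above):

public class TrianglePartitionDemo {
  public static void main(String[] args) {
    float min = 0f, max = 10f; // hypothetical variable range
    int nLabels = 5;
    float marca = (max - min) / (nLabels - 1); // 2.5
    for (int etq = 0; etq < nLabels; etq++) {
      float x0 = min + marca * (etq - 1); // left vertex (membership 0)
      float x1 = min + marca * etq;       // centre (membership 1)
      float x3 = min + marca * (etq + 1); // right vertex (membership 0)
      System.out.println("Label " + etq + ": " + x0 + " " + x1 + " " + x3);
    }
  }
}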
Example #14
 /**
  * It obtains all the necessary information from the parameter file<br>
  * First, it reads the names of the training and tests data-set files<br>
  * Then, it reads the output files<br>
  * Finally, it reads the algorithm parameters, such as the seed or the number of iterations<br>
  *
  * @param nomFichero Name of the parameter file
  */
 private void preparaArgumentos(String nomFichero) {
   StringTokenizer linea, datos;
   String fichero =
       Files.readFile(nomFichero); // the whole file is stored as a String so it can be processed:
   String una_linea;
   linea = new StringTokenizer(fichero, "\n\r");
   linea.nextToken(); // Algorithm name
   una_linea = linea.nextToken();
   datos = new StringTokenizer(una_linea, " = \" ");
   datos.nextToken(); // inputData
   ficheroTrain = datos.nextToken();
   ficheroEval = datos.nextToken(); // Validation file
   ficheroTest = datos.nextToken();
   una_linea = linea.nextToken();
   datos = new StringTokenizer(una_linea, " = \" ");
   datos.nextToken(); // outputData
   ficheroSalidatr = datos.nextToken();
   ficheroSalidatst = datos.nextToken();
   ficheroSalida = datos.nextToken();
   una_linea = linea.nextToken();
   datos = new StringTokenizer(una_linea, " = \" ");
   datos.nextToken(); // covered
   seCubre = Double.parseDouble(datos.nextToken());
   una_linea = linea.nextToken();
   datos = new StringTokenizer(una_linea, " = \" ");
   datos.nextToken(); // star size
   tamEstrella = Integer.parseInt(datos.nextToken());
   una_linea = linea.nextToken();
   datos = new StringTokenizer(una_linea, " = \" ");
   datos.nextToken(); // accurate
   String eficaciaAux = datos.nextToken();
   eficacia = 0;
   if (eficaciaAux.compareTo("YES") == 0) {
     eficacia = 1;
   }
 }
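The tokenizer above uses the delimiter set " =\"" (space, equals sign and double quote), so a KEEL-style line splits directly into the keyword and the quoted file names. A small standalone sketch on a hypothetical inputData line:

import java.util.StringTokenizer;

public class ParamLineDemo {
  public static void main(String[] args) {
    // hypothetical configuration line in the format parsed above
    String una_linea = "inputData = \"iris-train.dat\" \"iris-val.dat\" \"iris-test.dat\"";
    StringTokenizer datos = new StringTokenizer(una_linea, " = \" ");
    System.out.println(datos.nextToken()); // inputData
    System.out.println(datos.nextToken()); // iris-train.dat
    System.out.println(datos.nextToken()); // iris-val.dat
    System.out.println(datos.nextToken()); // iris-test.dat
  }
}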
Example #15
  /** It launches the algorithm */
  public void execute() {
    int i, j, num;
    double fitness, fitness2;

    if (somethingWrong) { // We do not execute the program
      System.err.println(
          "An error was found: either the data-set has numerical values or it has missing values.");
      System.err.println("Aborting the program");
      // We should not use the statement: System.exit(-1);
    } else {
      // We do here the algorithm's operations
      Randomize.setSeed(semilla);

      /* Generation of the initial population */
      System.out.println("Creating the initial population.");
      initializePopulation();
      Gen = 0;
      GenSincambio = 0;
      Bestperformance = -1.0;

      /* Main of the genetic algorithm */
      System.out.println("Starting the evolutionary process.");
      do {
        /* First, all rules' fitness is set to 0 */
        for (i = 0; i < tamPoblacion; i++) {
          Poblacion.get(i).fitness = 0.0;
          Poblacion.get(i).n_SistemasDifusos = 0;
        }

        /* Then, Nf fuzzy systems are created */
        for (i = 0; i < Nf; i++) {
          /* A fuzzy system containing Nr rules from the population is created */
          Create_fuzzy_system();

          /* The fitness associated with this fuzzy system is calculated */
          fitness = Evaluate_fuzzy_system();

          /* The fitness value is accumulated among the rules in the fuzzy system */
          Accumulate_fitness_fuzzy_system(fitness);

          /* If the fitness of the current fuzzy system outperforms the best evolved one,
          we update this last one */
          if (fitness > Bestperformance) {
            Bestperformance = fitness;
            GenSincambio = -1;

            BestSistemaDifuso.clear();
            for (j = 0; j < Nr; j++) {
              Individual indi = new Individual(Poblacion.get(vectorNr[j]));
              BestSistemaDifuso.add(indi);
            }
          }
        }

        /* The accumulated fitness value of each individual in the population is divided
        by the number of times it has been selected */
        for (i = 0; i < tamPoblacion; i++) {
          if (Poblacion.get(i).n_SistemasDifusos != 0) {
            Poblacion.get(i).fitness /= Poblacion.get(i).n_SistemasDifusos;
          } else {
            Poblacion.get(i).fitness = 0.0;
          }

          /* Now we count the number of parameters used in the consequent, in order to
          give a better fitness to those rules with a lower number of parameters */
          num = 0;
          for (j = 0; j < entradas; j++) {
            if (Poblacion.get(i).consecuente[j] != 0.0) {
              num++;
            }
          }
          if (Poblacion.get(i).consecuente[entradas] != 0.0) {
            num++;
          }

          Poblacion.get(i).fitness /= (K + num);
        }

        /* we increment the counter of the number of generations */
        Gen++;
        GenSincambio++;

        if (GenSincambio == numGenMigration) {
          /* Migration stage: half of the population (the worst half) is randomly generated again
          to increase the searching ability of the genetic process */
          System.out.println(
              "Migrating half of the population in order to restart the evolutionary process.");
          Migration();
          GenSincambio = 0;
        } else {
          /* Reproduction stage (includes crossover) */
          Reproduction();

          /* Mutation */
          Mutation();
        }
        System.out.println("Iteration: " + Gen + ". Best fitness: " + (1.0 / Bestperformance));
      } while (Gen <= numGeneraciones);

      String salida = new String("");
      salida += Print_Population();

      SistemaDifuso.clear();
      for (i = 0; i < Nr; i++) {
        Individual indi = new Individual(BestSistemaDifuso.get(i));
        SistemaDifuso.add(indi);
      }

      salida += "MSE Training:\t" + (1.0 / Bestperformance) + "%\n";
      salida += "MSE Test:\t\t" + Evaluate_best_fuzzy_system_in_test() + "%\n\n";

      Files.writeFile(outputBC, salida);

      doOutput(this.val, this.outputTr);
      doOutput(this.test, this.outputTst);

      System.out.println("Algorithm Finished.");
    }
  }
Example #16
  /**
   * Computes and stores the information gain of each attribute (variable) of the dataset
   *
   * @param Examples Set of instances of the dataset
   * @param nFile Name of the file
   */
  public void GainInit(TableDat Examples, String nFile) {

    int i, j, h, v;
    boolean encontrado;
    float info_gk, suma, suma1, suma2, p_clase, logaritmo;
    int num_clase[] = new int[n_clases];
    int n_vars = this.getNVars();
    int MaxVal = this.getMaxVal();
    float p[][] = new float[n_vars][MaxVal];
    float p_cond[][][] = new float[n_clases][n_vars][MaxVal];
    GI = new float[n_vars];
    intervalosGI = new float[n_vars][MaxVal];

    String contents;
    contents = "\n--------------------------------------------\n";
    contents += "|       Computation of the info gain       |\n";
    contents += "--------------------------------------------\n";
    contents += "Points for computation of the info gain:\n";

    // Loads the values for "intervalosGI"
    float marca, p_corte;
    for (int v1 = 0; v1 < n_vars; v1++) {
      if (this.getContinuous(v1) == true) {
        contents += "\tVariable " + var[v1].getName() + ": ";
        marca = (this.getMax(v1) - this.getMin(v1)) / ((float) (this.getNLabelVar(v1) - 1));
        p_corte = this.getMin(v1) + marca / 2;
        for (int et = 0; et < this.getNLabelVar(v1); et++) {
          intervalosGI[v1][et] = p_corte;
          contents += intervalosGI[v1][et] + "  ";
          p_corte += marca;
        }
        contents += "\n";
      }
    }

    // Structure initialization
    for (i = 0; i < n_clases; i++) num_clase[i] = 0;
    for (i = 0; i < n_vars; i++)
      for (j = 0; j < MaxVal; j++) {
        p[i][j] = 0; // Simple probabilities matrix
        for (h = 0; h < n_clases; h++) p_cond[h][i][j] = 0; // Conditional probabilities matrix
      }

    // Computation of the Simple and Conditional probability matrices
    for (i = 0; i < Examples.getNEx(); i++) {
      num_clase[Examples.getClass(i)]++; // distribution by classes
      for (j = 0; j < n_vars; j++) { // distribution by values
        if (!this.getContinuous(j)) { // Discrete variable
          if (!Examples.getLost(this, i, j)) {
            // if the value is not a lost one
            p[j][(int) Examples.getDat(i, j)]++;
            p_cond[(int) Examples.getClass(i)][j][(int) Examples.getDat(i, j)]++;
          }
        } else { // Continuous variable
          encontrado = false;
          h = 0;
          while (!encontrado && h < this.getNLabelVar(j)) {
            if (Examples.getDat(i, j) <= intervalosGI[j][h]) encontrado = true;
            else h++;
          }
          if (encontrado == true) {
            p[j][h]++;
            p_cond[(int) Examples.getClass(i)][j][h]++;
          } else {
            if (!Examples.getLost(this, i, j)) {
              // the value is not a missing one, so it should have matched some interval
              System.out.println(
                  "Error while computing the information gain: variable " + j + ", example " + i);
              return;
            }
          }
        }
      }
    }
    for (h = 0; h < n_clases; h++)
      for (i = 0; i < n_vars; i++) {
        if (!this.getContinuous(i)) { // Discrete variable
          for (j = (int) this.getMin(i); j <= (int) this.getMax(i); j++)
            p_cond[h][i][j] = p_cond[h][i][j] / Examples.getNEx();
        } else { // Continuous variable
          for (j = 0; j < this.getNLabelVar(i); j++)
            p_cond[h][i][j] = p_cond[h][i][j] / Examples.getNEx();
        }
      }
    for (i = 0; i < n_vars; i++) {
      if (!this.getContinuous(i)) { // Discrete variable
        for (j = (int) this.getMin(i); j <= (int) this.getMax(i); j++)
          p[i][j] = p[i][j] / Examples.getNEx();
      } else { // Continuous variable
        for (j = 0; j < this.getNLabelVar(i); j++) p[i][j] = p[i][j] / Examples.getNEx();
      }
    }

    // Info Gk computation
    suma = 0;
    for (i = 0; i < n_clases; i++) {
      p_clase = ((float) num_clase[i]) / Examples.getNEx();
      if (p_clase > 0) {
        logaritmo = (float) (Math.log((double) p_clase) / Math.log(2));
        suma += p_clase * logaritmo;
      }
    }
    info_gk = (-1) * suma;

    // Information gain computation for each attribute
    for (v = 0; v < n_vars; v++) {
      suma = info_gk;
      suma1 = 0;
      if (!this.getContinuous(v)) { // Discrete Variable
        for (i = (int) this.getMin(v); i <= (int) this.getMax(v); i++) {
          suma2 = 0;
          for (j = 0; j < n_clases; j++)
            if (p_cond[j][v][i] > 0) {
              logaritmo = (float) (Math.log(p_cond[j][v][i]) / Math.log(2));
              suma2 += p_cond[j][v][i] * logaritmo;
            }
          suma1 += p[v][i] * (-1) * suma2;
        }
      } else { // Continuous Variable
        for (i = 0; i < this.getNLabelVar(v); i++) {
          suma2 = 0;
          for (j = 0; j < n_clases; j++)
            if (p_cond[j][v][i] > 0) {
              logaritmo = (float) (Math.log(p_cond[j][v][i]) / Math.log(2));
              suma2 += p_cond[j][v][i] * logaritmo;
            }
          suma1 += p[v][i] * (-1) * suma2;
        }
      }
      GI[v] = suma + (-1) * suma1;
    }

    contents += "Information Gain of the variables:\n";
    for (v = 0; v < n_vars; v++) {
      if (this.getContinuous(v) == true)
        contents += "\tVariable " + var[v].getName() + ": " + GI[v] + "\n";
    }

    if (nFile != "") Files.addToFile(nFile, contents);
  }
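GI[v] above follows the information-gain idea: the class entropy minus the class entropy that remains once the (discretized) variable is known. The standalone sketch below computes the textbook version of that quantity on hypothetical counts for one binary variable and two classes; the code above uses its own probability estimates, so the numbers need not coincide exactly:

public class InfoGainDemo {
  public static void main(String[] args) {
    // hypothetical joint counts: rows = values of a binary variable, columns = classes
    int[][] counts = {{30, 10},  // value 0: 30 examples of class 0, 10 of class 1
                      {10, 50}}; // value 1: 10 examples of class 0, 50 of class 1
    int total = 100;

    double hClass = 0; // class entropy H(C)
    for (int c = 0; c < 2; c++) {
      double pc = (double) (counts[0][c] + counts[1][c]) / total;
      hClass -= pc * Math.log(pc) / Math.log(2);
    }

    double hCond = 0; // conditional entropy H(C | V)
    for (int v = 0; v < 2; v++) {
      int nv = counts[v][0] + counts[v][1];
      double pv = (double) nv / total;
      for (int c = 0; c < 2; c++) {
        double pcv = (double) counts[v][c] / nv;
        if (pcv > 0) hCond -= pv * pcv * Math.log(pcv) / Math.log(2);
      }
    }
    System.out.println("Information gain = " + (hClass - hCond)); // roughly 0.26 bits
  }
}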
Example #17
 /**
  * It stores the data base in a given file
  *
  * @param filename Name for the database file
  */
 public void saveFile(String filename) {
   String stringOut = new String("");
   stringOut = printString();
   Files.writeFile(filename, stringOut);
 }
Example #18
 /**
  * Outputs the contents of the CMAR rule linked list (if any) to the given file
  *
  * @param filename Name of the output file
  */
 public void outputCMARrules(String filename) {
   String stringOut = new String("");
   stringOut = outputRules(startCMARrulelist);
   Files.writeFile(filename, stringOut);
 }