/**
 * It generates the output file from a given dataset and stores it in a file.
 * It also fills the confusion matrix and computes the accuracy averaged over the classes.
 *
 * @param dataset myDataset input dataset
 * @param filename String the name of the file
 * @param test true if the dataset is the test partition
 * @return the classification accuracy averaged over the classes present in the dataset
 */
private double doOutput(myDataset dataset, String filename, boolean test) {
  String output = new String("");
  confusionMatrix = new int[dataset.getnClasses()][dataset.getnClasses()];
  output = dataset.copyHeader(); // we insert the header in the output file
  // We write the output for each example
  for (int i = 0; i < dataset.getnData(); i++) {
    String clReal = dataset.getOutputAsString(i);
    String clPred = classificationOutput(i, test);
    confusionMatrix[dataset.getOutputAsInteger(i)][dataset.numericClass(clPred)]++;
    output += clReal + " " + clPred + "\n";
  }
  double acc = 0;
  int nClasses = 0;
  for (int i = 0; i < confusionMatrix.length; i++) {
    int count = 0;
    for (int j = 0; j < confusionMatrix[i].length; j++) {
      count += confusionMatrix[i][j];
    }
    if (count > 0) {
      acc += 1.0 * confusionMatrix[i][i] / count;
      nClasses++;
    }
  }
  Files.writeFile(filename, output);
  return acc / nClasses; // average per-class accuracy over the classes present in the dataset
}
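/*
 * Illustration (not part of the sources above): the value returned by the method above
 * is the accuracy averaged over the classes that actually appear in the dataset (each
 * class contributes its own recall), not the overall hit rate. The following standalone
 * sketch shows that computation on a hypothetical 2x2 confusion matrix; all values are
 * made up for illustration.
 */
public class AverageAccuracyDemo {
  public static void main(String[] args) {
    // Hypothetical confusion matrix: rows = real class, columns = predicted class.
    int[][] cm = { {90, 10},   // class 0: 90 hits out of 100 examples
                   { 5,  5} }; // class 1:  5 hits out of  10 examples
    double acc = 0;
    int nClasses = 0;
    for (int i = 0; i < cm.length; i++) {
      int count = 0;
      for (int j = 0; j < cm[i].length; j++) {
        count += cm[i][j];
      }
      if (count > 0) { // classes with no examples are ignored
        acc += 1.0 * cm[i][i] / count;
        nClasses++;
      }
    }
    // (0.90 + 0.50) / 2 = 0.70, while the overall hit rate would be 95/110 ~ 0.86
    System.out.println("Average per-class accuracy: " + acc / nClasses);
  }
}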
/**
 * In this method, all possible post hoc statistical tests between more than three
 * algorithms' results are executed, according to the configuration file.
 *
 * @param data A double matrix with the results to compare
 * @param algorithms A vector of String with the names of the algorithms
 */
public static void doMultiple(double data[][], String algorithms[]) {
  String outputFileName = Configuration.getPath();
  String outputString = new String("");
  outputString = header();
  outputString += runMultiple(data, algorithms);
  Files.writeFile(outputFileName, outputString);
} // end-method
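/*
 * Illustration (not part of the sources above): a hypothetical invocation of doMultiple.
 * The layout of the results matrix (one row per data set, one column per algorithm),
 * the algorithm names and the class name StatTest are assumptions made only for this
 * sketch; the report is written to the path returned by Configuration.getPath().
 */
public class MultipleTestDemo {
  public static void main(String[] args) {
    double[][] data = {
        {0.81, 0.79, 0.85},
        {0.74, 0.77, 0.80},
        {0.90, 0.88, 0.91},
        {0.66, 0.70, 0.68}
    };
    String[] algorithms = {"C4.5", "kNN", "SVM"};
    StatTest.doMultiple(data, algorithms); // StatTest stands in for the declaring class
  }
}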
/**
 * It builds a Farchd model for each pair of solutions (feature selection and instance
 * selection) found by the evolutionary process, keeping track of the one with the best
 * training AUC.
 */
private void generateModel() {
  String salida = new String("");
  double max_auc = 0;
  ArrayList<String> solutions = this.getAllSolutions();
  models = new ArrayList<Farchd>();
  int nEjemplos = train.getnData();
  if (this.instances == this.MAJ) {
    nEjemplos = train.getMajority();
  }
  boolean[] variables = new boolean[train.getnInputs()];
  boolean[] ejemplos = new boolean[nEjemplos];
  this.weightsAUC = new double[solutions.size() / 2]; // There are 2 solutions per model: FS and IS
  for (int i = 0, j = 0; i < solutions.size(); i += 2, j++) {
    int vars, ejs;
    vars = ejs = 0;
    variables = decode(solutions.get(i));     // feature selection chromosome
    ejemplos = decode(solutions.get(i + 1));  // instance selection chromosome
    for (int l = 0; l < variables.length; l++) {
      if (variables[l]) vars++;
    }
    for (int l = 0; l < ejemplos.length; l++) {
      if (ejemplos[l]) ejs++;
    }
    try {
      Farchd model = new Farchd(train, val, test, variables, ejemplos);
      double auc_tr = model.execute(true);
      double auc_tst = model.getAUCTst();
      if (auc_tr > max_auc) {
        max_auc = auc_tr;
        indexBest = j;
      }
      this.weightsAUC[j] = auc_tr;
      salida += "Solution[" + j + "]:\t" + vars + "\t" + ejs + "\t" + auc_tr + "\t" + auc_tst + "\n";
      models.add(model);
    } catch (Exception e) {
      System.err.println("Fatal error while generating the model");
      e.printStackTrace(System.err);
      System.exit(-1);
    }
  }
  System.out.print(salida);
  Files.writeFile(header + "_AUC.txt", salida);
}
/**
 * Saves the network weights to a file
 *
 * @param file_name Output file name
 * @param header Header of the data set for which the network has been adjusted
 */
protected void printNetworkToFile(String file_name, String header) {
  // write the header to the file
  Files.writeFile(file_name, header);
  Files.addToFile(file_name, "Number of neurons: " + nSel + "\n");
  for (int i = 0; i < nSel; i++) {
    Files.addToFile(file_name, "\nNeuron " + i + "\n");
    for (int j = 0; j < conjS[i].length; j++) {
      Files.addToFile(file_name, Double.toString(conjS[i][j]) + " ");
    }
    Files.addToFile(file_name, " Class = " + clasesS[i] + "\n");
  }
}
/**
 * It generates the output file from a given dataset and stores it in a file
 *
 * @param dataset myDataset input dataset
 * @param filename String the name of the file
 */
private void doOutput(myDataset dataset, String filename) {
  int i;
  double fuerza;
  String output = new String("");
  output = dataset.copyHeader(); // we insert the header in the output file
  for (i = 0; i < dataset.getnData(); i++) {
    fuerza = Output_fuzzy_system(dataset.getExample(i));
    output += (dataset.getOutputAsReal(i) + " " + fuerza + " " + "\n");
  }
  Files.writeFile(filename, output);
}
/** Method interface for Automatic Branch and Bound */
public void ejecutar() {
  String resultado;
  int i, numFeatures;
  Date d;

  d = new Date();
  resultado = "RESULTS generated at " + String.valueOf((Date) d)
      + " \n--------------------------------------------------\n";
  resultado += "Algorithm Name: " + params.nameAlgorithm + "\n";

  /* call of ABB algorithm */
  runABB();

  resultado += "\nPARTITION Filename: " + params.trainFileNameInput + "\n---------------\n\n";
  resultado += "Features selected: \n";

  for (i = numFeatures = 0; i < features.length; i++) {
    if (features[i]) {
      resultado += Attributes.getInputAttribute(i).getName() + " - ";
      numFeatures++;
    }
  }

  resultado += "\n\n" + String.valueOf(numFeatures) + " features of "
      + Attributes.getInputNumAttributes() + "\n\n";
  resultado += "Error in test (using train for prediction): "
      + String.valueOf(data.validacionCruzada(features)) + "\n";
  resultado += "Error in test (using test for prediction): "
      + String.valueOf(data.LVOTest(features)) + "\n";
  resultado += "---------------\n";

  System.out.println("Experiment completed successfully");

  /* writes the results report */
  Files.writeFile(params.extraFileNameOutput, resultado);

  /* creates the new training and test datasets only with the selected features */
  data.generarFicherosSalida(params.trainFileNameOutput, params.testFileNameOutput, features);
}
/**
 * It generates the output file from a given dataset and stores it in a file
 *
 * @param dataset myDataset input dataset
 * @param filename String the name of the file
 * @return The classification accuracy
 */
private double doOutput(myDataset dataset, String filename) {
  String output = new String("");
  int hits = 0;
  output = dataset.copyHeader(); // we insert the header in the output file
  // We write the output for each example
  for (int i = 0; i < dataset.getnData(); i++) {
    // for classification:
    String classOut = this.classificationOutput(dataset.getExample(i));
    output += dataset.getOutputAsString(i) + " " + classOut + "\n";
    if (dataset.getOutputAsString(i).equalsIgnoreCase(classOut)) {
      hits++;
    }
  }
  Files.writeFile(filename, output);
  return (1.0 * hits / dataset.size());
}
/**
 * Saves the ensemble to file_name
 *
 * @param file_name File name
 * @param header Header of the data set for which the network has been adjusted
 */
public void SaveEnsemble(String file_name, String header) {
  // header of KEEL dataset
  Files.writeFile(file_name, header);

  // Save networks
  for (int i = 0; i < Nnetworks; i++) {
    Files.addToFile(file_name, "\n>>>>>>>>>>>>>>Network " + i + "\n");
    nets[i].SaveNetwork(file_name, header, true);
  }

  Files.addToFile(file_name, "\n\n********* Output weights:\n");

  // Save weights
  for (int i = 0; i < Noutputs; i++) {
    Files.addToFile(file_name, "Output: " + i + "\n");
    for (int j = 0; j < Nnetworks; j++) {
      Files.addToFile(file_name, Double.toString(weights[i][j]) + " ");
    }
  }
}
/**
 * It stores the data base in a given file
 *
 * @param filename Name for the database file
 */
public void saveFile(String filename) {
  String stringOut = new String("");
  stringOut = printString();
  Files.writeFile(filename, stringOut);
}
/** It launches the algorithm */
public void execute() {
  int i, j, num;
  double fitness, fitness2;

  if (somethingWrong) { // We do not execute the program
    System.err.println(
        "An error was found, either the data-set has numerical values or missing values.");
    System.err.println("Aborting the program");
    // We should not use the statement: System.exit(-1);
  } else {
    // We do here the algorithm's operations
    Randomize.setSeed(semilla);

    /* Generation of the initial population */
    System.out.println("Creating the initial population.");
    initializePopulation();
    Gen = 0;
    GenSincambio = 0;
    Bestperformance = -1.0;

    /* Main loop of the genetic algorithm */
    System.out.println("Starting the evolutionary process.");
    do {
      /* First, all rules' fitness is set to 0 */
      for (i = 0; i < tamPoblacion; i++) {
        Poblacion.get(i).fitness = 0.0;
        Poblacion.get(i).n_SistemasDifusos = 0;
      }

      /* Then Nf fuzzy systems are created */
      for (i = 0; i < Nf; i++) {
        /* A fuzzy system containing Nr rules from the population is created */
        Create_fuzzy_system();

        /* The fitness associated to this fuzzy system is calculated */
        fitness = Evaluate_fuzzy_system();

        /* The fitness value is accumulated among the rules in the fuzzy system */
        Accumulate_fitness_fuzzy_system(fitness);

        /* If the fitness of the current fuzzy system outperforms the best evolved one,
           we update this last one */
        if (fitness > Bestperformance) {
          Bestperformance = fitness;
          GenSincambio = -1;
          BestSistemaDifuso.clear();
          for (j = 0; j < Nr; j++) {
            Individual indi = new Individual(Poblacion.get(vectorNr[j]));
            BestSistemaDifuso.add(indi);
          }
        }
      }

      /* The accumulated fitness value of each individual in the population is divided
         by the number of times it has been selected */
      for (i = 0; i < tamPoblacion; i++) {
        if (Poblacion.get(i).n_SistemasDifusos != 0) {
          Poblacion.get(i).fitness /= Poblacion.get(i).n_SistemasDifusos;
        } else {
          Poblacion.get(i).fitness = 0.0;
        }

        /* Now we count the number of parameters used in the consequent, in order to give
           a better fitness to those rules with a lower number of parameters */
        num = 0;
        for (j = 0; j < entradas; j++) {
          if (Poblacion.get(i).consecuente[j] != 0.0) {
            num++;
          }
        }
        if (Poblacion.get(i).consecuente[entradas] != 0.0) {
          num++;
        }
        Poblacion.get(i).fitness /= (K + num);
      }

      /* We increment the counter of the number of generations */
      Gen++;
      GenSincambio++;

      if (GenSincambio == numGenMigration) {
        /* Migration stage: half of the population (the worst one) is randomly generated
           again to increase the searching ability of the genetic process */
        System.out.println(
            "Migrating half of the population in order to restart the evolutionary process.");
        Migration();
        GenSincambio = 0;
      } else {
        /* Reproduction stage (includes crossover) */
        Reproduction();

        /* Mutation */
        Mutation();
      }

      /* Bestperformance stores the inverse of the error, so 1/Bestperformance is the training MSE */
      System.out.println("Iteration: " + Gen + ". Best fitness: " + (1.0 / Bestperformance));
    } while (Gen <= numGeneraciones);

    String salida = new String("");
    salida += Print_Population();

    SistemaDifuso.clear();
    for (i = 0; i < Nr; i++) {
      Individual indi = new Individual(BestSistemaDifuso.get(i));
      SistemaDifuso.add(indi);
    }

    salida += "MSE Training:\t" + (1.0 / Bestperformance) + "%\n";
    salida += "MSE Test:\t\t" + Evaluate_best_fuzzy_system_in_test() + "%\n\n";
    Files.writeFile(outputBC, salida);

    doOutput(this.val, this.outputTr);
    doOutput(this.test, this.outputTst);

    System.out.println("Algorithm Finished.");
  }
}
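/*
 * Illustration (not part of the sources above): the fitness-sharing step in the method
 * above first averages each rule's accumulated fitness over the number of fuzzy systems
 * it took part in, and then divides it by (K + number of non-zero consequent parameters),
 * so rules that use fewer parameters keep a higher fitness. All concrete values below
 * (K, the accumulated fitness, the consequent) are hypothetical.
 */
public class RuleFitnessDemo {
  public static void main(String[] args) {
    double K = 0.1;                                // hypothetical constant of the penalty term
    double accumulated = 4.5;                      // fitness accumulated over the fuzzy systems
    int timesSelected = 5;                         // fuzzy systems that included this rule
    double[] consequent = {0.0, 1.2, 0.0, -0.7};   // hypothetical TSK consequent (coefficients + constant)

    double fitness = accumulated / timesSelected;  // average contribution per fuzzy system: 0.9
    int num = 0;
    for (double c : consequent) {
      if (c != 0.0) num++;                         // count the parameters actually used: 2
    }
    fitness /= (K + num);                          // 0.9 / 2.1 ~ 0.43: fewer parameters, higher fitness
    System.out.println("Shared fitness: " + fitness);
  }
}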
/**
 * Outputs contents of the CMAR rule linked list (if any)
 *
 * @param filename Name of the output file
 */
public void outputCMARrules(String filename) {
  String stringOut = new String("");
  stringOut = outputRules(startCMARrulelist);
  Files.writeFile(filename, stringOut);
}