public void postGeneration() {
  // Create the solutionSet union of solutionSet and offSpring
  union_ = ((SolutionSet) population_).union(offspringPopulation_);

  // Rank the union
  Ranking ranking = new Ranking(union_);
  if (ranking.getNumberOfSubfronts() == 0)
    System.out.println("No subfronts!!");

  int remain = populationSize_;
  int index = 0;
  population_.clear();

  // Obtain the first front
  SolutionSet front = ranking.getSubfront(index);

  while ((remain > 0) && (remain >= front.size())) {
    // Assign crowding distance to individuals
    distance_.crowdingDistanceAssignment(front, problem_.getNumberOfObjectives());
    // Add the individuals of this front
    for (int k = 0; k < front.size(); k++) {
      population_.add(front.get(k));
    } // for

    // Decrement remain
    remain = remain - front.size();

    // Obtain the next front
    index++;
    if (remain > 0) {
      front = ranking.getSubfront(index);
    } // if
  } // while

  // Remain is less than front(index).size(); insert only the best ones
  if (remain > 0) { // front contains individuals to insert
    distance_.crowdingDistanceAssignment(front, problem_.getNumberOfObjectives());
    front.sort(new jmetal.coevolutionary.base.operator.comparator.CrowdingComparator());
    for (int k = 0; k < remain; k++) {
      population_.add(front.get(k));
    } // for
    remain = 0;
  } // if

  // This piece of code shows how to use the indicator object in the code
  // of NSGA-II. In particular, it finds the number of evaluations required
  // by the algorithm to obtain a Pareto front whose hypervolume exceeds
  // 98% of the hypervolume of the true Pareto front.
  if ((indicators_ != null) && (requiredEvaluations_ == 0)) {
    double HV = indicators_.getHypervolume(population_);
    if (HV >= (0.98 * indicators_.getTrueParetoFrontHypervolume())) {
      requiredEvaluations_ = evaluations_;
    } // if
  } // if

  prepareBestSolutions();
} // postGeneration
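// A minimal, self-contained sketch of the selection pattern used in
// postGeneration() above: fill the next population front by front, then
// truncate the last front that does not fit by sorting it on crowding
// distance and keeping the least crowded individuals. The Individual class,
// its crowding field, and the select() helper are illustrative stand-ins,
// not part of the jMetal API; the fronts are assumed already sorted by rank.
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

class TruncationSketch {
  static class Individual {
    double crowding; // assumed precomputed; larger = less crowded
    Individual(double c) { crowding = c; }
  }

  static List<Individual> select(List<List<Individual>> fronts, int populationSize) {
    List<Individual> next = new ArrayList<>();
    int remain = populationSize;
    for (List<Individual> front : fronts) {
      if (remain <= 0) break;
      if (front.size() <= remain) {
        // The whole front fits: copy it and decrement the budget
        next.addAll(front);
        remain -= front.size();
      } else {
        // Truncate: sort descending by crowding, keep the least crowded
        front.sort(Comparator.comparingDouble((Individual s) -> s.crowding).reversed());
        next.addAll(front.subList(0, remain));
        remain = 0;
      }
    }
    return next;
  }
}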
private void prepareBestSolutions() {
  bestDecisionVariables_ = new DecisionVariables[numberOfSolutions_];
  Ranking ranking = new Ranking(population_);
  int remain;
  SolutionSet subfront = ranking.getSubfront(0);
  int sz = subfront.size();
  int rest = population_.size() - sz;
  int i;

  if ((sz >= bestSolutionsFirstLevel_) && (ranking.getNumberOfSubfronts() > 1)
      && (rest >= (numberOfSolutions_ - bestSolutionsFirstLevel_))) {
    // The first subfront is big enough and the rest can supply the remainder
    int[] indices = RandomVector.getRandomVector_Int(bestSolutionsFirstLevel_, sz);
    for (i = 0; i < bestSolutionsFirstLevel_; ++i)
      bestDecisionVariables_[i] = subfront.get(indices[i]).getDecisionVariables();
    remain = numberOfSolutions_ - bestSolutionsFirstLevel_;
  } // if
  else if (rest < (numberOfSolutions_ - bestSolutionsFirstLevel_)) {
    // The remaining subfronts are too small; take everything from the first one
    int[] indices = RandomVector.getRandomVector_Int(numberOfSolutions_, sz);
    for (i = 0; i < numberOfSolutions_; ++i)
      bestDecisionVariables_[i] = subfront.get(indices[i]).getDecisionVariables();
    remain = 0;
  } // else if
  else if (numberOfSolutions_ == 1) {
    // Only one solution is requested; only indices[0] is used
    int[] indices = RandomVector.getRandomVector_Int(bestSolutionsFirstLevel_, sz);
    bestDecisionVariables_[0] = subfront.get(indices[0]).getDecisionVariables();
    remain = 0;
    i = 1;
  } // else if
  else {
    // The first subfront is smaller than numberOfSolutions_; take it whole
    int[] indices = RandomVector.getRandomVector_Int(sz, sz);
    for (i = 0; i < sz; ++i)
      bestDecisionVariables_[i] = subfront.get(indices[i]).getDecisionVariables();
    remain = numberOfSolutions_ - sz;
  } // else

  // Fill the remaining slots from the lower-ranked subfronts
  int index = 1;
  while ((index < ranking.getNumberOfSubfronts()) && (remain > 0)) {
    subfront = ranking.getSubfront(index);
    sz = subfront.size();
    if (sz >= remain) {
      int[] indices = RandomVector.getRandomVector_Int(remain, sz);
      for (int j = 0; j < remain; ++j, ++i)
        bestDecisionVariables_[i] = subfront.get(indices[j]).getDecisionVariables();
      remain = 0;
    } // if
    else {
      int[] indices = RandomVector.getRandomVector_Int(sz, sz);
      for (int j = 0; j < sz; ++j, ++i, --remain)
        bestDecisionVariables_[i] = subfront.get(indices[j]).getDecisionVariables();
    } // else
    ++index;
  } // while
} // prepareBestSolutions
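// prepareBestSolutions() leans on RandomVector.getRandomVector_Int(n, max),
// which, as used above, must return n distinct random indices in [0, max)
// (note the calls with n == max, which demand a full permutation). A minimal
// sketch of that contract via a shuffle follows; this is an assumption
// inferred from the call sites, not the actual jmetal implementation.
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class RandomVectorSketch {
  static int[] getRandomVector_Int(int n, int max) {
    List<Integer> pool = new ArrayList<>();
    for (int v = 0; v < max; v++) pool.add(v);
    Collections.shuffle(pool); // random permutation of 0..max-1
    int[] out = new int[n];
    for (int k = 0; k < n; k++)
      out[k] = pool.get(k); // the first n entries form a distinct sample
    return out;
  }
}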