    /**
     * Evaluates the statistics (best/worst phenotype, age and fitness
     * mean/variance) of the given population for the given generation.
     */
    @Override
    public NumberStatistics.Builder<G, R> evaluate(
        final Iterable<? extends Phenotype<G, R>> population,
        final int generation,
        final Optimize opt) {
      final Builder<G, R> builder = new Builder<>();
      builder.generation(generation);
      builder.optimize(opt);

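      // Accumulators for the best/worst phenotype, the phenotype age and the
      // fitness values of the population.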
      final MinMax<Phenotype<G, R>> minMax = new MinMax<>();
      final Variance<Integer> age = new Variance<>();
      final Variance<R> fitness = new Variance<>();

      accumulators.<Phenotype<G, R>>accumulate(
          population,
          minMax,
          age.map(Phenotype.Age(generation)),
          fitness.map(Phenotype.<R>Fitness()));
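
      // Copy the accumulated values into the statistics builder.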
      builder.bestPhenotype(opt.best(minMax.getMax(), minMax.getMin()));
      builder.worstPhenotype(opt.worst(minMax.getMax(), minMax.getMin()));
      builder.fitnessMean(fitness.getMean());
      builder.fitnessVariance(fitness.getVariance());
      builder.samples((int) minMax.getSamples());
      builder.ageMean(age.getMean());
      builder.ageVariance(age.getVariance());
      builder.standardError(fitness.getStandardError());

      return builder;
    }
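
  /**
   * Creates the test cases for the expected selector distributions: one
   * {@code {size, expected distribution, optimize}} triple per size listed in
   * the CSV resource file of each {@code Optimize} value.
   */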
  @DataProvider(name = "expectedDistribution")
  public Object[][] expectedDistribution() {
    final String resource = "/org/jenetics/selector/distribution/TournamentSelector";

    return Arrays.stream(Optimize.values())
        .flatMap(
            opt -> {
              final TestData data = TestData.of(resource, opt.toString());
              final double[][] csv =
                  data.stream().map(TestData::toDouble).toArray(double[][]::new);

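              // The first row of the CSV data contains the test sizes.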
              final int[] sizes = TestData.toInt(csv[0]);

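              // Each size yields one test case: {size, expected distribution, optimize}.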
              return IntStream.range(0, sizes.length)
                  .mapToObj(
                      i ->
                          new Object[] {
                            sizes[i],
                            Named.of(format("distribution[%d]", sizes[i]), expected(csv, i)),
                            opt
                          });
            })
        .toArray(Object[][]::new);
  }
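
 /**
  * Optimizes (force-merges) the underlying Lucene index as requested and
  * optionally flushes and refreshes the engine afterwards.
  */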
 @Override
 public void optimize(Optimize optimize) throws EngineException {
   if (optimizeMutex.compareAndSet(false, true)) {
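     // Only one optimize may run at a time; concurrent calls skip the merge
     // but still honor the flush/refresh flags below.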
     rwl.readLock().lock();
     try {
       if (indexWriter == null) {
         throw new EngineClosedException(shardId);
       }
       int maxNumberOfSegments = optimize.maxNumSegments();
       if (maxNumberOfSegments == -1) {
         // not set, optimize down to half the configured number of segments
         if (indexWriter.getMergePolicy() instanceof LogMergePolicy) {
           maxNumberOfSegments =
               ((LogMergePolicy) indexWriter.getMergePolicy()).getMergeFactor() / 2;
           if (maxNumberOfSegments < 0) {
             maxNumberOfSegments = 1;
           }
         }
       }
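       // Either only expunge deleted documents or merge down to the requested
       // number of segments.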
       if (optimize.onlyExpungeDeletes()) {
         indexWriter.expungeDeletes(optimize.waitForMerge());
       } else {
         indexWriter.optimize(maxNumberOfSegments, optimize.waitForMerge());
       }
       // The optimization may have removed deletes, which affects the TermEnum,
       // so mark the engine as dirty.
       dirty = true;
     } catch (Exception e) {
       throw new OptimizeFailedEngineException(shardId, e);
     } finally {
       rwl.readLock().unlock();
       optimizeMutex.set(false);
     }
   }
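   // Flush and refresh are handled outside the mutex, so they also apply when
   // another optimize is already running.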
   if (optimize.flush()) {
     flush(new Flush());
   }
   if (optimize.refresh()) {
     refresh(new Refresh(false));
   }
 }