  /**
   * The traveling salesman test main
   *
   * @param args the number of cities N followed by the repeat count
   */
  public static void main(String[] args) {
    if (args.length < 2) {
      System.out.println("Provide an input size and a repeat count");
      System.exit(1);
    }
    int N = Integer.parseInt(args[0]);
    if (N < 0) {
      System.out.println("N cannot be negative.");
      System.exit(1);
    }
    Random random = new Random();
    // generate N random city coordinates in the unit square
    double[][] points = new double[N][2];
    for (int i = 0; i < points.length; i++) {
      points[i][0] = random.nextDouble();
      points[i][1] = random.nextDouble();
    }
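    // args[1] is the number of independent runs of each algorithm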
    int iterations = Integer.parseInt(args[1]);
    // For RHC, SA, and GA we use a permutation-based encoding:
    // each candidate solution is an ordering of the N cities
    TravelingSalesmanEvaluationFunction ef = new TravelingSalesmanRouteEvaluationFunction(points);
    Distribution odd = new DiscretePermutationDistribution(N);
    NeighborFunction nf = new SwapNeighbor();
    MutationFunction mf = new SwapMutation();
    CrossoverFunction cf = new TravelingSalesmanCrossOver(ef);
    HillClimbingProblem hcp = new GenericHillClimbingProblem(ef, odd, nf);
    GeneticAlgorithmProblem gap = new GenericGeneticAlgorithmProblem(ef, odd, mf, cf);

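    // Each run prints: best tour fitness, training time in seconds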
    System.out.println("Randomized Hill Climbing\n---------------------------------");
    for (int i = 0; i < iterations; i++) {
      RandomizedHillClimbing rhc = new RandomizedHillClimbing(hcp);
      long t = System.nanoTime();
      FixedIterationTrainer fit = new FixedIterationTrainer(rhc, 200000);
      fit.train();
      System.out.println(
          ef.value(rhc.getOptimal()) + ", " + (((double) (System.nanoTime() - t)) / 1e9d));
    }

    System.out.println("Simulated Annealing\n---------------------------------");
    for (int i = 0; i < iterations; i++) {
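      // starting temperature 1E12 with geometric cooling factor .95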
      SimulatedAnnealing sa = new SimulatedAnnealing(1E12, .95, hcp);
      long t = System.nanoTime();
      FixedIterationTrainer fit = new FixedIterationTrainer(sa, 200000);
      fit.train();
      System.out.println(
          ef.value(sa.getOptimal()) + ", " + (((double) (System.nanoTime() - t)) / 1e9d));
    }

    System.out.println("Genetic Algorithm\n---------------------------------");
    for (int i = 0; i < iterations; i++) {
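      // population of 200, mating 150 and mutating 10 individuals per generation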
      StandardGeneticAlgorithm ga = new StandardGeneticAlgorithm(200, 150, 10, gap);
      long t = System.nanoTime();
      FixedIterationTrainer fit = new FixedIterationTrainer(ga, 1000);
      fit.train();
      System.out.println(
          ef.value(ga.getOptimal()) + ", " + (((double) (System.nanoTime() - t)) / 1e9d));
    }

    System.out.println("MIMIC\n---------------------------------");

    // For MIMIC we use a sort encoding: each city gets an integer key in [0, N),
    // and the tour visits cities in the order given by sorting the keys,
    // so the evaluation function is switched to the sort-based variant
    ef = new TravelingSalesmanSortEvaluationFunction(points);
    int[] ranges = new int[N];
    Arrays.fill(ranges, N);
    odd = new DiscreteUniformDistribution(ranges);
    Distribution df = new DiscreteDependencyTree(.1, ranges);

    for (int i = 0; i < iterations; i++) {
      ProbabilisticOptimizationProblem pop =
          new GenericProbabilisticOptimizationProblem(ef, odd, df);
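      // 200 samples per iteration, keeping the 60 fittest to refit the dependency tree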
      MIMIC mimic = new MIMIC(200, 60, pop);
      long t = System.nanoTime();
      FixedIterationTrainer fit = new FixedIterationTrainer(mimic, 1000);
      fit.train();
      System.out.println(
          ef.value(mimic.getOptimal()) + ", " + (((double) (System.nanoTime() - t)) / 1e9d));
    }
  }
  /**
   * The knapsack test main: runs RHC, SA, GA, and MIMIC for several iteration
   * budgets and prints elapsed time and best fitness for each
   *
   * @param args ignored
   */
  public static void main(String[] args) {
    int[] copies = new int[NUM_ITEMS];
    Arrays.fill(copies, COPIES_EACH);
    double[] weights = new double[NUM_ITEMS];
    double[] volumes = new double[NUM_ITEMS];
    for (int i = 0; i < NUM_ITEMS; i++) {
      weights[i] = random.nextDouble() * MAX_WEIGHT;
      volumes[i] = random.nextDouble() * MAX_VOLUME;
    }
    int[] ranges = new int[NUM_ITEMS];
    Arrays.fill(ranges, COPIES_EACH + 1);
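    // each item may take between 0 and COPIES_EACH copies in a candidate solution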
    EvaluationFunction ef =
        new KnapsackEvaluationFunction(weights, volumes, KNAPSACK_VOLUME, copies);
    Distribution odd = new DiscreteUniformDistribution(ranges);
    NeighborFunction nf = new DiscreteChangeOneNeighbor(ranges);
    MutationFunction mf = new DiscreteChangeOneMutation(ranges);
    CrossoverFunction cf = new UniformCrossOver();
    Distribution df = new DiscreteDependencyTree(.1, ranges);
    HillClimbingProblem hcp = new GenericHillClimbingProblem(ef, odd, nf);
    GeneticAlgorithmProblem gap = new GenericGeneticAlgorithmProblem(ef, odd, mf, cf);
    ProbabilisticOptimizationProblem pop = new GenericProbabilisticOptimizationProblem(ef, odd, df);

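    // Output columns: algorithm, iteration budget, elapsed time (ms), best fitness found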
    for (int baseIteration : new int[] {1000, 10000, 50000, 100000, 150000, 200000}) {

      long startTime = System.currentTimeMillis();
      RandomizedHillClimbing rhc = new RandomizedHillClimbing(hcp);
      FixedIterationTrainer fit = new FixedIterationTrainer(rhc, baseIteration);
      fit.train();
      long endTime = System.currentTimeMillis();
      System.out.println(
          "RHC\t"
              + baseIteration
              + "\t"
              + (endTime - startTime)
              + "\t"
              + ef.value(rhc.getOptimal()));

      startTime = System.currentTimeMillis();
      SimulatedAnnealing sa = new SimulatedAnnealing(100, .95, hcp);
      fit = new FixedIterationTrainer(sa, baseIteration);
      fit.train();
      endTime = System.currentTimeMillis();
      System.out.println(
          "SA\t" + baseIteration + "\t" + (endTime - startTime) + "\t" + ef.value(sa.getOptimal()));

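      // GA and MIMIC evaluate 100 candidates per iteration, so their budget is divided
      // by 100, presumably to keep the number of fitness evaluations roughly comparable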
      startTime = System.currentTimeMillis();
      StandardGeneticAlgorithm ga = new StandardGeneticAlgorithm(100, 75, 12, gap);
      fit = new FixedIterationTrainer(ga, baseIteration / 100);
      fit.train();
      endTime = System.currentTimeMillis();
      System.out.println(
          "GA\t" + baseIteration + "\t" + (endTime - startTime) + "\t" + ef.value(ga.getOptimal()));

      startTime = System.currentTimeMillis();
      MIMIC mimic = new MIMIC(100, 50, pop);
      fit = new FixedIterationTrainer(mimic, baseIteration / 100);
      fit.train();
      endTime = System.currentTimeMillis();
      System.out.println(
          "MIMIC\t"
              + baseIteration
              + "\t"
              + (endTime - startTime)
              + "\t"
              + ef.value(mimic.getOptimal()));
    }
  }