/**
   * Computes the given input to produce the corresponding output.
   *
   * @param inputs An input vector.
   * @param votes A vector containing the number of votes for each class.
   * @return The decision label for the given input.
   */
  private int computeVoting(double[] inputs, int[] votes) {
    // One-vs-one voting: evaluate every pairwise machine and let each one
    // cast a vote for the class it predicts.
    int classes = getClasses();
    for (int first = 1; first < classes; first++) {
      for (int second = 0; second < first; second++) {
        // The machine for the pair (first, second) lives in the lower
        // triangular table at [first - 1][second].
        double output = machines[first - 1][second].Compute(inputs);

        // A negative response favors 'first'; otherwise 'second' wins.
        int winner = (output < 0) ? first : second;
        votes[winner]++;
      }
    }

    // The label with the most accumulated votes is the decision.
    return Matrix.MaxIndex(votes);
  }
    /** Returns the function value after training. */
    double predict(T x) {
      double score = b;

      if (kernel instanceof Linear && w != null) {
        // Linear-kernel fast path: use the explicit weight vector.
        if (x instanceof double[]) {
          score += Matrix.InnerProduct(w, (double[]) x);
        } else if (x instanceof SparseArray) {
          // Sparse input: accumulate only the non-zero coordinates.
          for (SparseArray.Entry entry : (SparseArray) x) {
            score += w[entry.i] * entry.x;
          }
        } else {
          throw new UnsupportedOperationException("Unsupported data type for linear kernel");
        }
        return score;
      }

      // General kernel: sum the weighted kernel responses of all support vectors.
      for (SupportVector vector : sv) {
        if (vector != null) {
          score += vector.alpha * kernel.Function(vector.x, x);
        }
      }

      return score;
    }
  /**
   * Trains the SVM with the given dataset for one epoch. The caller may call this method multiple
   * times to obtain better accuracy although one epoch is usually sufficient. After calling this
   * method sufficient times (usually 1 or 2), the users should call {@link #finalize()} to further
   * process support vectors.
   *
   * @param x training instances.
   * @param y training labels in [0, k), where k is the number of classes.
   * @param weight instance weight. Must be positive. The soft margin penalty parameter for instance
   *     i will be weight[i] * C.
   * @throws IllegalArgumentException if the array sizes don't match or a label is out of range.
   */
  @SuppressWarnings("unchecked")
  public void Learn(T[] x, int[] y, double[] weight) {
    if (x.length != y.length) {
      throw new IllegalArgumentException(
          String.format("The sizes of X and Y don't match: %d != %d", x.length, y.length));
    }

    if (weight != null && x.length != weight.length) {
      throw new IllegalArgumentException(
          String.format(
              "The sizes of X and instance weight don't match: %d != %d", x.length, weight.length));
    }

    int miny = Matrix.Min(y);
    if (miny < 0) {
      throw new IllegalArgumentException("Negative class label:" + miny);
    }

    int maxy = Matrix.Max(y);
    if (maxy >= k) {
      throw new IllegalArgumentException("Invalid class label:" + maxy);
    }

    if (k == 2) {
      // Binary problem: relabel {1 -> +1, everything else -> -1} and train directly.
      int[] yi = new int[y.length];
      for (int i = 0; i < y.length; i++) {
        yi[i] = (y[i] == 1) ? +1 : -1;
      }

      if (weight == null) {
        svm.learn(x, yi);
      } else {
        svm.learn(x, yi, weight);
      }
    } else if (strategy == Multiclass.ONE_VS_ALL) {
      // One task per class: class i against the rest.
      List<TrainingTask> tasks = new ArrayList<TrainingTask>(k);
      for (int i = 0; i < k; i++) {
        int[] yi = new int[y.length];
        // With per-class weights (wi) present, combine them with the optional
        // per-instance weights; otherwise use the instance weights as-is.
        double[] w = wi == null ? weight : new double[y.length];
        for (int l = 0; l < y.length; l++) {
          yi[l] = (y[l] == i) ? +1 : -1;

          if (wi != null) {
            w[l] = wi[y[l]];
            if (weight != null) {
              w[l] *= weight[l];
            }
          }
        }

        tasks.add(new TrainingTask(svms.get(i), x, yi, w));
      }

      runTasks(tasks);
    } else {
      // One-vs-one: one task per unordered class pair (i, j), trained only on
      // the instances that belong to one of the two classes.
      List<TrainingTask> tasks = new ArrayList<TrainingTask>(k * (k - 1) / 2);
      for (int i = 0, m = 0; i < k; i++) {
        for (int j = i + 1; j < k; j++, m++) {
          // Count the instances participating in this sub-problem.
          int n = 0;
          for (int l = 0; l < y.length; l++) {
            if (y[l] == i || y[l] == j) {
              n++;
            }
          }

          T[] xij = (T[]) java.lang.reflect.Array.newInstance(x.getClass().getComponentType(), n);
          int[] yij = new int[n];
          double[] wij = weight == null ? null : new double[n];

          for (int l = 0, q = 0; l < y.length; l++) {
            if (y[l] == i || y[l] == j) {
              xij[q] = x[l];
              yij[q] = (y[l] == i) ? +1 : -1;
              if (weight != null) {
                wij[q] = weight[l];
              }
              q++;
            }
          }

          tasks.add(new TrainingTask(svms.get(m), xij, yij, wij));
        }
      }

      runTasks(tasks);
    }
  }

  /**
   * Runs the given training tasks, keeping the original non-throwing, best-effort contract.
   * Failures are reported to stderr with the full stack trace instead of only the (possibly null)
   * exception message, so training problems are diagnosable.
   *
   * @param tasks the training tasks to execute in parallel.
   */
  private void runTasks(List<TrainingTask> tasks) {
    try {
      MulticoreExecutor.run(tasks);
    } catch (Exception e) {
      e.printStackTrace(System.err);
    }
  }
    /**
     * Trains the SVM with the given dataset for one epoch. The caller may call this method multiple
     * times to obtain better accuracy although one epoch is usually sufficient. After calling this
     * method sufficient times (usually 1 or 2), the users should call {@link #finalize()} to
     * further process support vectors.
     */
    void learn(T[] x, int[] y, double[] weight) {
      // Lazily discover the feature dimension needed by the linear-kernel fast path.
      if (p == 0 && kernel instanceof Linear) {
        if (x instanceof double[][]) {
          p = ((double[]) x[0]).length;
        } else if (x instanceof float[][]) {
          p = ((float[]) x[0]).length;
        } else {
          throw new UnsupportedOperationException("Unsupported data type for linear kernel.");
        }
      }

      // Count the support vectors already held for each class.
      int positives = 0;
      int negatives = 0;
      for (SupportVector vector : sv) {
        if (vector == null) {
          continue;
        }
        if (vector.y > 0) {
          positives++;
        } else if (vector.y < 0) {
          negatives++;
        }
      }

      // If the machine is empty or nearly empty, seed it with a handful of
      // instances from each class before the main pass.
      final int size = x.length;
      if (positives < 5 || negatives < 5) {
        for (int i = 0; i < size && (positives < 5 || negatives < 5); i++) {
          if (y[i] == 1 && positives < 5) {
            if (weight == null) {
              process(x[i], y[i]);
            } else {
              process(x[i], y[i], weight[i]);
            }
            positives++;
          } else if (y[i] == -1 && negatives < 5) {
            if (weight == null) {
              process(x[i], y[i]);
            } else {
              process(x[i], y[i], weight[i]);
            }
            negatives++;
          }
        }
      }

      // Main pass over the data. NOTE: the intended random permutation is
      // commented out, so the visitation order is currently sequential.
      int[] order = Matrix.Indices(0, size); // Tools.Random().permutate(n);
      for (int i = 0; i < size; i++) {
        int idx = order[i];
        if (weight == null) {
          process(x[idx], y[idx]);
        } else {
          process(x[idx], y[idx], weight[idx]);
        }

        // Always reprocess at least once, and keep going while the gradient
        // gap stays large.
        do {
          reprocess(tol);
          minmax();
        } while (gmax - gmin > 1000);
      }
    }