/**
   * Parses a given list of options.
   *
   * <p>
   * <!-- options-start -->
   * Valid options are:
   *
   * <p>
   *
   * <pre> -folds &lt;folds&gt;
   *  The number of folds for splitting the training set into
   *  train and test sets. The first fold is always the training
   *  set. With '-V' you can invert this, i.e., instead of 20/80
   *  for 5 folds you'll get 80/20.
   *  (default 5)</pre>
   *
   * <pre> -V
   *  Inverts the fold selection, i.e., the first fold is used as the
   *  test set instead of the training set, and the remaining folds
   *  are used for training.</pre>
   *
   * <pre> -verbose
   *  Whether to print additional information while building the
   *  classifier.
   *  (default is off)</pre>
   *
   * <pre> -insight
   *  Whether to use the labels of the original test set for more
   *  statistics (not used for learning!).
   *  (default is off)</pre>
   *
   * <pre> -S &lt;num&gt;
   *  Random number seed.
   *  (default 1)</pre>
   *
   * <pre> -D
   *  If set, classifier is run in debug mode and
   *  may output additional info to the console</pre>
   *
   * <pre> -naive
   *  Uses a sorted list (ordered according to distance) instead of the
   *  KDTree for finding the neighbors.
   *  (default is KDTree)</pre>
   *
   * <pre> -I
   *  Weight neighbours by the inverse of their distance
   *  (use when k &gt; 1)</pre>
   *
   * <pre> -F
   *  Weight neighbours by 1 - their distance
   *  (use when k &gt; 1)</pre>
   *
   * <pre> -K &lt;number of neighbors&gt;
   *  Number of nearest neighbours (k) used in classification.
   *  (Default = 1)</pre>
   *
   * <pre> -A
   *  The nearest neighbour search algorithm to use (default: LinearNN).
   * </pre>
   *
   * <!-- options-end -->
   *
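   * Example of setting the options programmatically (a sketch;
   * <code>classifier</code> stands for an instance of this class):
   *
   * <pre>
   * String[] options = weka.core.Utils.splitOptions("-folds 5 -naive -K 10");
   * classifier.setOptions(options);
   * </pre>
   *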
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    super.setOptions(options);

    setUseNaiveSearch(Utils.getFlag("naive", options));

    // remaining options (e.g., -K, -I, -F, -A) are handled by the embedded IBk
    m_Classifier.setOptions(options);
    m_KNN = m_Classifier.getKNN(); // back up the user-specified (maximum) number of neighbours
    m_Classifier.setCrossValidate(true); // always on!
    m_Classifier.setWindowSize(0); // always off!
    m_Classifier.setMeanSquared(false); // always off!
  }
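
  // How the options are routed in setOptions() above (a sketch, assuming the
  // standard behaviour of weka.core.Utils and weka.classifiers.lazy.IBk):
  //
  //   String[] options = Utils.splitOptions("-folds 10 -naive -K 25");
  //   setOptions(options);
  //   // -folds, -V, -verbose, -insight, -S, -D -> consumed by super.setOptions()
  //   // -naive                                 -> consumed here via Utils.getFlag()
  //   // -K, -I, -F, -A                         -> passed on to the embedded IBk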

  /** Performs the initialization of the member variables. */
  protected void initializeMembers() {
    super.initializeMembers();

    m_KNNdetermined = -1;
    m_NeighborsTestset = null;
    m_TrainsetNew = null;
    m_TestsetNew = null;
    m_UseNaiveSearch = false;
    m_LabeledTestset = null;
    m_Missing = new ReplaceMissingValues();

    m_Classifier = new IBk();
    m_Classifier.setKNN(10); // upper bound on the number of neighbours
    m_Classifier.setCrossValidate(true); // let IBk pick the best k (1..10) via hold-one-out CV
    m_Classifier.setWindowSize(0); // no limit on the number of training instances
    m_Classifier.setMeanSquared(false); // use mean absolute error in the CV (regression case)

    m_KNN = m_Classifier.getKNN();

    m_AdditionalMeasures.add("measureDeterminedKNN");
  }
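
  // Sketch of how the additional measure registered above might be queried,
  // assuming this class follows Weka's AdditionalMeasureProducer contract
  // (suggested by the use of m_AdditionalMeasures):
  //
  //   // after the classifier has been built:
  //   double k = classifier.getMeasure("measureDeterminedKNN");
  //   // presumably the k determined by the embedded IBk (m_KNNdetermined)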