/**
 * Parses a given list of options.
 *
 * <p>
 * <!-- options-start -->
 * Valid options are:
 *
 * <p>
 *
 * <pre> -folds &lt;folds&gt;
 *  The number of folds for splitting the training set into
 *  train and test set. The first fold is always the training
 *  set. With '-V' you can invert this, i.e., instead of 20/80
 *  for 5 folds you'll get 80/20.
 *  (default 5)</pre>
 *
 * <pre> -V
 *  Inverts the fold selection, i.e., instead of using the first
 *  fold for the training set it is used for test set and the
 *  remaining folds for training.</pre>
 *
 * <pre> -verbose
 *  Whether to print some more information during building the
 *  classifier.
 *  (default is off)</pre>
 *
 * <pre> -insight
 *  Whether to use the labels of the original test set for more
 *  statistics (not used for learning!).
 *  (default is off)</pre>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -naive
 *  Uses a sorted list (ordered according to distance) instead of the
 *  KDTree for finding the neighbors.
 *  (default is KDTree)</pre>
 *
 * <pre> -I
 *  Weight neighbours by the inverse of their distance
 *  (use when k &gt; 1)</pre>
 *
 * <pre> -F
 *  Weight neighbours by 1 - their distance
 *  (use when k &gt; 1)</pre>
 *
 * <pre> -K &lt;number of neighbors&gt;
 *  Number of nearest neighbours (k) used in classification.
 *  (Default = 1)</pre>
 *
 * <pre> -A
 *  The nearest neighbour search algorithm to use (default: LinearNN).
 *  </pre>
 *
 * <!-- options-end -->
 *
 * @param options the list of options as an array of strings
 * @throws Exception if an option is not supported
 */
public void setOptions(String[] options) throws Exception {
  // NOTE(review): statement order matters here — Weka option parsing
  // (Utils.getFlag / setOptions) consumes entries from the shared
  // options array, so the superclass must see the array before the
  // locally-handled "-naive" flag is removed and the remainder is
  // forwarded to the wrapped classifier. Confirm before reordering.
  super.setOptions(options);

  // Handle our own "-naive" flag (sorted-list search instead of KDTree).
  setUseNaiveSearch(Utils.getFlag("naive", options));

  // Forward the remaining options to the wrapped nearest-neighbour
  // classifier, then record its k so it can be restored later.
  m_Classifier.setOptions(options);
  m_KNN = m_Classifier.getKNN(); // backup KNN

  // Force the wrapped classifier's settings regardless of what the
  // forwarded options requested: cross-validation of k is always on,
  // windowed training and mean-squared error are always off.
  m_Classifier.setCrossValidate(true); // always on!
  m_Classifier.setWindowSize(0); // always off!
  m_Classifier.setMeanSquared(false); // always off!
}