Example #1
  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {

    Vector<String> result = new Vector<String>();

    result.add("-C");
    result.add("" + getC());

    result.add("-N");
    result.add("" + m_filterType);

    result.add("-I");
    result.add(
        ""
            + getRegOptimizer().getClass().getName()
            + " "
            + Utils.joinOptions(getRegOptimizer().getOptions()));

    result.add("-K");
    result.add(
        "" + getKernel().getClass().getName() + " " + Utils.joinOptions(getKernel().getOptions()));

    Collections.addAll(result, super.getOptions());

    return (String[]) result.toArray(new String[result.size()]);
  }
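The class-name-plus-options string stored under "-K" above is meant to be parsed back later. For reference, here is a minimal sketch of the matching setOptions logic; it assumes a setKernel(Kernel) mutator on the same class and uses only the standard weka.core.Utils helpers (getOption, splitOptions, forName), so treat it as illustrative rather than the project's actual code.

  /**
   * Sketch only (not from the original example): rebuilds the kernel from the
   * "className options..." spec written by getOptions() above.
   */
  public void setOptions(String[] options) throws Exception {
    String kernelSpec = Utils.getOption('K', options);
    if (kernelSpec.length() > 0) {
      String[] spec = Utils.splitOptions(kernelSpec);
      if (spec.length > 0) {
        String kernelName = spec[0];
        spec[0] = ""; // keep only the kernel's own options for forName
        setKernel((Kernel) Utils.forName(Kernel.class, kernelName, spec));
      }
    }
    super.setOptions(options);
  }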
Example #2
  /**
   * Gets the current settings of FuzzyRoughSubsetEval
   *
   * @return an array of strings suitable for passing to setOptions()
   */
  public String[] getOptions() {
    Vector<String> result;

    result = new Vector<String>();

    result.add("-Z");
    result.add(
        (m_FuzzyMeasure.getClass().getName() + " " + Utils.joinOptions(m_FuzzyMeasure.getOptions()))
            .trim());

    result.add("-I");
    result.add(
        (m_Implicator.getClass().getName() + " " + Utils.joinOptions(m_Implicator.getOptions()))
            .trim());

    result.add("-T");
    result.add(
        (m_TNorm.getClass().getName() + " " + Utils.joinOptions(m_TNorm.getOptions())).trim());

    result.add("-R");
    result.add(
        (m_Similarity.getClass().getName() + " " + Utils.joinOptions(m_Similarity.getOptions()))
            .trim());

    return result.toArray(new String[result.size()]);
  }
  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  @Override
  public String[] getOptions() {
    ArrayList<String> options = new ArrayList<String>();

    if (getUseWordFrequencies()) {
      options.add("-W");
    }
    options.add("-P");
    options.add("" + getPeriodicPruning());
    options.add("-M");
    options.add("" + getMinWordFrequency());

    if (getNormalizeDocLength()) {
      options.add("-normalize");
    }
    options.add("-norm");
    options.add("" + getNorm());
    options.add("-lnorm");
    options.add("" + getLNorm());
    if (getLowercaseTokens()) {
      options.add("-lowercase");
    }
    if (getUseStopList()) {
      options.add("-stoplist");
    }
    if (!getStopwords().isDirectory()) {
      options.add("-stopwords");
      options.add(getStopwords().getAbsolutePath());
    }

    options.add("-tokenizer");
    String spec = getTokenizer().getClass().getName();
    if (getTokenizer() instanceof OptionHandler) {
      spec += " " + Utils.joinOptions(((OptionHandler) getTokenizer()).getOptions());
    }
    options.add(spec.trim());

    if (getStemmer() != null) {
      options.add("-stemmer");
      spec = getStemmer().getClass().getName();
      if (getStemmer() instanceof OptionHandler) {
        spec += " " + Utils.joinOptions(((OptionHandler) getStemmer()).getOptions());
      }

      options.add(spec.trim());
    }

    Collections.addAll(options, super.getOptions());

    return options.toArray(new String[options.size()]);
  }
Example #4
File: Wavelet.java Project: dachylong/weka
  /**
   * returns the options of the current setup
   *
   * @return the current options
   */
  public String[] getOptions() {
    Vector<String> result = new Vector<String>();

    Collections.addAll(result, super.getOptions());

    result.add("-A");
    result.add("" + getAlgorithm().getSelectedTag().getReadable());

    result.add("-P");
    result.add("" + getPadding().getSelectedTag().getReadable());

    result.add("-F");
    if (getFilter() instanceof OptionHandler)
      result.add(
          getFilter().getClass().getName()
              + " "
              + Utils.joinOptions(((OptionHandler) getFilter()).getOptions()));
    else result.add(getFilter().getClass().getName());

    return (String[]) result.toArray(new String[result.size()]);
  }
Example #5
  /**
   * Gets the filter specification string, which contains the class name of the filter and any
   * options to the filter
   *
   * @return the filter string.
   */
  protected String getFilterSpec() {
    Filter c = getFilter();

    if (c instanceof OptionHandler)
      return c.getClass().getName() + " " + Utils.joinOptions(((OptionHandler) c).getOptions());
    else return c.getClass().getName();
  }
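A spec string in this form is normally turned back into a configured filter by splitting it and handing the pieces to Utils.forName. The helper below is a hypothetical counterpart, not part of the original example:

  /**
   * Hypothetical inverse of getFilterSpec(): parses "className options..." back
   * into a configured Filter instance.
   */
  protected Filter filterFromSpec(String filterSpec) throws Exception {
    String[] spec = Utils.splitOptions(filterSpec);
    if (spec.length == 0) {
      throw new IllegalArgumentException("Invalid filter specification string");
    }
    String className = spec[0];
    spec[0] = ""; // leave only the filter's own options
    return (Filter) Utils.forName(Filter.class, className, spec);
  }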
Example #6
  /**
   * Gets the current settings of IBk.
   *
   * @return an array of strings suitable for passing to setOptions()
   */
  public String[] getOptions() {

    String[] options = new String[11];
    int current = 0;
    options[current++] = "-K";
    options[current++] = "" + getKNN();
    options[current++] = "-W";
    options[current++] = "" + m_WindowSize;
    if (getCrossValidate()) {
      options[current++] = "-X";
    }
    if (getMeanSquared()) {
      options[current++] = "-E";
    }
    if (m_DistanceWeighting == WEIGHT_INVERSE) {
      options[current++] = "-I";
    } else if (m_DistanceWeighting == WEIGHT_SIMILARITY) {
      options[current++] = "-F";
    }

    options[current++] = "-A";
    options[current++] =
        m_NNSearch.getClass().getName() + " " + Utils.joinOptions(m_NNSearch.getOptions());

    while (current < options.length) {
      options[current++] = "";
    }

    return options;
  }
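The trailing empty strings written by the final loop are deliberate: the fixed-size array has more slots than options actually emitted, and Weka's option handling skips empty entries. A quick illustrative check (the array literal is made up):

    // joinOptions ignores the "" padding, so only "-K 3 -W 0" comes out.
    String[] padded = {"-K", "3", "-W", "0", "", "", ""};
    System.out.println(Utils.joinOptions(padded));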
Example #7
  /**
   * Gets the current settings of SimpleKMeans.
   *
   * @return an array of strings suitable for passing to setOptions()
   */
  public String[] getOptions() {
    ArrayList<String> options = new ArrayList<String>();

    options.add("-I");
    options.add("" + getMaxIterations());
    options.add("-min");
    options.add("" + getMinNumClusters());
    options.add("-max");
    options.add("" + getMaxNumClusters());
    options.add("-restarts");
    options.add("" + getRestarts());
    if (isManuallySelectNumClusters()) {
      options.add("-manual");
    }
    if (getInitializeUsingKMeansPlusPlusMethod()) {
      options.add("-P");
    }
    if (isPrintDebug()) {
      options.add("-debug");
    }

    options.add("-A");
    options.add(
        (distanceFunction.getClass().getName()
                + " "
                + Utils.joinOptions(distanceFunction.getOptions()))
            .trim());

    return (String[]) options.toArray(new String[options.size()]);
  }
Example #8
  @Override
  public void setOptions(String[] options) throws Exception {

    // these are options to the hadoop map task (especially the -names-file)

    String existing = Utils.getOption("existing-header", options);
    setPathToExistingHeader(existing);

    String attNames = Utils.getOption('A', options);
    setAttributeNames(attNames);

    String namesFile = Utils.getOption("names-file", options);
    setAttributeNamesFile(namesFile);

    String outputName = Utils.getOption("header-file-name", options);
    setOutputHeaderFileName(outputName);

    super.setOptions(options);

    // any options to pass on to the underlying Weka csv to arff map task?
    CSVToARFFHeaderMapTask tempMap = new CSVToARFFHeaderMapTask();
    tempMap.setOptions(options);

    String optsToWekaMapTask = Utils.joinOptions(tempMap.getOptions());
    if (!DistributedJobConfig.isEmpty(optsToWekaMapTask)) {
      setCsvToArffTaskOptions(optsToWekaMapTask);
    }
  }
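A hypothetical invocation of the method above ("job" stands in for an instance of the enclosing Hadoop job class; the command line is illustrative only). Anything not consumed here or by the superclass is offered to the embedded CSVToARFFHeaderMapTask:

    // inside code that declares throws Exception
    String[] opts =
        Utils.splitOptions("-A id,age,class -header-file-name header.arff");
    job.setOptions(opts);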
Example #9
  /**
   * Returns description of the bias-variance decomposition results.
   *
   * @return the bias-variance decomposition results as a string
   */
  public String toString() {

    String result = "\nBias-Variance Decomposition\n";

    if (getClassifier() == null) {
      return "Invalid setup";
    }

    result += "\nClassifier   : " + getClassifier().getClass().getName();
    if (getClassifier() instanceof OptionHandler) {
      result += " " + Utils.joinOptions(((OptionHandler) m_Classifier).getOptions());
    }
    result += "\nData File    : " + getDataFileName();
    result += "\nClass Index  : ";
    if (getClassIndex() == 0) {
      result += "last";
    } else {
      result += getClassIndex();
    }
    result += "\nTraining Pool: " + getTrainPoolSize();
    result += "\nIterations   : " + getTrainIterations();
    result += "\nSeed         : " + getSeed();
    result += "\nError        : " + Utils.doubleToString(getError(), 6, 4);
    result += "\nSigma^2      : " + Utils.doubleToString(getSigma(), 6, 4);
    result += "\nBias^2       : " + Utils.doubleToString(getBias(), 6, 4);
    result += "\nVariance     : " + Utils.doubleToString(getVariance(), 6, 4);

    return result + "\n";
  }
Example #10
  /**
   * Gets the classifier specification string, which contains the class name of the classifier and
   * any options to the classifier
   *
   * @param index the index of the classifier string to retrieve, starting from 0.
   * @return the classifier string, or the empty string if no classifier has been assigned (or the
   *     index given is out of range).
   */
  protected String getClassifierSpec(int index) {

    if (index < 0 || index >= m_Classifiers.length) {
      return "";
    }
    Classifier c = getClassifier(index);
    return c.getClass().getName() + " " + Utils.joinOptions(((OptionHandler) c).getOptions());
  }
Example #11
  /**
   * Gets the search specification string, which contains the class name of the search method and
   * any options to it
   *
   * @return the search string.
   */
  protected String getSearchSpec() {

    ASSearch s = getSearch();
    if (s instanceof OptionHandler) {
      return s.getClass().getName() + " " + Utils.joinOptions(((OptionHandler) s).getOptions());
    }
    return s.getClass().getName();
  }
Example #12
  /**
   * Gets the classifier specification string, which contains the class name of the classifier and
   * any options to the classifier.
   *
   * @return the classifier string.
   */
  protected String getClassifierSpec() {

    Classifier c = getClassifier();
    if (c instanceof OptionHandler) {
      return c.getClass().getName() + " " + Utils.joinOptions(((OptionHandler) c).getOptions());
    }
    return c.getClass().getName();
  }
  /**
   * returns the specification of the given object (class, options if an instance of OptionHandler)
   *
   * @param o the object to get the specs as string
   * @return the specification string
   */
  protected String getSpecification(Object o) {
    String result;

    result = o.getClass().getName();
    if (o instanceof OptionHandler)
      result += " " + Utils.joinOptions(((OptionHandler) o).getOptions());

    return result.trim();
  }
  /**
   * Returns graph describing the classifier (if possible).
   *
   * @return the graph of the classifier in dotty format
   * @throws Exception if the classifier cannot be graphed
   */
  public String graph() throws Exception {

    if (m_Classifier instanceof Drawable) return ((Drawable) m_Classifier).graph();
    else
      throw new Exception(
          "Classifier: "
              + m_Classifier.getClass().getName()
              + " "
              + Utils.joinOptions(m_BestClassifierOptions)
              + " cannot be graphed");
  }
  /**
   * Apply the complete list of options to the current underlying job
   *
   * @param opts the options to apply
   */
  protected void applyOptionsToJob(List<String> opts) {
    String combined = Utils.joinOptions(opts.toArray(new String[opts.size()]));
    System.err.println("Combined: " + combined);

    if (!combined.equals(m_optionsOrig)) {
      m_modifyListener.setModifiedStatus(this, true);
    }

    m_bean.setJobOptions(combined);

    m_parentWindow.dispose();
  }
Example #16
  /**
   * Parses a given list of options.
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    String tmpStr;

    m_GroovyOptions = new String[0];

    tmpStr = Utils.getOption('G', options);
    if (tmpStr.length() != 0) setGroovyModule(new File(tmpStr));
    else setGroovyModule(new File(System.getProperty("user.dir")));

    setGroovyOptions(Utils.joinOptions(Utils.partitionOptions(options).clone()));

    super.setOptions(options);
  }
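Utils.partitionOptions returns every token after the "--" separator (blanking those slots in the original array), which is how the Groovy module ends up with its own untouched option block. A hypothetical call, with "classifier" standing in for an instance of the enclosing scheme:

    // inside code that declares throws Exception
    String[] options = Utils.splitOptions("-G MyScript.groovy -- -foo bar");
    classifier.setOptions(options);
    // everything behind "--" ("-foo bar") is joined and stored as the
    // Groovy module's own options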
Example #17
  /** outputs some data about the classifier */
  public String toString() {
    StringBuffer result;

    result = new StringBuffer();
    result.append("Weka - Demo\n===========\n\n");

    result.append(
        "Classifier...: "
            + m_Classifier.getClass().getName()
            + " "
            + Utils.joinOptions(m_Classifier.getOptions())
            + "\n");
    if (m_Filter instanceof OptionHandler)
      result.append(
          "Filter.......: "
              + m_Filter.getClass().getName()
              + " "
              + Utils.joinOptions(((OptionHandler) m_Filter).getOptions())
              + "\n");
    else result.append("Filter.......: " + m_Filter.getClass().getName() + "\n");
    result.append("Training file: " + m_TrainingFile + "\n");
    result.append("\n");

    result.append(m_Classifier.toString() + "\n");
    result.append(m_Evaluation.toSummaryString() + "\n");
    try {
      result.append(m_Evaluation.toMatrixString() + "\n");
    } catch (Exception e) {
      e.printStackTrace();
    }
    try {
      result.append(m_Evaluation.toClassDetailsString() + "\n");
    } catch (Exception e) {
      e.printStackTrace();
    }

    return result.toString();
  }
  protected void updateOptions() {

    if (m_clusterer instanceof OptionHandler) {
      m_clustererOptions = Utils.joinOptions(((OptionHandler) m_clusterer).getOptions());
    } else {
      m_clustererOptions = "";
    }
    if (m_clusterer instanceof Serializable) {
      ObjectStreamClass obs = ObjectStreamClass.lookup(m_clusterer.getClass());
      m_clustererVersion = "" + obs.getSerialVersionUID();
    } else {
      m_clustererVersion = "";
    }
  }
Example #19
  @Override
  public void setOptions(String[] options) throws Exception {
    String ns = Utils.getOption('N', options);

    if (ns.length() > 0) {
      setNamespace(ns);
    }

    // scheme options after the --
    String[] schemeOpts = Utils.partitionOptions(options);
    if (schemeOpts != null && schemeOpts.length > 0) {
      String sO = Utils.joinOptions(schemeOpts);
      setSchemeOptions(sO);
    }
  }
Example #20
  /**
   * Returns the options for this node and its subtree.
   *
   * @param node the node to get the options for
   * @return the generated options
   */
  protected String getOptions(DefaultMutableTreeNode node) {
    Vector<String> options;
    int i;

    options = new Vector<String>();

    // the node itself
    if (!node.toString().equals(LABEL_ROOT) && !node.toString().equals(LABEL_NESTED))
      options.add(node.toString());

    // the node's children
    for (i = 0; i < node.getChildCount(); i++)
      options.add(getOptions((DefaultMutableTreeNode) node.getChildAt(i)));

    return Utils.joinOptions(options.toArray(new String[options.size()]));
  }
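joinOptions is used here instead of plain concatenation because node labels can contain spaces or quotes; joinOptions escapes and quotes such entries so that Utils.splitOptions later recovers exactly the same tokens. A small round-trip illustration (the values are made up):

  /** Illustration only: the joinOptions / splitOptions round trip. */
  public static void roundTripDemo() throws Exception {
    String joined = Utils.joinOptions(new String[] {"-first value", "plain"});
    String[] back = Utils.splitOptions(joined);
    // back has two elements again: "-first value" and "plain"
  }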
Example #21
  /**
   * Displays a frame with the option tree panel.
   *
   * @param args displayed in the full options text
   */
  public static void main(String[] args) {
    OptionTree tree = new OptionTree();
    final JFrame jf = new JFrame("Option Tree");
    jf.getContentPane().setLayout(new BorderLayout());
    jf.getContentPane().add(tree, BorderLayout.CENTER);
    jf.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
    jf.addWindowListener(
        new WindowAdapter() {
          public void windowClosing(WindowEvent e) {
            jf.dispose();
          }
        });
    jf.pack();
    jf.setSize(800, 600);
    jf.setLocationRelativeTo(null);
    jf.setVisible(true);

    if (args.length > 0) tree.setOptions(Utils.joinOptions(args));
  }
  /**
   * Returns description of the cross-validated classifier.
   *
   * @return description of the cross-validated classifier as a string
   */
  public String toString() {

    if (m_InitOptions == null) return "CVParameterSelection: No model built yet.";

    String result =
        "Cross-validated Parameter selection.\n"
            + "Classifier: "
            + m_Classifier.getClass().getName()
            + "\n";
    try {
      for (int i = 0; i < m_CVParams.size(); i++) {
        CVParameter cvParam = (CVParameter) m_CVParams.elementAt(i);
        result +=
            "Cross-validation Parameter: '-"
                + cvParam.m_ParamChar
                + "'"
                + " ranged from "
                + cvParam.m_Lower
                + " to ";
        switch ((int) (cvParam.m_Lower - cvParam.m_Upper + 0.5)) {
          case 1:
            result += m_NumAttributes;
            break;
          case 2:
            result += m_TrainFoldSize;
            break;
          default:
            result += cvParam.m_Upper;
            break;
        }
        result += " with " + cvParam.m_Steps + " steps\n";
      }
    } catch (Exception ex) {
      result += ex.getMessage();
    }
    result +=
        "Classifier Options: "
            + Utils.joinOptions(m_BestClassifierOptions)
            + "\n\n"
            + m_Classifier.toString();
    return result;
  }
  /**
   * generates a filename with a certain suffix for the logQuality method
   *
   * @param suffix the suffix
   * @return the generated filename
   * @see #logQuality(int,int)
   */
  protected String createFilename(String suffix) {
    String result;

    result =
        this.getClass().getName()
            + "-"
            + CollectiveHelper.generateMD5(Utils.joinOptions(this.getOptions()))
            + "-"
            + (m_Trainset == null
                ? "null"
                : m_Trainset.relationName().replaceAll("-weka\\.filters\\..*", ""))
            + "-R"
            + getNumRestarts()
            + "-I"
            + getNumIterations()
            + "-E"
            + m_EvaluationType
            + "-C"
            + m_ComparisonType
            + suffix;

    return result;
  }
Example #24
File: LWL.java Project: alishakiba/jDenetX
  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {

    String[] superOptions = super.getOptions();
    String[] options = new String[superOptions.length + 6];

    int current = 0;

    options[current++] = "-U";
    options[current++] = "" + getWeightingKernel();
    if ((getKNN() == 0) && m_UseAllK) {
      options[current++] = "-K";
      options[current++] = "-1";
    } else {
      options[current++] = "-K";
      options[current++] = "" + getKNN();
    }
    options[current++] = "-A";
    options[current++] =
        m_NNSearch.getClass().getName() + " " + Utils.joinOptions(m_NNSearch.getOptions());

    System.arraycopy(superOptions, 0, options, current, superOptions.length);

    return options;
  }
  /**
   * A concise description of the model.
   *
   * @return a concise description of the model
   */
  public String toSummaryString() {

    String result = "Selected values: " + Utils.joinOptions(m_BestClassifierOptions);
    return result + '\n';
  }
Example #26
  @Override
  protected void notifyJobOutputListeners() {
    weka.classifiers.Classifier finalClassifier =
        ((weka.distributed.spark.WekaClassifierSparkJob) m_runningJob).getClassifier();
    Instances modelHeader =
        ((weka.distributed.spark.WekaClassifierSparkJob) m_runningJob).getTrainingHeader();
    String classAtt =
        ((weka.distributed.spark.WekaClassifierSparkJob) m_runningJob).getClassAttribute();
    try {
      weka.distributed.spark.WekaClassifierSparkJob.setClassIndex(classAtt, modelHeader, true);
    } catch (Exception ex) {
      if (m_log != null) {
        m_log.logMessage(statusMessagePrefix() + ex.getMessage());
      }
      ex.printStackTrace();
    }

    if (finalClassifier == null) {
      if (m_log != null) {
        m_log.logMessage(statusMessagePrefix() + "No classifier produced!");
      }
    }

    if (modelHeader == null) {
      if (m_log != null) {
        m_log.logMessage(statusMessagePrefix() + "No training header available for the model!");
      }
    }

    if (finalClassifier != null) {
      if (m_textListeners.size() > 0) {
        String textual = finalClassifier.toString();

        String title = "Spark: ";
        String classifierSpec = finalClassifier.getClass().getName();
        if (finalClassifier instanceof OptionHandler) {
          classifierSpec += " " + Utils.joinOptions(((OptionHandler) finalClassifier).getOptions());
        }
        title += classifierSpec;
        TextEvent te = new TextEvent(this, textual, title);
        for (TextListener t : m_textListeners) {
          t.acceptText(te);
        }
      }

      if (modelHeader != null) {
        // have to add a single bogus instance to the header to trick
        // the SerializedModelSaver into saving it (since it ignores
        // structure only DataSetEvents) :-)
        double[] vals = new double[modelHeader.numAttributes()];
        for (int i = 0; i < vals.length; i++) {
          vals[i] = Utils.missingValue();
        }
        Instance tempI = new DenseInstance(1.0, vals);
        modelHeader.add(tempI);
        DataSetEvent dse = new DataSetEvent(this, modelHeader);
        BatchClassifierEvent be = new BatchClassifierEvent(this, finalClassifier, dse, dse, 1, 1);
        for (BatchClassifierListener b : m_classifierListeners) {
          b.acceptClassifier(be);
        }
      }
    }
  }
 /**
  * returns a string representation of the cluster
  *
  * @return the cluster definition as string
  */
 public String toString() {
   return this.getClass().getName() + ": " + Utils.joinOptions(getOptions());
 }
Example #28
 /**
  * Gets the Groovy module options.
  *
  * @return the options
  */
 public String getGroovyOptions() {
   return Utils.joinOptions(m_GroovyOptions);
 }
  /**
   * Takes an evaluation object from a task and aggregates it with the overall one.
   *
   * @param eval the evaluation object to aggregate
   * @param classifier the classifier used by the task
   * @param testData the testData from the task
   * @param plotInstances the ClassifierErrorsPlotInstances object from the task
   * @param setNum the set number processed by the task
   * @param maxSetNum the maximum number of sets in this batch
   */
  protected synchronized void aggregateEvalTask(
      Evaluation eval,
      Classifier classifier,
      Instances testData,
      ClassifierErrorsPlotInstances plotInstances,
      int setNum,
      int maxSetNum) {

    m_eval.aggregate(eval);

    if (m_aggregatedPlotInstances == null) {
      m_aggregatedPlotInstances = new Instances(plotInstances.getPlotInstances());
      m_aggregatedPlotShapes = plotInstances.getPlotShapes();
      m_aggregatedPlotSizes = plotInstances.getPlotSizes();
    } else {
      Instances temp = plotInstances.getPlotInstances();
      for (int i = 0; i < temp.numInstances(); i++) {
        m_aggregatedPlotInstances.add(temp.get(i));
        m_aggregatedPlotShapes.addElement(plotInstances.getPlotShapes().get(i));
        m_aggregatedPlotSizes.addElement(plotInstances.getPlotSizes().get(i));
      }
    }
    m_setsComplete++;

    //  if (ce.getSetNumber() == ce.getMaxSetNumber()) {
    if (m_setsComplete == maxSetNum) {
      try {
        String textTitle = classifier.getClass().getName();
        String textOptions = "";
        if (classifier instanceof OptionHandler) {
          textOptions = Utils.joinOptions(((OptionHandler) classifier).getOptions());
        }
        textTitle = textTitle.substring(textTitle.lastIndexOf('.') + 1, textTitle.length());
        String resultT =
            "=== Evaluation result ===\n\n"
                + "Scheme: "
                + textTitle
                + "\n"
                + ((textOptions.length() > 0) ? "Options: " + textOptions + "\n" : "")
                + "Relation: "
                + testData.relationName()
                + "\n\n"
                + m_eval.toSummaryString();

        if (testData.classAttribute().isNominal()) {
          resultT += "\n" + m_eval.toClassDetailsString() + "\n" + m_eval.toMatrixString();
        }

        TextEvent te = new TextEvent(ClassifierPerformanceEvaluator.this, resultT, textTitle);
        notifyTextListeners(te);

        // set up visualizable errors
        if (m_visualizableErrorListeners.size() > 0) {
          PlotData2D errorD = new PlotData2D(m_aggregatedPlotInstances);
          errorD.setShapeSize(m_aggregatedPlotSizes);
          errorD.setShapeType(m_aggregatedPlotShapes);
          errorD.setPlotName(textTitle + " " + textOptions);

          /*          PlotData2D errorD = m_PlotInstances.getPlotData(
          textTitle + " " + textOptions); */
          VisualizableErrorEvent vel =
              new VisualizableErrorEvent(ClassifierPerformanceEvaluator.this, errorD);
          notifyVisualizableErrorListeners(vel);
          m_PlotInstances.cleanUp();
        }

        if (testData.classAttribute().isNominal() && m_thresholdListeners.size() > 0) {
          ThresholdCurve tc = new ThresholdCurve();
          Instances result = tc.getCurve(m_eval.predictions(), 0);
          result.setRelationName(testData.relationName());
          PlotData2D pd = new PlotData2D(result);
          String htmlTitle = "<html><font size=-2>" + textTitle;
          String newOptions = "";
          if (classifier instanceof OptionHandler) {
            String[] options = ((OptionHandler) classifier).getOptions();
            if (options.length > 0) {
              for (int ii = 0; ii < options.length; ii++) {
                if (options[ii].length() == 0) {
                  continue;
                }
                if (options[ii].charAt(0) == '-'
                    && !(options[ii].length() > 1
                        && options[ii].charAt(1) >= '0'
                        && options[ii].charAt(1) <= '9')) {
                  newOptions += "<br>";
                }
                newOptions += options[ii];
              }
            }
          }

          htmlTitle +=
              " "
                  + newOptions
                  + "<br>"
                  + " (class: "
                  + testData.classAttribute().value(0)
                  + ")"
                  + "</font></html>";
          pd.setPlotName(textTitle + " (class: " + testData.classAttribute().value(0) + ")");
          pd.setPlotNameHTML(htmlTitle);
          boolean[] connectPoints = new boolean[result.numInstances()];
          for (int jj = 1; jj < connectPoints.length; jj++) {
            connectPoints[jj] = true;
          }

          pd.setConnectPoints(connectPoints);

          ThresholdDataEvent rde =
              new ThresholdDataEvent(
                  ClassifierPerformanceEvaluator.this, pd, testData.classAttribute());
          notifyThresholdListeners(rde);
        }
        if (m_logger != null) {
          m_logger.statusMessage(statusMessagePrefix() + "Finished.");
        }

      } catch (Exception ex) {
        if (m_logger != null) {
          m_logger.logMessage(
              "[ClassifierPerformanceEvaluator] "
                  + statusMessagePrefix()
                  + " problem constructing evaluation results. "
                  + ex.getMessage());
        }
        ex.printStackTrace();
      } finally {
        m_visual.setStatic();
        // save memory
        m_PlotInstances = null;
        m_setsComplete = 0;
        m_tasks = null;
        m_aggregatedPlotInstances = null;
      }
    }
  }