/**
  * Trains a new maxent model for the {@link DocumentCategorizerME} using
  * fixed default settings: GIS training, 100 iterations, and a feature
  * cutoff of 5.
  *
  * @param eventStream the stream of document-categorizer training events
  * @return the new model
  * @throws IOException if reading events from the stream fails
  */
 public static AbstractModel train(DocumentCategorizerEventStream eventStream) throws IOException {
   // 100 GIS iterations; cutoff 5 discards features observed fewer than 5 times.
   return GIS.trainModel(100, new TwoPassDataIndexer(eventStream, 5));
 }
// Example #2
  /**
   * Trains a non-sequence model (maxent/GIS or perceptron) from the given event
   * stream, driven by the string-valued training parameters.
   *
   * <p>Recognized parameters include the algorithm name, iteration count,
   * feature cutoff, data indexer choice, and algorithm-specific tuning values
   * ("Threads" for maxent; "UseAverage", "UseSkippedAveraging",
   * "StepSizeDecrease", "Tolerance" for perceptron). Each parameter read is
   * also recorded into {@code reportMap} by the {@code get*Param} helpers.
   *
   * @param events the training events
   * @param trainParams training parameters; must pass {@code isValid} and must
   *     not request sequence training
   * @param reportMap receives the effective parameter values and, if non-null,
   *     a hash sum of the consumed events under key "Training-Eventhash"
   * @return the trained model
   * @throws IOException if reading events fails
   * @throws IllegalArgumentException if the parameters are invalid or request
   *     sequence training
   * @throws IllegalStateException if the algorithm or data indexer name is
   *     unrecognized
   */
  public static AbstractModel train(
      EventStream events, Map<String, String> trainParams, Map<String, String> reportMap)
      throws IOException {

    if (!isValid(trainParams)) throw new IllegalArgumentException("trainParams are not valid!");

    if (isSequenceTraining(trainParams))
      throw new IllegalArgumentException("sequence training is not supported by this method!");

    String algorithmName = getStringParam(trainParams, ALGORITHM_PARAM, MAXENT_VALUE, reportMap);

    int iterations = getIntParam(trainParams, ITERATIONS_PARAM, ITERATIONS_DEFAULT, reportMap);

    int cutoff = getIntParam(trainParams, CUTOFF_PARAM, CUTOFF_DEFAULT, reportMap);

    // Maxent indexing sorts and merges events; perceptron indexing must not.
    boolean sortAndMerge;

    if (MAXENT_VALUE.equals(algorithmName)) sortAndMerge = true;
    else if (PERCEPTRON_VALUE.equals(algorithmName)) sortAndMerge = false;
    else throw new IllegalStateException("Unexpected algorithm name: " + algorithmName);

    // Wrap the stream so a hash of all consumed events can be reported afterwards.
    HashSumEventStream hses = new HashSumEventStream(events);

    String dataIndexerName =
        getStringParam(trainParams, DATA_INDEXER_PARAM, DATA_INDEXER_TWO_PASS_VALUE, reportMap);

    DataIndexer indexer = null;

    if (DATA_INDEXER_ONE_PASS_VALUE.equals(dataIndexerName)) {
      indexer = new OnePassDataIndexer(hses, cutoff, sortAndMerge);
    } else if (DATA_INDEXER_TWO_PASS_VALUE.equals(dataIndexerName)) {
      indexer = new TwoPassDataIndexer(hses, cutoff, sortAndMerge);
    } else {
      throw new IllegalStateException("Unexpected data indexer name: " + dataIndexerName);
    }

    AbstractModel model;
    if (MAXENT_VALUE.equals(algorithmName)) {

      int threads = getIntParam(trainParams, "Threads", 1, reportMap);

      model = opennlp.maxent.GIS.trainModel(iterations, indexer, true, false, null, 0, threads);
    } else if (PERCEPTRON_VALUE.equals(algorithmName)) {
      boolean useAverage = getBooleanParam(trainParams, "UseAverage", true, reportMap);

      boolean useSkippedAveraging =
          getBooleanParam(trainParams, "UseSkippedAveraging", false, reportMap);

      // Skipped averaging implies averaging; force it on, otherwise it might not work.
      if (useSkippedAveraging) useAverage = true;

      double stepSizeDecrease = getDoubleParam(trainParams, "StepSizeDecrease", 0, reportMap);

      double tolerance =
          getDoubleParam(trainParams, "Tolerance", PerceptronTrainer.TOLERANCE_DEFAULT, reportMap);

      opennlp.perceptron.PerceptronTrainer perceptronTrainer =
          new opennlp.perceptron.PerceptronTrainer();
      perceptronTrainer.setSkippedAveraging(useSkippedAveraging);

      // 0 (the default) means "disabled"; only apply a positive decrease.
      if (stepSizeDecrease > 0) perceptronTrainer.setStepSizeDecrease(stepSizeDecrease);

      perceptronTrainer.setTolerance(tolerance);

      model = perceptronTrainer.trainModel(iterations, indexer, cutoff, useAverage);
    } else {
      throw new IllegalStateException("Algorithm not supported: " + algorithmName);
    }

    if (reportMap != null)
      reportMap.put("Training-Eventhash", hses.calculateHashSum().toString(16));

    return model;
  }