This is an automated email from the ASF dual-hosted git repository.

mawiesne pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/opennlp.git


The following commit(s) were added to refs/heads/main by this push:
     new cfb6c57c OPENNLP-1643 - Remove inconsistent Training Parameter 
Definitions (#682)
cfb6c57c is described below

commit cfb6c57c3920e4a8d9e8bad6d3a0b3276959dc52
Author: Richard Zowalla <[email protected]>
AuthorDate: Fri Nov 8 06:28:35 2024 +0100

    OPENNLP-1643 - Remove inconsistent Training Parameter Definitions (#682)
---
 .../ml/AbstractEventModelSequenceTrainer.java      |  3 +-
 .../opennlp/tools/ml/AbstractEventTrainer.java     |  6 ++--
 .../java/opennlp/tools/ml/AbstractTrainer.java     | 31 +++++++++-----------
 .../main/java/opennlp/tools/ml/TrainerFactory.java | 25 ++++++++--------
 .../java/opennlp/tools/ml/maxent/GISTrainer.java   |  4 +--
 .../tools/ml/model/AbstractDataIndexer.java        |  4 ---
 .../opennlp/tools/ml/model/OnePassDataIndexer.java |  4 ++-
 .../opennlp/tools/ml/model/TwoPassDataIndexer.java |  4 ++-
 .../SimplePerceptronSequenceTrainer.java           |  5 ++--
 .../opennlp/tools/util/TrainingParameters.java     | 24 ++++++----------
 .../tools/doccat/DocumentCategorizerNBTest.java    |  5 ++--
 .../java/opennlp/tools/ml/TrainerFactoryTest.java  |  4 +--
 .../opennlp/tools/ml/maxent/GISIndexingTest.java   | 23 ++++++++-------
 .../tools/ml/maxent/MaxentPrepAttachTest.java      | 11 ++++----
 .../tools/ml/maxent/RealValueModelTest.java        |  3 +-
 .../tools/ml/maxent/ScaleDoesntMatterTest.java     |  3 +-
 .../ml/maxent/io/RealValueFileEventStreamTest.java |  3 +-
 .../maxent/quasinewton/NegLogLikelihoodTest.java   |  3 +-
 .../ml/maxent/quasinewton/QNPrepAttachTest.java    | 19 ++++++-------
 .../tools/ml/maxent/quasinewton/QNTrainerTest.java |  3 +-
 .../ml/naivebayes/NaiveBayesCorrectnessTest.java   |  3 +-
 .../naivebayes/NaiveBayesModelReadWriteTest.java   |  3 +-
 .../ml/naivebayes/NaiveBayesPrepAttachTest.java    | 11 ++++----
 .../NaiveBayesSerializedCorrectnessTest.java       |  3 +-
 .../ml/perceptron/PerceptronPrepAttachTest.java    | 33 +++++++++++-----------
 25 files changed, 109 insertions(+), 131 deletions(-)

diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventModelSequenceTrainer.java
 
b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventModelSequenceTrainer.java
index b233df23..cdbd267f 100644
--- 
a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventModelSequenceTrainer.java
+++ 
b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventModelSequenceTrainer.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import opennlp.tools.ml.model.Event;
 import opennlp.tools.ml.model.MaxentModel;
 import opennlp.tools.ml.model.SequenceStream;
+import opennlp.tools.util.TrainingParameters;
 
 /**
  * A basic {@link EventModelSequenceTrainer} implementation that processes 
{@link Event events}.
@@ -39,7 +40,7 @@ public abstract class AbstractEventModelSequenceTrainer 
extends AbstractTrainer
     validate();
 
     MaxentModel model = doTrain(events);
-    addToReport(AbstractTrainer.TRAINER_TYPE_PARAM, 
EventModelSequenceTrainer.SEQUENCE_VALUE);
+    addToReport(TrainingParameters.TRAINER_TYPE_PARAM, 
EventModelSequenceTrainer.SEQUENCE_VALUE);
     return model;
   }
 
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventTrainer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventTrainer.java
index 9ea5ddce..19b3ba6e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventTrainer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractEventTrainer.java
@@ -57,8 +57,8 @@ public abstract class AbstractEventTrainer extends 
AbstractTrainer implements Ev
 
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, isSortAndMerge());
     // If the cutoff was set, don't overwrite the value.
-    if (trainingParameters.getIntParameter(CUTOFF_PARAM, -1) == -1) {
-      trainingParameters.put(CUTOFF_PARAM, 5);
+    if (trainingParameters.getIntParameter(TrainingParameters.CUTOFF_PARAM, 
-1) == -1) {
+      trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 
TrainingParameters.CUTOFF_DEFAULT_VALUE);
     }
     
     DataIndexer indexer = 
DataIndexerFactory.getDataIndexer(trainingParameters, reportMap);
@@ -77,7 +77,7 @@ public abstract class AbstractEventTrainer extends 
AbstractTrainer implements Ev
     }
 
     MaxentModel model = doTrain(indexer);
-    addToReport(AbstractTrainer.TRAINER_TYPE_PARAM, EventTrainer.EVENT_VALUE);
+    addToReport(TrainingParameters.TRAINER_TYPE_PARAM, 
EventTrainer.EVENT_VALUE);
     return model;
   }
 
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractTrainer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractTrainer.java
index f7bc777a..54e315c8 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractTrainer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/AbstractTrainer.java
@@ -26,16 +26,6 @@ import opennlp.tools.util.TrainingParameters;
 
 public abstract class AbstractTrainer implements Trainer {
 
-  public static final String ALGORITHM_PARAM = "Algorithm";
-
-  public static final String TRAINER_TYPE_PARAM = "TrainerType";
-
-  public static final String CUTOFF_PARAM = "Cutoff";
-  public static final int CUTOFF_DEFAULT = 5;
-
-  public static final String ITERATIONS_PARAM = "Iterations";
-  public static final int ITERATIONS_DEFAULT = 100;
-
   protected TrainingParameters trainingParameters;
   protected Map<String,String> reportMap;
 
@@ -66,24 +56,27 @@ public abstract class AbstractTrainer implements Trainer {
   }
 
   /**
-   * @return Retrieves the configured {@link #ALGORITHM_PARAM} value.
+   * @return Retrieves the configured {@link 
TrainingParameters#ALGORITHM_PARAM} value.
    */
   public String getAlgorithm() {
-    return trainingParameters.getStringParameter(ALGORITHM_PARAM, 
GISTrainer.MAXENT_VALUE);
+    return 
trainingParameters.getStringParameter(TrainingParameters.ALGORITHM_PARAM,
+        GISTrainer.MAXENT_VALUE);
   }
 
   /**
-   * @return Retrieves the configured {@link #CUTOFF_PARAM} value.
+   * @return Retrieves the configured {@link TrainingParameters#CUTOFF_PARAM} 
value.
    */
   public int getCutoff() {
-    return trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
+    return trainingParameters.getIntParameter(TrainingParameters.CUTOFF_PARAM,
+        TrainingParameters.CUTOFF_DEFAULT_VALUE);
   }
 
   /**
-   * @return Retrieves the configured {@link #ITERATIONS_PARAM} value.
+   * @return Retrieves the configured {@link 
TrainingParameters#ITERATIONS_PARAM} value.
    */
   public int getIterations() {
-    return trainingParameters.getIntParameter(ITERATIONS_PARAM, 
ITERATIONS_DEFAULT);
+    return 
trainingParameters.getIntParameter(TrainingParameters.ITERATIONS_PARAM,
+        TrainingParameters.ITERATIONS_DEFAULT_VALUE);
   }
 
   /**
@@ -97,8 +90,10 @@ public abstract class AbstractTrainer implements Trainer {
     // should validate if algorithm is set? What about the Parser?
 
     try {
-      trainingParameters.getIntParameter(CUTOFF_PARAM, CUTOFF_DEFAULT);
-      trainingParameters.getIntParameter(ITERATIONS_PARAM, ITERATIONS_DEFAULT);
+      trainingParameters.getIntParameter(TrainingParameters.CUTOFF_PARAM,
+          TrainingParameters.CUTOFF_DEFAULT_VALUE);
+      trainingParameters.getIntParameter(TrainingParameters.ITERATIONS_PARAM,
+          TrainingParameters.ITERATIONS_DEFAULT_VALUE);
     } catch (NumberFormatException e) {
       throw new IllegalArgumentException(e);
     }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/TrainerFactory.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/TrainerFactory.java
index 9e46367a..b47e3a75 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/TrainerFactory.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/TrainerFactory.java
@@ -59,7 +59,7 @@ public class TrainerFactory {
 
   /**
    * Determines the {@link TrainerType} based on the
-   * {@link AbstractTrainer#ALGORITHM_PARAM} value.
+   * {@link TrainingParameters#ALGORITHM_PARAM} value.
    *
    * @param trainParams - A mapping of {@link TrainingParameters training 
parameters}.
    *
@@ -67,7 +67,7 @@ public class TrainerFactory {
    */
   public static TrainerType getTrainerType(TrainingParameters trainParams) {
 
-    String algorithmValue = 
trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
+    String algorithmValue = 
trainParams.getStringParameter(TrainingParameters.ALGORITHM_PARAM,null);
 
     // Check if it is defaulting to the MAXENT trainer
     if (algorithmValue == null) {
@@ -122,7 +122,7 @@ public class TrainerFactory {
    * Retrieves a {@link SequenceTrainer} that fits the given parameters.
    *
    * @param trainParams The {@link TrainingParameters} to check for the 
trainer type.
-   *                    Note: The entry {@link 
AbstractTrainer#ALGORITHM_PARAM} is used
+   *                    Note: The entry {@link 
TrainingParameters#ALGORITHM_PARAM} is used
    *                    to determine the type.
    * @param reportMap A {@link Map} that shall be used during initialization of
    *                  the {@link SequenceTrainer}.
@@ -132,7 +132,7 @@ public class TrainerFactory {
    */
   public static SequenceTrainer getSequenceModelTrainer(
           TrainingParameters trainParams, Map<String, String> reportMap) {
-    String trainerType = 
trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
+    String trainerType = 
trainParams.getStringParameter(TrainingParameters.ALGORITHM_PARAM,null);
 
     if (trainerType != null) {
       final SequenceTrainer trainer;
@@ -153,7 +153,7 @@ public class TrainerFactory {
    * Retrieves an {@link EventModelSequenceTrainer} that fits the given 
parameters.
    *
    * @param trainParams The {@link TrainingParameters} to check for the 
trainer type.
-   *                    Note: The entry {@link 
AbstractTrainer#ALGORITHM_PARAM} is used
+   *                    Note: The entry {@link 
TrainingParameters#ALGORITHM_PARAM} is used
    *                    to determine the type.
    * @param reportMap A {@link Map} that shall be used during initialization of
    *                  the {@link EventModelSequenceTrainer}.
@@ -163,7 +163,7 @@ public class TrainerFactory {
    */
   public static <T> EventModelSequenceTrainer<T> getEventModelSequenceTrainer(
           TrainingParameters trainParams, Map<String, String> reportMap) {
-    String trainerType = 
trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
+    String trainerType = 
trainParams.getStringParameter(TrainingParameters.ALGORITHM_PARAM,null);
 
     if (trainerType != null) {
       final EventModelSequenceTrainer<T> trainer;
@@ -184,7 +184,7 @@ public class TrainerFactory {
    * Retrieves an {@link EventTrainer} that fits the given parameters.
    *
    * @param trainParams The {@link TrainingParameters} to check for the 
trainer type.
-   *                    Note: The entry {@link 
AbstractTrainer#ALGORITHM_PARAM} is used
+   *                    Note: The entry {@link 
TrainingParameters#ALGORITHM_PARAM} is used
    *                    to determine the type. If the type is not defined, the
    *                    {@link GISTrainer#MAXENT_VALUE} will be used.
    * @param reportMap A {@link Map} that shall be used during initialization of
@@ -197,7 +197,7 @@ public class TrainerFactory {
 
     // if the trainerType is not defined -- use the GISTrainer.
     String trainerType = trainParams.getStringParameter(
-            AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
+        TrainingParameters.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
 
     final EventTrainer trainer;
     if (BUILTIN_TRAINERS.containsKey(trainerType)) {
@@ -216,7 +216,8 @@ public class TrainerFactory {
   public static boolean isValid(TrainingParameters trainParams) {
 
     // TODO: Need to validate all parameters correctly ... error prone?!
-    String algorithmName = 
trainParams.getStringParameter(AbstractTrainer.ALGORITHM_PARAM,null);
+    String algorithmName = 
trainParams.getStringParameter(TrainingParameters.ALGORITHM_PARAM,
+        null);
 
     // If a trainer type can be determined, then the trainer is valid!
     if (algorithmName != null &&
@@ -227,8 +228,10 @@ public class TrainerFactory {
     try {
       // require that the Cutoff and the number of iterations be an integer.
       // if they are not set, the default values will be ok.
-      trainParams.getIntParameter(AbstractTrainer.CUTOFF_PARAM, 0);
-      trainParams.getIntParameter(AbstractTrainer.ITERATIONS_PARAM, 0);
+      trainParams.getIntParameter(TrainingParameters.CUTOFF_PARAM,
+          TrainingParameters.CUTOFF_DEFAULT_VALUE);
+      trainParams.getIntParameter(TrainingParameters.ITERATIONS_PARAM,
+          TrainingParameters.ITERATIONS_DEFAULT_VALUE);
     }
     catch (NumberFormatException e) {
       return false;
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
index caa0248c..d2eabeb9 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
@@ -287,8 +287,8 @@ public class GISTrainer extends AbstractEventTrainer {
                              int cutoff) throws IOException {
     DataIndexer indexer = new OnePassDataIndexer();
     TrainingParameters indexingParameters = new TrainingParameters();
-    indexingParameters.put(GISTrainer.CUTOFF_PARAM, cutoff);
-    indexingParameters.put(GISTrainer.ITERATIONS_PARAM, iterations);
+    indexingParameters.put(TrainingParameters.CUTOFF_PARAM, cutoff);
+    indexingParameters.put(TrainingParameters.ITERATIONS_PARAM, iterations);
     Map<String, String> reportMap = new HashMap<>();
     indexer.init(indexingParameters, reportMap);
     indexer.index(eventStream);
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/model/AbstractDataIndexer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/model/AbstractDataIndexer.java
index 017574eb..16fa0243 100644
--- 
a/opennlp-tools/src/main/java/opennlp/tools/ml/model/AbstractDataIndexer.java
+++ 
b/opennlp-tools/src/main/java/opennlp/tools/ml/model/AbstractDataIndexer.java
@@ -30,7 +30,6 @@ import java.util.Objects;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.util.InsufficientTrainingDataException;
 import opennlp.tools.util.ObjectStream;
 import opennlp.tools.util.TrainingParameters;
@@ -45,9 +44,6 @@ public abstract class AbstractDataIndexer implements 
DataIndexer {
 
   private static final Logger logger = 
LoggerFactory.getLogger(AbstractDataIndexer.class);
 
-  public static final String CUTOFF_PARAM = AbstractTrainer.CUTOFF_PARAM;
-  public static final int CUTOFF_DEFAULT = AbstractTrainer.CUTOFF_DEFAULT;
-
   public static final String SORT_PARAM = "sort";
   public static final boolean SORT_DEFAULT = true;
 
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/model/OnePassDataIndexer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/model/OnePassDataIndexer.java
index 8ed24de6..71d29199 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/model/OnePassDataIndexer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/model/OnePassDataIndexer.java
@@ -28,6 +28,7 @@ import org.slf4j.LoggerFactory;
 
 import opennlp.tools.util.ObjectStream;
 import opennlp.tools.util.ObjectStreamUtils;
+import opennlp.tools.util.TrainingParameters;
 
 /**
  * A {@link DataIndexer} for maxent model data which handles cutoffs for 
uncommon
@@ -48,7 +49,8 @@ public class OnePassDataIndexer extends AbstractDataIndexer {
    */
   @Override
   public void index(ObjectStream<Event> eventStream) throws IOException {
-    int cutoff = trainingParameters.getIntParameter(CUTOFF_PARAM, 
CUTOFF_DEFAULT);
+    int cutoff = 
trainingParameters.getIntParameter(TrainingParameters.CUTOFF_PARAM,
+        TrainingParameters.CUTOFF_DEFAULT_VALUE);
     boolean sort = trainingParameters.getBooleanParameter(SORT_PARAM, 
SORT_DEFAULT);
 
     long start = System.currentTimeMillis();
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/model/TwoPassDataIndexer.java 
b/opennlp-tools/src/main/java/opennlp/tools/ml/model/TwoPassDataIndexer.java
index 0e49a4bd..005d7663 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/model/TwoPassDataIndexer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/model/TwoPassDataIndexer.java
@@ -37,6 +37,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import opennlp.tools.util.ObjectStream;
+import opennlp.tools.util.TrainingParameters;
 
 /**
  * Collecting event and context counts by making two passes over the events.
@@ -61,7 +62,8 @@ public class TwoPassDataIndexer extends AbstractDataIndexer {
    */
   @Override
   public void index(ObjectStream<Event> eventStream) throws IOException {
-    int cutoff = trainingParameters.getIntParameter(CUTOFF_PARAM, 
CUTOFF_DEFAULT);
+    int cutoff = 
trainingParameters.getIntParameter(TrainingParameters.CUTOFF_PARAM,
+        TrainingParameters.CUTOFF_DEFAULT_VALUE);
     boolean sort = trainingParameters.getBooleanParameter(SORT_PARAM, 
SORT_DEFAULT);
 
     logger.info("Indexing events with TwoPass using cutoff of {}", cutoff);
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/SimplePerceptronSequenceTrainer.java
 
b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/SimplePerceptronSequenceTrainer.java
index 92c2f48e..a58bdbde 100644
--- 
a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/SimplePerceptronSequenceTrainer.java
+++ 
b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/SimplePerceptronSequenceTrainer.java
@@ -36,6 +36,7 @@ import opennlp.tools.ml.model.OnePassDataIndexer;
 import opennlp.tools.ml.model.Sequence;
 import opennlp.tools.ml.model.SequenceStream;
 import opennlp.tools.ml.model.SequenceStreamEventStream;
+import opennlp.tools.util.TrainingParameters;
 
 /**
  * Trains {@link PerceptronModel models} with sequences using the perceptron 
algorithm.
@@ -145,7 +146,7 @@ public class SimplePerceptronSequenceTrainer extends 
AbstractEventModelSequenceT
    *
    * @param iterations     The number of iterations to use for training.
    * @param sequenceStream The {@link SequenceStream<Event>} used as data 
input.
-   * @param cutoff         The {{@link #CUTOFF_PARAM}} value to use for 
training.
+   * @param cutoff         The {{@link TrainingParameters#CUTOFF_PARAM}} value 
to use for training.
    * @param useAverage     Whether to use 'averaging', or not.
    * @return A valid, trained {@link AbstractModel perceptron model}.
    */
@@ -154,7 +155,7 @@ public class SimplePerceptronSequenceTrainer extends 
AbstractEventModelSequenceT
     this.iterations = iterations;
     this.sequenceStream = sequenceStream;
 
-    trainingParameters.put(AbstractDataIndexer.CUTOFF_PARAM, cutoff);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, cutoff);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     DataIndexer di = new OnePassDataIndexer();
     di.init(trainingParameters, reportMap);
diff --git 
a/opennlp-tools/src/main/java/opennlp/tools/util/TrainingParameters.java 
b/opennlp-tools/src/main/java/opennlp/tools/util/TrainingParameters.java
index 824d6a04..37f5bd92 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/TrainingParameters.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/TrainingParameters.java
@@ -35,14 +35,21 @@ import opennlp.tools.ml.EventTrainer;
  */
 public class TrainingParameters {
 
-  // TODO: are them duplicated?
   public static final String ALGORITHM_PARAM = "Algorithm";
   public static final String TRAINER_TYPE_PARAM = "TrainerType";
 
   public static final String ITERATIONS_PARAM = "Iterations";
   public static final String CUTOFF_PARAM = "Cutoff";
   public static final String THREADS_PARAM = "Threads";
+
+  /**
+   * The default number of iterations is 100.
+   */
   public static final int ITERATIONS_DEFAULT_VALUE = 100;
+
+  /**
+   * The default cut off value is 5.
+   */
   public static final int CUTOFF_DEFAULT_VALUE = 5;
 
   private final Map<String, Object> parameters = new 
TreeMap<>(String.CASE_INSENSITIVE_ORDER);
@@ -96,21 +103,6 @@ public class TrainingParameters {
     return (String)parameters.get(ALGORITHM_PARAM);
   }
 
-  private static String getStringValue(Object value) {
-    if (value instanceof Integer) {
-      return Integer.toString((Integer)value);
-    }
-    else if (value instanceof Double) {
-      return Double.toString((Double)value);
-    }
-    else if (value instanceof Boolean) {
-      return Boolean.toString((Boolean)value);
-    }
-    else {
-      return (String)value;
-    }
-  }
-
   /**
    * @param namespace The name space to filter or narrow the search space. May 
be {@code null}.
    *
diff --git 
a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
 
b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
index 4c3fd562..b822ec5a 100644
--- 
a/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
+++ 
b/opennlp-tools/src/test/java/opennlp/tools/doccat/DocumentCategorizerNBTest.java
@@ -24,7 +24,6 @@ import java.util.SortedMap;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.naivebayes.NaiveBayesTrainer;
 import opennlp.tools.util.ObjectStream;
 import opennlp.tools.util.ObjectStreamUtils;
@@ -44,9 +43,9 @@ public class DocumentCategorizerNBTest {
         new DocumentSample("0", new String[] {"x", "y", "z", "7", "8"}));
 
     TrainingParameters params = new TrainingParameters();
-    params.put(TrainingParameters.ITERATIONS_PARAM, 100);
+    params.put(TrainingParameters.ITERATIONS_PARAM, 
TrainingParameters.ITERATIONS_DEFAULT_VALUE);
     params.put(TrainingParameters.CUTOFF_PARAM, 0);
-    params.put(AbstractTrainer.ALGORITHM_PARAM, 
NaiveBayesTrainer.NAIVE_BAYES_VALUE);
+    params.put(TrainingParameters.ALGORITHM_PARAM, 
NaiveBayesTrainer.NAIVE_BAYES_VALUE);
 
     DoccatModel model = DocumentCategorizerME.train("x-unspecified", samples,
         params, new DoccatFactory());
diff --git 
a/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java 
b/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
index 9e6c8e0d..a8f1224a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/TrainerFactoryTest.java
@@ -63,7 +63,7 @@ public class TrainerFactoryTest {
 
   @Test
   void testIsSequenceTrainerTrue() {
-    mlParams.put(AbstractTrainer.ALGORITHM_PARAM,
+    mlParams.put(TrainingParameters.ALGORITHM_PARAM,
         SimplePerceptronSequenceTrainer.PERCEPTRON_SEQUENCE_VALUE);
 
     TrainerType trainerType = TrainerFactory.getTrainerType(mlParams);
@@ -73,7 +73,7 @@ public class TrainerFactoryTest {
 
   @Test
   void testIsSequenceTrainerFalse() {
-    mlParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
+    mlParams.put(TrainingParameters.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
     TrainerType trainerType = TrainerFactory.getTrainerType(mlParams);
     Assertions.assertNotEquals(TrainerType.EVENT_MODEL_SEQUENCE_TRAINER, 
trainerType);
   }
diff --git 
a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java 
b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
index 912d30f5..fa1f18cf 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/GISIndexingTest.java
@@ -27,7 +27,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
 import opennlp.tools.ml.AbstractEventTrainer;
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.EventTrainer;
 import opennlp.tools.ml.TrainerFactory;
 import opennlp.tools.ml.maxent.quasinewton.QNTrainer;
@@ -64,7 +63,7 @@ public class GISIndexingTest {
   void testGISTrainSignature1() throws IOException {
     try (ObjectStream<Event> eventStream = createEventStream()) {
       TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
-      params.put(AbstractTrainer.CUTOFF_PARAM, 1);
+      params.put(TrainingParameters.CUTOFF_PARAM, 1);
 
       EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
 
@@ -79,7 +78,7 @@ public class GISIndexingTest {
   void testGISTrainSignature2() throws IOException {
     try (ObjectStream<Event> eventStream = createEventStream()) {
       TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
-      params.put(AbstractTrainer.CUTOFF_PARAM, 1);
+      params.put(TrainingParameters.CUTOFF_PARAM, 1);
       params.put("smoothing", true);
       EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
 
@@ -95,8 +94,8 @@ public class GISIndexingTest {
     try (ObjectStream<Event> eventStream = createEventStream()) {
       TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
 
-      params.put(AbstractTrainer.ITERATIONS_PARAM, 10);
-      params.put(AbstractTrainer.CUTOFF_PARAM, 1);
+      params.put(TrainingParameters.ITERATIONS_PARAM, 10);
+      params.put(TrainingParameters.CUTOFF_PARAM, 1);
 
       EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
 
@@ -111,8 +110,8 @@ public class GISIndexingTest {
   void testGISTrainSignature4() throws IOException {
     try (ObjectStream<Event> eventStream = createEventStream()) {
       TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
-      params.put(AbstractTrainer.ITERATIONS_PARAM, 10);
-      params.put(AbstractTrainer.CUTOFF_PARAM, 1);
+      params.put(TrainingParameters.ITERATIONS_PARAM, 10);
+      params.put(TrainingParameters.CUTOFF_PARAM, 1);
       GISTrainer trainer = (GISTrainer) TrainerFactory.getEventTrainer(params, 
null);
       trainer.setGaussianSigma(0.01);
 
@@ -129,8 +128,8 @@ public class GISIndexingTest {
     try (ObjectStream<Event> eventStream = createEventStream()) {
       TrainingParameters params = ModelUtil.createDefaultTrainingParameters();
 
-      params.put(AbstractTrainer.ITERATIONS_PARAM, 10);
-      params.put(AbstractTrainer.CUTOFF_PARAM, 1);
+      params.put(TrainingParameters.ITERATIONS_PARAM, 10);
+      params.put(TrainingParameters.CUTOFF_PARAM, 1);
       params.put("smoothing", false);
 
       EventTrainer trainer = TrainerFactory.getEventTrainer(params, null);
@@ -146,7 +145,7 @@ public class GISIndexingTest {
     // by default we are using GIS/EventTrainer/Cutoff of 5/100 iterations
     parameters.put(TrainingParameters.ITERATIONS_PARAM, 10);
     parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, 
AbstractEventTrainer.DATA_INDEXER_ONE_PASS_VALUE);
-    parameters.put(AbstractEventTrainer.CUTOFF_PARAM, 1);
+    parameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     // note: setting the SORT_PARAM to true is the default, so it is not 
really needed
     parameters.put(AbstractDataIndexer.SORT_PARAM, true);
 
@@ -168,7 +167,7 @@ public class GISIndexingTest {
 
     parameters.put(TrainingParameters.ALGORITHM_PARAM, 
QNTrainer.MAXENT_QN_VALUE);
     parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, 
AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
-    parameters.put(AbstractEventTrainer.CUTOFF_PARAM, 2);
+    parameters.put(TrainingParameters.CUTOFF_PARAM, 2);
 
     trainer = TrainerFactory.getEventTrainer(parameters, new HashMap<>());
     Assertions.assertEquals("opennlp.tools.ml.maxent.quasinewton.QNTrainer", 
trainer.getClass().getName());
@@ -186,7 +185,7 @@ public class GISIndexingTest {
 
     // set the cutoff to 1 for this test.
     TrainingParameters parameters = new TrainingParameters();
-    parameters.put(AbstractDataIndexer.CUTOFF_PARAM, 1);
+    parameters.put(TrainingParameters.CUTOFF_PARAM, 1);
 
     // test with a 1 pass data indexer...
     parameters.put(AbstractEventTrainer.DATA_INDEXER_PARAM, 
AbstractEventTrainer.DATA_INDEXER_ONE_PASS_VALUE);
diff --git 
a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java 
b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
index 9cc31704..3d379780 100644
--- 
a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
+++ 
b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/MaxentPrepAttachTest.java
@@ -24,7 +24,6 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
 import opennlp.tools.ml.AbstractEventTrainer;
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.EventTrainer;
 import opennlp.tools.ml.PrepAttachDataUtil;
 import opennlp.tools.ml.TrainerFactory;
@@ -43,7 +42,7 @@ public class MaxentPrepAttachTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     testDataIndexer = new TwoPassDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
@@ -76,10 +75,10 @@ public class MaxentPrepAttachTest {
   void testMaxentOnPrepAttachDataWithParams() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, 
GISTrainer.MAXENT_VALUE);
     trainParams.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
         AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
     MaxentModel model = 
trainer.train(PrepAttachDataUtil.createTrainingStream());
@@ -91,7 +90,7 @@ public class MaxentPrepAttachTest {
   void testMaxentOnPrepAttachDataWithParamsDefault() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, 
GISTrainer.MAXENT_VALUE);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
     MaxentModel model = 
trainer.train(PrepAttachDataUtil.createTrainingStream());
@@ -102,7 +101,7 @@ public class MaxentPrepAttachTest {
   @Test
   void testMaxentOnPrepAttachDataWithParamsLLThreshold() throws IOException {
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, GISTrainer.MAXENT_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, 
GISTrainer.MAXENT_VALUE);
     trainParams.put(GISTrainer.LOG_LIKELIHOOD_THRESHOLD_PARAM, 5.);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
diff --git 
a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java 
b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
index 28b98f5a..a26b1ac4 100644
--- 
a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
+++ 
b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/RealValueModelTest.java
@@ -24,7 +24,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.DataIndexer;
 import opennlp.tools.ml.model.FileEventStream;
 import opennlp.tools.ml.model.OnePassRealValueDataIndexer;
@@ -39,7 +38,7 @@ public class RealValueModelTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     testDataIndexer = new OnePassRealValueDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
index 5869562b..466ff6aa 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/ScaleDoesntMatterTest.java
@@ -24,7 +24,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.EventTrainer;
 import opennlp.tools.ml.TrainerFactory;
 import opennlp.tools.ml.model.DataIndexer;
@@ -45,7 +44,7 @@ public class ScaleDoesntMatterTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 0);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 0);
     testDataIndexer = new OnePassRealValueDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
index 08f2e8e5..a5a64d7d 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/io/RealValueFileEventStreamTest.java
@@ -24,7 +24,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.DataIndexer;
 import opennlp.tools.ml.model.OnePassRealValueDataIndexer;
 import opennlp.tools.ml.model.RealValueFileEventStream;
@@ -37,7 +36,7 @@ public class RealValueFileEventStreamTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     indexer = new OnePassRealValueDataIndexer();
     indexer.init(trainingParameters, new HashMap<>());
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
index 7a031310..155aed22 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
@@ -27,7 +27,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.DataIndexer;
 import opennlp.tools.ml.model.OnePassRealValueDataIndexer;
 import opennlp.tools.ml.model.RealValueFileEventStream;
@@ -42,7 +41,7 @@ public class NegLogLikelihoodTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     testDataIndexer = new OnePassRealValueDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNPrepAttachTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNPrepAttachTest.java
index 82d8afb4..5dd7c2cf 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNPrepAttachTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNPrepAttachTest.java
@@ -23,7 +23,6 @@ import java.util.HashMap;
 import org.junit.jupiter.api.Test;
 
 import opennlp.tools.ml.AbstractEventTrainer;
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.PrepAttachDataUtil;
 import opennlp.tools.ml.TrainerFactory;
 import opennlp.tools.ml.model.AbstractDataIndexer;
@@ -39,7 +38,7 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachData() throws IOException {
     DataIndexer indexer = new TwoPassDataIndexer();
     TrainingParameters indexingParameters = new TrainingParameters();
-    indexingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    indexingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     indexingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     indexer.init(indexingParameters, new HashMap<>());
     indexer.index(PrepAttachDataUtil.createTrainingStream());
@@ -53,7 +52,7 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachDataWithParamsDefault() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
 
     MaxentModel model = TrainerFactory.getEventTrainer(trainParams, null)
         .train(PrepAttachDataUtil.createTrainingStream());
@@ -65,10 +64,10 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachDataWithElasticNetParams() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
     trainParams.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
         AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put(QNTrainer.L1COST_PARAM, 0.25);
     trainParams.put(QNTrainer.L2COST_PARAM, 1.0D);
 
@@ -82,10 +81,10 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachDataWithL1Params() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
     trainParams.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
         AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put(QNTrainer.L1COST_PARAM, 1.0D);
     trainParams.put(QNTrainer.L2COST_PARAM, 0D);
 
@@ -99,10 +98,10 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachDataWithL2Params() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
     trainParams.put(AbstractEventTrainer.DATA_INDEXER_PARAM,
         AbstractEventTrainer.DATA_INDEXER_TWO_PASS_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put(QNTrainer.L1COST_PARAM, 0D);
     trainParams.put(QNTrainer.L2COST_PARAM, 1.0D);
 
@@ -116,7 +115,7 @@ public class QNPrepAttachTest {
   void testQNOnPrepAttachDataInParallel() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, QNTrainer.MAXENT_QN_VALUE);
     trainParams.put(QNTrainer.THREADS_PARAM, 2);
 
     MaxentModel model = TrainerFactory.getEventTrainer(trainParams, null)
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNTrainerTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNTrainerTest.java
index 8751c02a..ac323f1a 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNTrainerTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNTrainerTest.java
@@ -27,7 +27,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.AbstractModel;
 import opennlp.tools.ml.model.BinaryFileDataReader;
 import opennlp.tools.ml.model.DataIndexer;
@@ -46,7 +45,7 @@ public class QNTrainerTest {
   @BeforeEach
   void initIndexer() {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     testDataIndexer = new OnePassRealValueDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesCorrectnessTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesCorrectnessTest.java
index 0aacb370..2c837744 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesCorrectnessTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesCorrectnessTest.java
@@ -27,7 +27,6 @@ import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.AbstractDataIndexer;
 import opennlp.tools.ml.model.DataIndexer;
 import opennlp.tools.ml.model.Event;
@@ -45,7 +44,7 @@ public class NaiveBayesCorrectnessTest extends AbstractNaiveBayesTest {
   @BeforeEach
   void initIndexer() throws IOException {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     testDataIndexer = new TwoPassDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesModelReadWriteTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesModelReadWriteTest.java
index 6c135c9d..2aabbfc3 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesModelReadWriteTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesModelReadWriteTest.java
@@ -27,7 +27,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.AbstractDataIndexer;
 import opennlp.tools.ml.model.AbstractModel;
 import opennlp.tools.ml.model.DataIndexer;
@@ -44,7 +43,7 @@ public class NaiveBayesModelReadWriteTest extends AbstractNaiveBayesTest {
   @BeforeEach
   void initIndexer() throws IOException {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     testDataIndexer = new TwoPassDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesPrepAttachTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesPrepAttachTest.java
index 954aa840..aea7e4b2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesPrepAttachTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesPrepAttachTest.java
@@ -24,7 +24,6 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.EventTrainer;
 import opennlp.tools.ml.PrepAttachDataUtil;
 import opennlp.tools.ml.TrainerFactory;
@@ -52,7 +51,7 @@ public class NaiveBayesPrepAttachTest {
   @Test
   void testNaiveBayesOnPrepAttachData() throws IOException {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     DataIndexer testDataIndexer = new TwoPassDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
@@ -66,8 +65,8 @@ public class NaiveBayesPrepAttachTest {
   @Test
   void testNaiveBayesOnPrepAttachDataUsingTrainUtil() throws IOException {
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, NaiveBayesTrainer.NAIVE_BAYES_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, NaiveBayesTrainer.NAIVE_BAYES_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
     MaxentModel model = trainer.train(trainingStream);
@@ -78,8 +77,8 @@ public class NaiveBayesPrepAttachTest {
   @Test
   void testNaiveBayesOnPrepAttachDataUsingTrainUtilWithCutoff5() throws IOException {
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, NaiveBayesTrainer.NAIVE_BAYES_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 5);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, NaiveBayesTrainer.NAIVE_BAYES_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 5);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
     MaxentModel model = trainer.train(trainingStream);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesSerializedCorrectnessTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesSerializedCorrectnessTest.java
index e0abd823..65869813 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesSerializedCorrectnessTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/naivebayes/NaiveBayesSerializedCorrectnessTest.java
@@ -35,7 +35,6 @@ import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.Arguments;
 import org.junit.jupiter.params.provider.MethodSource;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.model.AbstractDataIndexer;
 import opennlp.tools.ml.model.DataIndexer;
 import opennlp.tools.ml.model.Event;
@@ -52,7 +51,7 @@ public class NaiveBayesSerializedCorrectnessTest extends AbstractNaiveBayesTest
   @BeforeEach
   void initIndexer() throws IOException {
     TrainingParameters trainingParameters = new TrainingParameters();
-    trainingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     testDataIndexer = new TwoPassDataIndexer();
     testDataIndexer.init(trainingParameters, new HashMap<>());
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/perceptron/PerceptronPrepAttachTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/perceptron/PerceptronPrepAttachTest.java
index 41c50dee..1b3db1a8 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/perceptron/PerceptronPrepAttachTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/perceptron/PerceptronPrepAttachTest.java
@@ -28,7 +28,6 @@ import java.util.Map;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
-import opennlp.tools.ml.AbstractTrainer;
 import opennlp.tools.ml.EventTrainer;
 import opennlp.tools.ml.PrepAttachDataUtil;
 import opennlp.tools.ml.TrainerFactory;
@@ -47,7 +46,7 @@ public class PerceptronPrepAttachTest {
   void testPerceptronOnPrepAttachData() throws IOException {
     TwoPassDataIndexer indexer = new TwoPassDataIndexer();
     TrainingParameters indexingParameters = new TrainingParameters();
-    indexingParameters.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    indexingParameters.put(TrainingParameters.CUTOFF_PARAM, 1);
     indexingParameters.put(AbstractDataIndexer.SORT_PARAM, false);
     indexer.init(indexingParameters, new HashMap<>());
     indexer.index(PrepAttachDataUtil.createTrainingStream());
@@ -59,8 +58,8 @@ public class PerceptronPrepAttachTest {
   void testPerceptronOnPrepAttachDataWithSkippedAveraging() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put("UseSkippedAveraging", true);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
@@ -72,9 +71,9 @@ public class PerceptronPrepAttachTest {
   void testPerceptronOnPrepAttachDataWithTolerance() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
-    trainParams.put(AbstractTrainer.ITERATIONS_PARAM, 500);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ITERATIONS_PARAM, 500);
     trainParams.put("Tolerance", 0.0001d);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
@@ -86,9 +85,9 @@ public class PerceptronPrepAttachTest {
   void testPerceptronOnPrepAttachDataWithStepSizeDecrease() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
-    trainParams.put(AbstractTrainer.ITERATIONS_PARAM, 500);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ITERATIONS_PARAM, 500);
     trainParams.put("StepSizeDecrease", 0.06d);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
@@ -100,8 +99,8 @@ public class PerceptronPrepAttachTest {
   void testModelSerialization() throws IOException {
 
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put("UseSkippedAveraging", true);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
@@ -124,8 +123,8 @@ public class PerceptronPrepAttachTest {
   @Test
   void testModelEquals() throws IOException {
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     trainParams.put("UseSkippedAveraging", true);
 
     EventTrainer trainer = TrainerFactory.getEventTrainer(trainParams, null);
@@ -139,10 +138,10 @@ public class PerceptronPrepAttachTest {
   @Test
   void verifyReportMap() throws IOException {
     TrainingParameters trainParams = new TrainingParameters();
-    trainParams.put(AbstractTrainer.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
-    trainParams.put(AbstractTrainer.CUTOFF_PARAM, 1);
+    trainParams.put(TrainingParameters.ALGORITHM_PARAM, PerceptronTrainer.PERCEPTRON_VALUE);
+    trainParams.put(TrainingParameters.CUTOFF_PARAM, 1);
     // Since we are verifying the report map, we don't need to have more than 1 iteration
-    trainParams.put(AbstractTrainer.ITERATIONS_PARAM, 1);
+    trainParams.put(TrainingParameters.ITERATIONS_PARAM, 1);
     trainParams.put("UseSkippedAveraging", true);
 
     Map<String, String> reportMap = new HashMap<>();

Reply via email to