This is an automated email from the ASF dual-hosted git repository.
sergeykamov pushed a commit to branch NLPCRAFT-504
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git
The following commit(s) were added to refs/heads/NLPCRAFT-504 by this push:
new 54f0edba Assertion fixes.
54f0edba is described below
commit 54f0edba10a155bc0a9f1678ba8f362dd192dfeb
Author: Sergey Kamov <[email protected]>
AuthorDate: Mon Jul 4 11:14:41 2022 +0300
Assertion fixes.
---
.../stanford/NCStanfordNLPEntityParser.scala | 4 +-
.../parser/stanford/NCStanfordNLPTokenParser.scala | 2 +-
.../scala/org/apache/nlpcraft/NCModelClient.scala | 12 ++---
.../org/apache/nlpcraft/NCPipelineBuilder.scala | 60 +++++++++++-----------
.../nlp/entity/parser/NCOpenNLPEntityParser.scala | 4 +-
.../parser/semantic/NCSemanticEntityParser.scala | 20 ++++----
.../enricher/NCEnSwearWordsTokenEnricher.scala | 2 +-
.../nlp/token/parser/NCOpenNLPTokenParser.scala | 2 +-
8 files changed, 53 insertions(+), 53 deletions(-)
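
Every change in this commit follows the same pattern: a java.util.Objects.requireNonNull(x, msg) call is replaced with Scala's built-in require(x != null, msg). As a minimal, hypothetical sketch (not part of the commit, assuming Scala 3) of the behavioral difference: requireNonNull fails with a NullPointerException carrying the given message, while require fails with an IllegalArgumentException whose message is prefixed with "requirement failed:".

    import java.util.Objects

    // Illustrative names only; these helpers are not in the repository.
    def oldStyleCheck(mdl: AnyRef): Unit =
        Objects.requireNonNull(mdl, "Model cannot be null.")  // NullPointerException if mdl == null

    def newStyleCheck(mdl: AnyRef): Unit =
        require(mdl != null, "Model cannot be null.")         // IllegalArgumentException if mdl == null
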
diff --git a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
index 2426fcda..bbd1ff34 100644
--- a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/stanford/NCStanfordNLPEntityParser.scala
@@ -31,8 +31,8 @@ import scala.jdk.CollectionConverters.*
* @param supported
*/
class NCStanfordNLPEntityParser(stanford: StanfordCoreNLP, supported: Set[String]) extends NCEntityParser:
- Objects.requireNonNull(stanford, "Stanford instance cannot be null.");
- Objects.requireNonNull(supported, "Supported elements set cannot be null.");
+ require(stanford != null, "Stanford instance cannot be null.");
+ require(supported != null, "Supported elements set cannot be null.");
override def parse(req: NCRequest, cfg: NCModelConfig, toks: List[NCToken]): List[NCEntity] =
val doc = new CoreDocument(req.getText)
diff --git a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
index 0af6d272..408a240a 100644
--- a/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
+++ b/nlpcraft-stanford/src/main/scala/org/apache/nlpcraft/nlp/token/parser/stanford/NCStanfordNLPTokenParser.scala
@@ -33,7 +33,7 @@ import scala.jdk.CollectionConverters.*
* @param stanford
*/
class NCStanfordNLPTokenParser(stanford: StanfordCoreNLP) extends NCTokenParser:
- Objects.requireNonNull(stanford, "Stanford instance cannot be null.")
+ require(stanford != null, "Stanford instance cannot be null.")
private def nvl(v: String, dflt : => String): String = if v != null then v else dflt
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
index 423b52a0..68d53092 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.scala
@@ -51,16 +51,16 @@ class NCModelClient(mdl: NCModel) extends LazyLogging, AutoCloseable:
*
*/
private def verify(): Unit =
- Objects.requireNonNull(mdl, "Model cannot be null.")
+ require(mdl != null, "Model cannot be null.")
val cfg = mdl.getConfig
val pipeline = mdl.getPipeline
- Objects.requireNonNull(cfg.id, "Model ID cannot be null.")
- Objects.requireNonNull(cfg.name, "Model name cannot be null.")
- Objects.requireNonNull(cfg.version, "Model version cannot be null.")
- Objects.requireNonNull(pipeline.getTokenParser, "Token parser cannot be null.")
- Objects.requireNonNull(pipeline.getEntityParsers, "List of entity parsers in the pipeline cannot be null.")
+ require(cfg.id != null, "Model ID cannot be null.")
+ require(cfg.name != null, "Model name cannot be null.")
+ require(cfg.version != null, "Model version cannot be null.")
+ require(pipeline.getTokenParser != null, "Token parser cannot be null.")
+ require(pipeline.getEntityParsers != null, "List of entity parsers in the pipeline cannot be null.")
/**
*
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala
index d356d053..b710fd8d 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPipelineBuilder.scala
@@ -52,8 +52,8 @@ class NCPipelineBuilder:
* @param tokEnrichers
* @return This instance for call chaining. */
def withTokenEnrichers(tokEnrichers: List[NCTokenEnricher]): NCPipelineBuilder =
- Objects.requireNonNull(tokEnrichers, "List of token enrichers cannot be null.")
- tokEnrichers.foreach((p: NCTokenEnricher) => Objects.requireNonNull(p, "Token enricher cannot be null."))
+ require(tokEnrichers != null, "List of token enrichers cannot be null.")
+ tokEnrichers.foreach((p: NCTokenEnricher) => require(p != null, "Token enricher cannot be null."))
this.tokEnrichers ++= tokEnrichers
this
@@ -61,7 +61,7 @@ class NCPipelineBuilder:
* @param tokEnricher
* @return This instance for call chaining. */
def withTokenEnricher(tokEnricher: NCTokenEnricher): NCPipelineBuilder =
- Objects.requireNonNull(tokEnricher, "Token enricher cannot be null.")
+ require(tokEnricher != null, "Token enricher cannot be null.")
this.tokEnrichers += tokEnricher
this
@@ -69,8 +69,8 @@ class NCPipelineBuilder:
* @param entEnrichers
* @return This instance for call chaining. */
def withEntityEnrichers(entEnrichers: List[NCEntityEnricher]): NCPipelineBuilder =
- Objects.requireNonNull(entEnrichers, "List of entity enrichers cannot be null.")
- entEnrichers.foreach((p: NCEntityEnricher) => Objects.requireNonNull(p, "Entity enrichers cannot be null."))
+ require(entEnrichers != null, "List of entity enrichers cannot be null.")
+ entEnrichers.foreach((p: NCEntityEnricher) => require(p != null, "Entity enrichers cannot be null."))
this.entEnrichers ++= entEnrichers
this
@@ -78,7 +78,7 @@ class NCPipelineBuilder:
* @param entEnricher
* @return This instance for call chaining. */
def withEntityEnricher(entEnricher: NCEntityEnricher): NCPipelineBuilder =
- Objects.requireNonNull(entEnricher, "Entity enricher cannot be null.")
+ require(entEnricher != null, "Entity enricher cannot be null.")
this.entEnrichers += entEnricher
this
@@ -86,8 +86,8 @@ class NCPipelineBuilder:
* @param entParsers
* @return This instance for call chaining. */
def withEntityParsers(entParsers: List[NCEntityParser]): NCPipelineBuilder =
- Objects.requireNonNull(entParsers, "List of entity parsers cannot be null.")
- entParsers.foreach((p: NCEntityParser) => Objects.requireNonNull(p, "Entity parser cannot be null."))
+ require(entParsers != null, "List of entity parsers cannot be null.")
+ entParsers.foreach((p: NCEntityParser) => require(p != null, "Entity parser cannot be null."))
this.entParsers ++= entParsers
this
@@ -95,7 +95,7 @@ class NCPipelineBuilder:
* @param entParser
* @return This instance for call chaining. */
def withEntityParser(entParser: NCEntityParser): NCPipelineBuilder =
- Objects.requireNonNull(entParser, "Entity parser cannot be null.")
+ require(entParser != null, "Entity parser cannot be null.")
this.entParsers += entParser
this
@@ -103,8 +103,8 @@ class NCPipelineBuilder:
* @param tokVals
* @return This instance for call chaining. */
def withTokenValidators(tokVals: List[NCTokenValidator]): NCPipelineBuilder =
- Objects.requireNonNull(tokVals, "List of token validators cannot be null.")
- tokVals.foreach((p: NCTokenValidator) => Objects.requireNonNull(p, "Token validator cannot be null."))
+ require(tokVals != null, "List of token validators cannot be null.")
+ tokVals.foreach((p: NCTokenValidator) => require(p != null, "Token validator cannot be null."))
this.tokVals ++= tokVals
this
@@ -113,7 +113,7 @@ class NCPipelineBuilder:
* @param tokVal
* @return This instance for call chaining. */
def withTokenValidator(tokVal: NCTokenValidator): NCPipelineBuilder =
- Objects.requireNonNull(tokVal, "Token validator cannot be null.")
+ require(tokVal != null, "Token validator cannot be null.")
this.tokVals += tokVal
this
@@ -121,8 +121,8 @@ class NCPipelineBuilder:
* @param entVals
* @return This instance for call chaining. */
def withEntityValidators(entVals: List[NCEntityValidator]): NCPipelineBuilder =
- Objects.requireNonNull(entVals, "List of entity validators cannot be null.")
- entVals.foreach((p: NCEntityValidator) => Objects.requireNonNull(p, "Entity validators cannot be null."))
+ require(entVals != null, "List of entity validators cannot be null.")
+ entVals.foreach((p: NCEntityValidator) => require(p != null, "Entity validators cannot be null."))
this.entVals ++= entVals
this
@@ -130,7 +130,7 @@ class NCPipelineBuilder:
* @param entVal
* @return This instance for call chaining. */
def withEntityValidator(entVal: NCEntityValidator): NCPipelineBuilder =
- Objects.requireNonNull(entVal, "Entity validator cannot be null.")
+ require(entVal != null, "Entity validator cannot be null.")
this.entVals += entVal
this
@@ -146,7 +146,7 @@ class NCPipelineBuilder:
* @param tokParser
* @return */
def withTokenParser(tokParser: NCTokenParser): NCPipelineBuilder =
- Objects.requireNonNull(tokParser, "Token parser cannot be null.")
+ require(tokParser != null, "Token parser cannot be null.")
this.tokParser = Some(tokParser)
this
@@ -155,8 +155,8 @@ class NCPipelineBuilder:
* @param entMappers
* @return This instance for call chaining. */
def withEntityMappers(entMappers: List[NCEntityMapper]): NCPipelineBuilder =
- Objects.requireNonNull(entMappers, "List of entity mappers cannot be null.")
- entMappers.foreach((p: NCEntityMapper) => Objects.requireNonNull(p, "Entity mapper cannot be null."))
+ require(entMappers != null, "List of entity mappers cannot be null.")
+ entMappers.foreach((p: NCEntityMapper) => require(p != null, "Entity mapper cannot be null."))
this.entMappers ++= entMappers
this
@@ -164,7 +164,7 @@ class NCPipelineBuilder:
* @param entMapper
* @return This instance for call chaining. */
def withEntityMapper(entMapper: NCEntityMapper): NCPipelineBuilder =
- Objects.requireNonNull(entMapper, "Entity mapper cannot be null.")
+ require(entMapper != null, "Entity mapper cannot be null.")
this.entMappers += entMapper
this
@@ -186,17 +186,17 @@ class NCPipelineBuilder:
* @param elms
* @return */
def withSemantic(lang: String, macros: Map[String, String], elms: List[NCSemanticElement]): NCPipelineBuilder =
- Objects.requireNonNull(lang, "Language cannot be null.")
- Objects.requireNonNull(macros, "Macros elements cannot be null.")
- Objects.requireNonNull(elms, "Model elements cannot be null.")
- Objects.requireNonNull(macros, "Macros cannot be null.")
- if elms.isEmpty then throw new IllegalArgumentException("Model elements cannot be empty.")
+ require(lang != null, "Language cannot be null.")
+ require(macros != null, "Macros elements cannot be null.")
+ require(elms != null, "Model elements cannot be null.")
+ require(macros != null, "Macros cannot be null.")
+ require(elms.nonEmpty, "Model elements cannot be empty.")
lang.toUpperCase match
case "EN" =>
setEnComponents()
entParsers += NCSemanticEntityParser(mkEnStemmer, mkEnOpenNLPTokenParser, macros, elms)
- case _ => throw new IllegalArgumentException("Unsupported language: " + lang)
+ case _ => require(false, s"Unsupported language: $lang")
this
/**
@@ -213,13 +213,13 @@ class NCPipelineBuilder:
* @return
*/
def withSemantic(lang: String, mdlSrc: String): NCPipelineBuilder =
- Objects.requireNonNull(lang, "Language cannot be null.")
- Objects.requireNonNull(mdlSrc, "Model source cannot be null.")
+ require(lang != null, "Language cannot be null.")
+ require(mdlSrc != null, "Model source cannot be null.")
lang.toUpperCase match
case "EN" =>
setEnComponents()
this.entParsers += NCSemanticEntityParser(mkEnStemmer, mkEnOpenNLPTokenParser, mdlSrc)
- case _ => throw new IllegalArgumentException(s"Unsupported language: $lang")
+ case _ => require(false, s"Unsupported language: $lang")
this
@@ -227,8 +227,8 @@ class NCPipelineBuilder:
* @return */
def build: NCPipeline =
// TODO: Text.
- if tokParser.isEmpty then throw new IllegalArgumentException("Token parser cannot be null.")
- if entParsers.isEmpty then throw new IllegalStateException("At least oe entity parser should be defined.")
+ require(tokParser.nonEmpty, "Token parser should be defined.")
+ require(entParsers.nonEmpty, "At least oe entity parser should be defined.")
new NCPipeline():
override def getTokenParser: NCTokenParser = tokParser.get
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
index 0462b8aa..854bf9b2 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/NCOpenNLPEntityParser.scala
@@ -37,7 +37,7 @@ import scala.util.Using
*/
object NCOpenNLPEntityParser:
def apply(src: String): NCOpenNLPEntityParser =
- Objects.requireNonNull(src, "Model source cannot be null.")
+ require(src != null, "Model source cannot be null.")
new NCOpenNLPEntityParser(List(src))
/**
@@ -45,7 +45,7 @@ object NCOpenNLPEntityParser:
* @param srcs
*/
class NCOpenNLPEntityParser(srcs: List[String]) extends NCEntityParser with LazyLogging:
- Objects.requireNonNull(srcs, "Models source cannot be null.")
+ require(srcs != null, "Models source cannot be null.")
private var finders: Seq[NameFinderME] = _
private case class Holder(start: Int, end: Int, name: String, probability: Double)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
index 29dc62da..26232a8c 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/entity/parser/semantic/NCSemanticEntityParser.scala
@@ -49,10 +49,10 @@ object NCSemanticEntityParser:
macros: Map[String, String],
elements: List[NCSemanticElement]
): NCSemanticEntityParser =
- Objects.requireNonNull(stemmer, "Stemmer cannot be null.")
- Objects.requireNonNull(parser, "Parser cannot be null.")
- Objects.requireNonNull(macros, "Macros cannot be null.")
- Objects.requireNonNull(elements, "Elements cannot be null.")
+ require(stemmer != null, "Stemmer cannot be null.")
+ require(parser != null, "Parser cannot be null.")
+ require(macros != null, "Macros cannot be null.")
+ require(elements != null, "Elements cannot be null.")
new NCSemanticEntityParser(stemmer, parser, macros = macros, elements = elements)
@@ -68,9 +68,9 @@ object NCSemanticEntityParser:
parser: NCTokenParser,
elements: List[NCSemanticElement]
): NCSemanticEntityParser =
- Objects.requireNonNull(stemmer, "Stemmer cannot be null.")
- Objects.requireNonNull(parser, "Parser cannot be null.")
- Objects.requireNonNull(elements, "Elements cannot be null.")
+ require(stemmer != null, "Stemmer cannot be null.")
+ require(parser != null, "Parser cannot be null.")
+ require(elements != null, "Elements cannot be null.")
new NCSemanticEntityParser(stemmer, parser, macros = Map.empty, elements = elements)
@@ -82,9 +82,9 @@ object NCSemanticEntityParser:
* @return
*/
def apply(stemmer: NCSemanticStemmer, parser: NCTokenParser, mdlSrc: String): NCSemanticEntityParser =
- Objects.requireNonNull(stemmer, "Stemmer cannot be null.")
- Objects.requireNonNull(parser, "Parser cannot be null.")
- Objects.requireNonNull(mdlSrc, "Model source cannot be null.")
+ require(stemmer != null, "Stemmer cannot be null.")
+ require(parser != null, "Parser cannot be null.")
+ require(mdlSrc != null, "Model source cannot be null.")
new NCSemanticEntityParser(stemmer, parser, mdlSrc = mdlSrc)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala
index 6882048f..9f7f565e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/NCEnSwearWordsTokenEnricher.scala
@@ -31,7 +31,7 @@ import java.util.Objects
* @param res
*/
class NCEnSwearWordsTokenEnricher(res: String) extends NCTokenEnricher with LazyLogging:
- Objects.requireNonNull(res, "Swear words model file cannot be null.")
+ require(res != null, "Swear words model file cannot be null.")
private final val stemmer = new PorterStemmer
private var swearWords: Set[String] = _
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/NCOpenNLPTokenParser.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/NCOpenNLPTokenParser.scala
index e19f3781..f25652cb 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/NCOpenNLPTokenParser.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/NCOpenNLPTokenParser.scala
@@ -33,7 +33,7 @@ import java.util.Objects
* @param tokMdl
*/
class NCOpenNLPTokenParser(tokMdl: String) extends NCTokenParser with LazyLogging:
- Objects.requireNonNull(tokMdl, "Tokenizer model path cannot be null.")
+ require(tokMdl != null, "Tokenizer model path cannot be null.")
@volatile private var tokenizer: TokenizerME = _