This is an automated email from the ASF dual-hosted git repository.
aradzinski pushed a commit to branch NLPCRAFT-296
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git
The following commit(s) were added to refs/heads/NLPCRAFT-296 by this push:
new 176dc10 WIP.
176dc10 is described below
commit 176dc1061e853f4674cc0e30070ae7d57dd56451
Author: Aaron Radzinzski <[email protected]>
AuthorDate: Thu May 27 21:50:08 2021 -0700
WIP.
---
.../enrichers/stopword/NCStopWordEnricher.scala | 14 +++++--
.../org/apache/nlpcraft/server/sql/NCSql.scala | 23 +++++++----
.../server/sugsyn/NCSuggestSynonymManager.scala | 47 +++++++++++-----------
3 files changed, 49 insertions(+), 35 deletions(-)
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
index feb2a98..ac440d1 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/nlp/enrichers/stopword/NCStopWordEnricher.scala
@@ -234,7 +234,13 @@ object NCStopWordEnricher extends NCServerEnricher {
def mkEntry[T](f: WordForm, mkT: Unit => T, isExc: Boolean):((Boolean, WordForm), T) = (isExc, f) -> mkT(())
def mkMap[T](mkT: Unit => T): Map[(Boolean, WordForm), T] =
- WordForm.values.flatMap(f => Map(mkEntry(f, mkT, isExc = true), mkEntry(f, mkT, isExc = false))).toMap
+ WordForm
+ .values
+ .toSet
+ .flatMap(f => Map(
+ mkEntry(f, mkT, isExc = true),
+ mkEntry(f, mkT, isExc = false)
+ )).toMap
// Prepares collections.
val mHash = mkMap(_ => new Condition[Word]())
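For readers following the mkMap change above, below is a minimal standalone sketch of the rewritten shape, assuming WordForm is a scala.Enumeration (the WordForm object here is a hypothetical stand-in, not the project's actual type); the explicit .toSet widens the enumeration's ValueSet to a plain Set so flatMap can produce arbitrary key/value pairs before .toMap:

    // Minimal sketch; WordForm below is a hypothetical Enumeration stand-in.
    object MkMapSketch {
        object WordForm extends Enumeration {
            val Word, Stem, Lemma = Value
        }

        // Widen ValueSet to a plain Set[Value] first, then flatMap into
        // (key, value) pairs and materialize the Map.
        def mkMap[T](mkT: Unit => T): Map[(Boolean, WordForm.Value), T] =
            WordForm
                .values
                .toSet
                .flatMap((f: WordForm.Value) => Map(
                    (true, f) -> mkT(()),
                    (false, f) -> mkT(())
                )).toMap
    }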
@@ -340,12 +346,11 @@ object NCStopWordEnricher extends NCServerEnricher {
m: Map[(Boolean, WordForm), Condition[T]],
form: WordForm,
mkInstance: (Set[T], Map[String, Set[T]], Map[String, Set[T]]) => R): R = {
-
val any = m((isExc, form)).any.toSet
val incl = toImmutable(m((isExc, form)).includes)
val excl = toImmutable(m((isExc, form)).excludes)
- mkInstance(any ++ excl.values.flatten, incl, excl)
+ mkInstance(any ++ excl.values.toSet.flatten, incl, excl)
}
def mkHash(form: WordForm): HashHolder = mkHolder(mHash, form, HashHolder)
@@ -603,7 +608,8 @@ object NCStopWordEnricher extends NCServerEnricher {
}
// Capture the token mix at this point minus the initial stop words found up to this point.
- val origToks: Seq[(Seq[NCNlpSentenceToken], String)] = (for (toks <- mix) yield toks).map(s => s -> toStemKey(s))
+ val origToks: Seq[(Seq[NCNlpSentenceToken], String)] =
+ (for (toks <- mix) yield toks.toSeq).map(s => s -> toStemKey(s)).toSeq
// +--------------------------------------------+
// | Pass #4. |
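A general note on the .toSeq additions in this hunk (and in the sugsyn change further below): on Scala 2.13 the default Seq alias is scala.collection.immutable.Seq, so values built from mutable buffers or views need an explicit conversion. A rough, self-contained sketch of the idea, using hypothetical stand-ins for the enricher's token types:

    object SeqConversionSketch {
        import scala.collection.mutable

        // Hypothetical stand-ins for the enricher's token data.
        type Token = String
        def toStemKey(toks: Seq[Token]): String = toks.mkString(" ")

        val mix: mutable.Buffer[mutable.Buffer[Token]] =
            mutable.Buffer(mutable.Buffer("a", "test"))

        // On 2.13, Seq means immutable.Seq, so both the inner buffers and the
        // outer result are converted explicitly.
        val origToks: Seq[(Seq[Token], String)] =
            (for (toks <- mix) yield toks.toSeq).map(s => s -> toStemKey(s)).toSeq
    }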
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSql.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSql.scala
index d8e2eac..eae5622 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSql.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sql/NCSql.scala
@@ -466,9 +466,12 @@ object NCSql extends LazyLogging {
var r = List.empty[R]
catching(psqlErrorCodes) {
- for (ps <- Using.resource { prepare(sql, params) } ; rs <- Using.resource { ps.executeQuery() } )
- while (rs.next)
- r :+= p(rs)
+ Using.resource(prepare(sql, params)) { ps =>
+ Using.resource(ps.executeQuery()) { rs =>
+ while (rs.next)
+ r :+= p(rs)
+ }
+ }
r
}
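On the two select() hunks in this file: scala.util.Using.resource takes the resource and the handler in separate parameter lists and closes the resource when the handler finishes, so the nested form replaces the previous for-comprehension style. A simplified, standalone sketch of the same pattern over plain JDBC (NCSql's connection handling and row-parser plumbing are omitted; names below are illustrative only):

    object UsingResourceSketch {
        import java.sql.{Connection, ResultSet}
        import scala.util.Using

        // Simplified stand-in for NCSql.select: both the statement and the
        // result set are closed automatically, even if the row parser throws.
        def select[R](conn: Connection, sql: String)(parse: ResultSet => R): List[R] = {
            var rows = List.empty[R]

            Using.resource(conn.prepareStatement(sql)) { ps =>
                Using.resource(ps.executeQuery()) { rs =>
                    while (rs.next())
                        rows :+= parse(rs)
                }
            }

            rows
        }
    }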
@@ -578,9 +581,12 @@ object NCSql extends LazyLogging {
@throws[NCE]
def select[R](sql: String, callback: R => Unit, params: Any*) (implicit p: RsParser[R]): Unit =
catching(psqlErrorCodes) {
- for (ps <- Using.resource { prepare(sql, params) } ; rs <- Using.resource { ps.executeQuery() } )
- while (rs.next)
- callback(p(rs))
+ Using.resource(prepare(sql, params)) { ps =>
+ Using.resource(ps.executeQuery()) { rs =>
+ while (rs.next)
+ callback(p(rs))
+ }
+ }
}
/**
@@ -613,13 +619,14 @@ object NCSql extends LazyLogging {
val tbls = mutable.ArrayBuffer.empty[String]
catching(psqlErrorCodes) {
- for (rs <- Using.resource { connection().getMetaData.getTables(null, null, null, null)})
+ Using.resource(connection().getMetaData.getTables(null, null, null, null)) { rs =>
while (rs.next) {
val tblSchema = rs.getString(2)
-
+
if (tblSchema != null && tblSchema.toLowerCase == schemaLc)
tbls += rs.getString(3)
}
+ }
}
tbls
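A side note on the getTables() hunk: when the nesting grows deeper, scala.util.Using also provides Using.Manager, which tracks every resource registered through use(...) and closes them all in reverse order, returning a Try. A rough sketch under the same simplifying assumptions as above (illustrative names, plain JDBC, not the project's helpers):

    object UsingManagerSketch {
        import java.sql.Connection
        import scala.collection.mutable
        import scala.util.{Try, Using}

        // Alternative to nested Using.resource calls: the Manager closes every
        // acquired resource when the block completes.
        def tableNames(conn: Connection, schemaLc: String): Try[Seq[String]] =
            Using.Manager { use =>
                val rs = use(conn.getMetaData.getTables(null, null, null, null))
                val tbls = mutable.ArrayBuffer.empty[String]

                while (rs.next()) {
                    val tblSchema = rs.getString(2)

                    if (tblSchema != null && tblSchema.toLowerCase == schemaLc)
                        tbls += rs.getString(3)
                }

                tbls.toSeq
            }
    }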
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
index 4753a17..bdd2949 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/server/sugsyn/NCSuggestSynonymManager.scala
@@ -342,7 +342,7 @@ object NCSuggestSynonymManager extends NCService {
if (err.get() != null)
throw new NCE("Error while working with 'ctxword' server.", err.get())
- val allSynsStems = elemSyns.flatMap(_._2).flatten.map(_.stem).toSet
+ val allSynsStems = elemSyns.flatMap(_._2).toSeq.flatten.map(_.stem).toSet
val nonEmptySgsts = allSgsts.asScala.map(p => p._1 -> p._2.asScala).filter(_._2.nonEmpty)
@@ -381,35 +381,36 @@ object NCSuggestSynonymManager extends NCService {
}
val resJ: util.Map[String, util.List[util.HashMap[String, Any]]] =
- res.map { case (id, data) =>
- val norm =
- if (data.nonEmpty) {
- val factors = data.map(_.score)
+ res.map {
+ case (id, data) =>
+ val norm =
+ if (data.nonEmpty) {
+ val factors = data.map(_.score)
- val min = factors.min
- val max = factors.max
- var delta = max - min
+ val min = factors.min
+ val max = factors.max
+ var delta = max - min
- if (delta == 0)
- delta = max
+ if (delta == 0)
+ delta = max
- def normalize(v: Double): Double = (v - min) / delta
+ def normalize(v: Double): Double = (v - min) / delta
- data.
- map(s => SuggestionResult(s.synonym, normalize(s.score))).
- filter(_.score >= minScore)
- }
- else
- Seq.empty
+ data.
+ map(s => SuggestionResult(s.synonym, normalize(s.score))).
+ filter(_.score >= minScore)
+ }
+ else
+ Seq.empty
- id -> norm.map(d => {
- val m = new util.HashMap[String, Any]()
+ id -> norm.map(d => {
+ val m = new util.HashMap[String, Any]()
- m.put("synonym",
d.synonym.toLowerCase)
- m.put("score", d.score)
+ m.put("synonym",
d.synonym.toLowerCase)
+ m.put("score", d.score)
- m
- }).asJava
+ m
+ }).asJava
}.asJava
promise.success(
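For reference, the score handling re-indented in the last hunk is a plain min-max normalization with a guard for the degenerate case where all scores are equal. A small standalone restatement of that logic (SuggestionResult and minScore below are simplified stand-ins for the manager's actual types):

    object ScoreNormalizationSketch {
        // Simplified stand-in for the manager's suggestion type.
        case class SuggestionResult(synonym: String, score: Double)

        // Min-max normalize scores into [0, 1]; when all scores are equal the
        // code falls back to dividing by max, mirroring the original logic.
        def normalize(data: Seq[SuggestionResult], minScore: Double): Seq[SuggestionResult] =
            if (data.nonEmpty) {
                val factors = data.map(_.score)

                val min = factors.min
                val max = factors.max
                var delta = max - min

                if (delta == 0)
                    delta = max

                def norm(v: Double): Double = (v - min) / delta

                data
                    .map(s => SuggestionResult(s.synonym, norm(s.score)))
                    .filter(_.score >= minScore)
            }
            else
                Seq.empty
    }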