This is an automated email from the ASF dual-hosted git repository.

sergeykamov pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-nlpcraft.git


The following commit(s) were added to refs/heads/master by this push:
     new 496fa53  Solver and related issues.
496fa53 is described below

commit 496fa531bdef3e1e87457b5bef2e80e084b3f38b
Author: Sergey Kamov <[email protected]>
AuthorDate: Fri Feb 18 23:24:02 2022 +0300

    Solver and related issues.
---
 .../main/scala/org/apache/nlpcraft/NCContext.java  |   3 +
 .../main/scala/org/apache/nlpcraft/NCEntity.java   |  10 +
 .../scala/org/apache/nlpcraft/NCIntentMatch.java   |  16 +-
 .../scala/org/apache/nlpcraft/NCModelClient.java   |  38 +-
 .../scala/org/apache/nlpcraft/NCModelConfig.java   |  38 ++
 .../scala/org/apache/nlpcraft/NCPropertyMap.java   |   5 +
 .../org/apache/nlpcraft/NCPropertyMapAdapter.java  |  12 +-
 .../internal/conversation/NCConversationData.scala | 210 +++++++
 .../conversation/NCConversationManager.scala       |  98 +++
 .../internal/dialogflow/NCDialogFlowManager.scala  | 174 ++++++
 .../nlpcraft/internal/impl/NCModelClientImpl.scala | 154 +++--
 ...rocessor.scala => NCModelPipelineManager.scala} | 113 ++--
 .../nlpcraft/internal/intent/NCIDLEntity.scala     |   2 +-
 ...tentMatcher.scala => NCIntentSolverInput.scala} |  16 +-
 ...entMatcher.scala => NCIntentSolverResult.scala} |  30 +-
 .../intent/matcher/NCIntentSolverVariant.scala     |  62 ++
 .../internal/intent/matcher/NCIntentsManager.scala | 655 +++++++++++++++++++++
 .../nlpcraft/internal/makro/NCMacroCompiler.scala  |  16 +-
 .../apache/nlpcraft/internal/util/NCUtils.scala    |  40 +-
 .../en/impl/NCBracketsTokenEnricherImpl.scala      |  20 +-
 .../opennlp/impl/NCOpenNLPTokenParserImpl.scala    |  18 +-
 .../conversation/NCConversationManagerSpec.scala   |  93 +++
 .../dialogflow/NCDialogFlowManagerSpec.scala       | 133 +++++
 .../nlpcraft/internal/impl/NCModelClientSpec.scala |  68 +++
 .../NCModelPipelineManagerSpec.scala}              |  59 +-
 .../impl/scan/NCModelIntentsInvalidArgsSpec.scala  |   9 +-
 .../impl/scan/NCModelIntentsNestedSpec.scala       |   3 +-
 .../internal/impl/scan/NCTestModelJava.java        |   3 +-
 .../compiler/functions/NCIDLFunctionsModel.scala   |   2 +-
 .../parser/opennlp/NCOpenNLPTokenParserSpec.scala  |  29 +-
 .../apache/nlpcraft/nlp/util/NCTestEntity.scala    |   4 +-
 .../apache/nlpcraft/nlp/util/NCTestPipeline.scala  |   2 +-
 .../apache/nlpcraft/nlp/util/NCTestRequest.scala   |   3 +-
 33 files changed, 1887 insertions(+), 251 deletions(-)

diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java
index 1db8097..1cde39f 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCContext.java
@@ -56,4 +56,7 @@ public interface NCContext {
      * @return
      */
     Collection<NCVariant> getVariants();
+
+    // TODO:
+    List<NCToken> getTokens();
 }
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java
index 5069b03..c24b638 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCEntity.java
@@ -18,6 +18,7 @@
 package org.apache.nlpcraft;
 
 import java.util.*;
+import java.util.stream.Collectors;
 
 /**
  *
@@ -30,6 +31,15 @@ public interface NCEntity extends NCPropertyMap {
     List<NCToken> getTokens();
 
     /**
+     * Joins the trimmed text of all tokens using a single space as the delimiter. This
+     * method does not cache the result and rebuilds the text on each call. Cache the result
+     * yourself to avoid unnecessary work if and when {@link #getTokens()} does not change.
+     */
+    default String mkText() {
+        return getTokens().stream().map(s -> 
s.getText().trim()).collect(Collectors.joining(" ")).trim();
+    }
+
+    /**
      * Gets ID of the request this entity is part of.
      *
      * @return ID of the request this entity is part of.
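
The Javadoc above recommends caching the joined text since mkText() rebuilds it on every call. A minimal Scala sketch of such caching (the wrapper class is hypothetical and assumes the entity's token list does not change):

    import org.apache.nlpcraft.NCEntity

    // Hypothetical wrapper: builds the joined text once, on first access.
    final class CachedEntityText(ent: NCEntity):
        lazy val text: String = ent.mkText()

NCIDLEntity in this same commit applies the identical idea via a private lazy val.
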
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java
index 372c24d..8794cb2 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCIntentMatch.java
@@ -36,9 +36,9 @@ public interface NCIntentMatch {
      * to the order and index of terms in the matching intent. Number of 
sub-lists will always be the same
      * as the number of terms in the matched intent.
      * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to intent entities.
+     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to the intent entities.
      *
-     * @return List of list of entities representing matched intent.
+     * @return List of lists of entities representing matched intent.
      * @see #getVariant()
      * @see NCIntentTerm
      */
@@ -47,7 +47,7 @@ public interface NCIntentMatch {
     /**
      * Gets entities for given term. This is a companion method for {@link 
#getIntentEntities()}.
      * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to intent entities.
+     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to the intent entities.
      *
      * @param idx Index of the term (starting from <code>0</code>).
      * @return List of entities, potentially {@code null}, for given term.
@@ -59,7 +59,7 @@ public interface NCIntentMatch {
     /**
      * Gets entities for given term. This is a companion method for {@link 
#getIntentEntities()}.
      * <p>
-     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to intent entities.
+     * Consider using {@link NCIntentTerm} annotation instead for simpler 
access to the intent entities.
      *
      * @param termId ID of the term for which to get entities.
      * @return List of entities, potentially {@code null}, for given term.
@@ -78,4 +78,12 @@ public interface NCIntentMatch {
      * @see #getIntentEntities()
      */
     NCVariant getVariant();
+
+
+    /**
+     * Gets context of the user input query.
+     *
+     * @return Original query context.
+     */
+    NCContext getContext();
 }
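
As the updated Javadoc notes, getIntentEntities() returns one sub-list per term of the matched intent, in term order, and the new getContext() exposes the originating query context. A hedged sketch of positional access from inside a callback (the helper itself is illustrative):

    import org.apache.nlpcraft.*
    import scala.jdk.CollectionConverters.*

    // Sub-list 0 holds the entities matched by the intent's first term;
    // getTermEntities(0) is the companion call returning the same data.
    def describeMatch(im: NCIntentMatch): String =
        val firstTermEnts = im.getIntentEntities.get(0).asScala
        s"Intent '${im.getIntentId}' matched ${firstTermEnts.size} entities in its first term."
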
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java
index fee87cf..54f023b 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelClient.java
@@ -20,12 +20,12 @@ package org.apache.nlpcraft;
 import org.apache.nlpcraft.internal.impl.NCModelClientImpl;
 
 import java.util.Map;
-import java.util.concurrent.*;
+import java.util.function.Predicate;
 
 /**
  *
  */
-public class NCModelClient {
+public class NCModelClient implements AutoCloseable {
     private final NCModelClientImpl impl;
 
     /**
@@ -44,29 +44,26 @@ public class NCModelClient {
      * @return
      * @throws NCException
      */
-    public CompletableFuture<NCResult> ask(String txt, Map<String, Object> 
data, String usrId) {
+    public NCResult ask(String txt, Map<String, Object> data, String usrId) {
         return impl.ask(txt, data, usrId);
     }
 
     /**
      *
-     * @param txt
-     * @param data
      * @param usrId
-     * @return
      * @throws NCException
      */
-    public NCResult askSync(String txt, Map<String, Object> data, String 
usrId) {
-        return impl.askSync(txt, data, usrId);
+    public void clearStm(String usrId) {
+        impl.clearStm(usrId);
     }
 
     /**
      *
      * @param usrId
-     * @throws NCException
+     * @param filter
      */
-    public void clearConversation(String usrId) {
-        impl.clearConversation(usrId);
+    public void clearStm(String usrId, Predicate<NCEntity> filter) {
+        impl.clearStm(usrId, filter);
     }
 
     /**
@@ -79,9 +76,26 @@ public class NCModelClient {
     }
 
     /**
-     * 
+     *
+     * @param usrId
+     * @param filter
+     */
+    public void clearDialog(String usrId, Predicate<NCDialogFlowItem> filter) {
+        impl.clearDialog(usrId, filter);
+    }
+
+    /**
+     *
      */
+    @Override
     public void close() {
         impl.close();
     }
+
+    /**
+     *
+     */
+    public void validateSamples() {
+        impl.validateSamples();
+    }
 }
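
A hedged usage sketch of the reworked client API: ask() is now synchronous and the client implements AutoCloseable, so it can be managed with scala.util.Using (assuming the existing NCModelClient(NCModel) constructor; the query text and user ID are illustrative):

    import org.apache.nlpcraft.*
    import scala.util.Using

    def runOnce(mdl: NCModel): NCResult =
        Using.resource(new NCModelClient(mdl)) { client =>
            val res = client.ask("lights on in the kitchen", null, "userId")
            client.clearStm("userId")    // Drop this user's short-term memory.
            client.clearDialog("userId") // Drop this user's dialog flow.
            res
        }
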
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java
index e29411f..dfd8f96 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCModelConfig.java
@@ -17,13 +17,19 @@
 
 package org.apache.nlpcraft;
 
+import java.time.Duration;
 import java.util.*;
 
 /**
  *
  */
 public class NCModelConfig extends NCPropertyMapAdapter {
+    public static final long DFLT_CONV_TIMEOUT = Duration.ofMinutes(60).toMillis();
+    public static final int DFLT_CONV_DEPTH = 3;
+
     private final String id, name, ver, desc, origin;
+    private long convTimeout = DFLT_CONV_TIMEOUT;
+    private int convDepth = DFLT_CONV_DEPTH;
 
     /**
      * @param id
@@ -100,4 +106,36 @@ public class NCModelConfig extends NCPropertyMapAdapter {
     public String getOrigin() {
         return origin;
     }
+
+    /**
+     *
+     * @return
+     */
+    public long getConversationTimeout() {
+        return convTimeout;
+    }
+
+    /**
+     *
+     * @param convTimeout
+     */
+    public void setConversationTimeout(long convTimeout) {
+        this.convTimeout = convTimeout;
+    }
+
+    /**
+     *
+     * @return
+     */
+    public int getConversationDepth() {
+        return convDepth;
+    }
+
+    /**
+     *
+     * @param convDepth
+     */
+    public void setConversationDepth(int convDepth) {
+        this.convDepth = convDepth;
+    }
 }
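
The new conversation settings can be tuned per model. A small sketch, assuming the defaults above (60 minutes, depth 3) are too generous for a given use case:

    import org.apache.nlpcraft.NCModelConfig
    import java.time.Duration

    // Expire the conversation STM after 10 minutes of inactivity and reset it
    // after more than 5 consecutive requests that add no new entities.
    def configureConversation(cfg: NCModelConfig): Unit =
        cfg.setConversationTimeout(Duration.ofMinutes(10).toMillis)
        cfg.setConversationDepth(5)
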
diff --git a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java
index 2cb97dc..25a29f1 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMap.java
@@ -82,4 +82,9 @@ public interface NCPropertyMap {
      * @return
      */
     Set<String> keysSet();
+
+    /**
+     *
+     */
+    void clear();
 }
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java
index c2ca3d1..86bb3d0 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/NCPropertyMapAdapter.java
@@ -17,13 +17,16 @@
 
 package org.apache.nlpcraft;
 
-import java.util.*;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 
 /**
  *
  */
 public class NCPropertyMapAdapter implements NCPropertyMap {
-    private final Map<String, Object> map = new HashMap<>();
+    private final Map<String, Object> map = new ConcurrentHashMap<>();
 
     @Override
     public <T> T get(String key) {
@@ -64,4 +67,9 @@ public class NCPropertyMapAdapter implements NCPropertyMap {
     public Set<String> keysSet() {
         return map.keySet();
     }
+
+    @Override
+    public void clear() {
+        map.clear();
+    }
 }
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
new file mode 100644
index 0000000..f9d97ff
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationData.scala
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.conversation
+
+import java.util
+import java.util.concurrent.ConcurrentHashMap
+import java.util.function.Predicate
+
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.ascii.*
+import org.apache.nlpcraft.internal.util.*
+
+import scala.collection.mutable
+import scala.jdk.CollectionConverters.*
+
+/**
+  * An active conversation is an ordered set of utterances for a specific user and data model.
+  */
+case class NCConversationData(
+    usrId: String,
+    mdlId: String,
+    timeoutMs: Long,
+    maxDepth: Int
+) extends LazyLogging {
+    private final val data = new NCPropertyMapAdapter()
+
+    case class EntityHolder(entity: NCEntity, var entityTypeUsageTime: Long = 
0)
+    case class ConversationItem(holders: mutable.ArrayBuffer[EntityHolder], 
reqId: String, tstamp: Long)
+
+    // Short-Term-Memory.
+    private val stm = mutable.ArrayBuffer.empty[ConversationItem]
+    private val lastEnts = mutable.ArrayBuffer.empty[Iterable[NCEntity]]
+    private val ctx = mutable.ArrayBuffer.empty[NCEntity]
+
+    @volatile private var lastUpdateTstamp = NCUtils.nowUtcMs()
+    @volatile private var depth = 0
+
+    /**
+      *
+      * @param newCtx
+      */
+    private def replaceContext(newCtx: mutable.ArrayBuffer[NCEntity]): Unit =
+        require(Thread.holdsLock(stm))
+        ctx.clear()
+        ctx ++= newCtx
+
+    /**
+      *
+      */
+    private def squeezeEntities(): Unit =
+        require(Thread.holdsLock(stm))
+        stm --= stm.filter(_.holders.isEmpty)
+
+    /**
+      * Gets called on each input request for given user and model.
+      */
+    def updateEntities(): Unit =
+        val now = NCUtils.nowUtcMs()
+
+        stm.synchronized {
+            depth += 1
+
+            lazy val z = s"usrId=$usrId, mdlId=$mdlId"
+
+            // The conversation is cleared on timeout or when there are too many unsuccessful requests.
+            if now - lastUpdateTstamp > timeoutMs then
+                stm.clear()
+                logger.trace(s"STM is reset by timeout [$z]")
+            else if depth > maxDepth then
+                stm.clear()
+                logger.trace(s"STM is reset after reaching max depth [$z]")
+            else
+                val minUsageTime = now - timeoutMs
+                val ents = lastEnts.flatten
+
+                for (item <- stm)
+                    val delHs =
+                        // Removed when the entity type timed out or was last used too many requests ago.
+                        item.holders.filter(h => h.entityTypeUsageTime < 
minUsageTime || !ents.contains(h.entity))
+
+                    if delHs.nonEmpty then
+                        item.holders --= delHs
+                        logger.trace(s"STM entity removed [$z, 
reqId=${item.reqId}]")
+                        stepLogEntity(delHs.toSeq.map(_.entity))
+
+                squeezeEntities()
+
+            lastUpdateTstamp = now
+            replaceContext(stm.flatMap(_.holders.map(_.entity)))
+            ack()
+        }
+
+    /**
+      * Clears all entities from this conversation satisfying given predicate.
+      *
+      * @param p Java-side predicate.
+      */
+    def clear(p: Predicate[NCEntity]): Unit =
+        stm.synchronized {
+            for (item <- stm) item.holders --= item.holders.filter(h => 
p.test(h.entity))
+            squeezeEntities()
+            replaceContext(ctx.filter(ent => !p.test(ent)))
+        }
+
+        logger.trace(s"STM is cleared [usrId=$usrId, mdlId=$mdlId]")
+
+    /**
+      *
+      * @param ents
+      */
+    private def stepLogEntity(ents: Seq[NCEntity]): Unit =
+        for (ent <- ents) logger.trace(s"  +-- $ent")
+
+    /**
+      * Adds given entities to the conversation.
+      *
+      * @param reqId Server request ID.
+      * @param ents Entities to add to the conversation STM.
+      */
+    def addEntities(reqId: String, ents: Seq[NCEntity]): Unit =
+        stm.synchronized {
+            depth = 0
+            lastEnts += ents // Last used entities processing.
+
+            val delCnt = lastEnts.length - maxDepth
+            if delCnt > 0 then lastEnts.remove(0, delCnt)
+
+            val senEnts = ents.filter(_.getRequestId == reqId)
+            if senEnts.nonEmpty then
+                // Adds new conversation element.
+                stm += ConversationItem(
+                    mutable.ArrayBuffer.empty[EntityHolder] ++ 
senEnts.map(EntityHolder(_)),
+                    reqId,
+                    lastUpdateTstamp
+                )
+
+                logger.trace(s"Added new entities to STM [usrId=$usrId, 
mdlId=$mdlId, reqId=$reqId]")
+                stepLogEntity(ents)
+
+                val registered = mutable.HashSet.empty[Seq[String]]
+                for (item <- stm.reverse; (gs, hs) <- item.holders.groupBy(t 
=> if (t.entity.getGroups != null) t.entity.getGroups.asScala else Seq.empty))
+                    val grps = gs.toSeq.sorted
+
+                    // Reversed iteration.
+                    // N : (A, B) -> registered.
+                    // N-1 : (C) -> registered.
+                    // N-2 : (A, B) or (A, B, X) etc. -> deleted, because a registered item has fewer groups.
+                    registered.find(grps.containsSlice) match
+                        case Some(_) =>
+                            item.holders --= hs
+                            for (ent <- hs.map(_.entity)) logger.trace(s"STM 
entity overridden: $ent")
+
+                        case None => registered += grps
+
+                // Updates entity usage time.
+                stm.foreach(_.holders.filter(h => 
ents.contains(h.entity)).foreach(_.entityTypeUsageTime = lastUpdateTstamp))
+
+                squeezeEntities()
+        }
+
+    /**
+      * Prints out ASCII table for current STM.
+      */
+    private def ack(): Unit =
+        require(Thread.holdsLock(stm))
+
+        val z = s"mdlId=$mdlId, usrId=$usrId"
+
+        if ctx.isEmpty then logger.trace(s"STM is empty for [$z]")
+        else
+            val tbl = NCAsciiTable("Entity ID", "Groups", "Request ID")
+            ctx.foreach(ent => tbl += (
+                ent.getId,
+                ent.getGroups.asScala.mkString(", "),
+                ent.getRequestId
+            ))
+            logger.info(s"Current STM for [$z]:\n${tbl.toString()}")
+
+    /**
+      *
+      * @return
+      */
+    def getEntities: Seq[NCEntity] =
+        // TODO: copy?
+        stm.synchronized {
+            val reqIds = ctx.map(_.getRequestId).distinct.zipWithIndex.toMap
+            ctx.groupBy(_.getRequestId).toSeq.sortBy(p => 
reqIds(p._1)).reverse.flatMap(_._2)
+        }
+
+    /**
+      * TODO: thread safe?
+      */
+    val getUserData: NCPropertyMap = data
+}
\ No newline at end of file
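
The group-override rule described in the comments of addEntities() (items are visited newest first; an older item is dropped when an already registered group list is a slice of its own) can be modeled in isolation. The sketch below is only a simplified illustration that uses strings in place of entities:

    import scala.collection.mutable

    // For each item, given newest first, tells whether the STM override rule would drop it.
    def overridden(groupsNewestFirst: Seq[Seq[String]]): Seq[Boolean] =
        val registered = mutable.HashSet.empty[Seq[String]]
        groupsNewestFirst.map { groups =>
            val grps = groups.sorted
            registered.find(r => grps.containsSlice(r)) match
                case Some(_) => true // Dropped: overridden by a newer item.
                case None =>
                    registered += grps
                    false            // Kept and registered.
        }

    // overridden(Seq(Seq("A", "B"), Seq("C"), Seq("A", "B", "X"))) == Seq(false, false, true)
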
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
new file mode 100644
index 0000000..bc7c557
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/conversation/NCConversationManager.scala
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.conversation
+
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.util.NCUtils
+
+import scala.collection.*
+import scala.jdk.CollectionConverters.*
+
+/**
+  * Conversation manager.
+  */
+class NCConversationManager(cfg: NCModelConfig) extends LazyLogging:
+    case class Value(conv: NCConversationData, var tstamp: Long = 0)
+    private final val convs: mutable.Map[String, Value] = 
mutable.HashMap.empty[String, Value]
+    @volatile private var gc: Thread = _
+
+    /**
+      * Gets conversation for given user ID.
+      *
+      * @param usrId User ID.
+      * @return New or existing conversation.
+      */
+    def getConversation(usrId: String): NCConversationData =
+        convs.synchronized {
+            val v = convs.getOrElseUpdate(
+                usrId,
+                Value(NCConversationData(usrId, cfg.getId, 
cfg.getConversationTimeout, cfg.getConversationDepth))
+            )
+
+            v.tstamp = NCUtils.nowUtcMs()
+            convs.notifyAll()
+            v.conv
+        }
+
+    /**
+      * Gets next clearing time.
+      */
+    private def clearForTimeout(): Long =
+        require(Thread.holdsLock(convs))
+
+        val now = NCUtils.now()
+        val delKeys = mutable.HashSet.empty[String]
+
+        for ((key, value) <- convs)
+            if value.tstamp < now - cfg.getConversationTimeout then
+                value.conv.getUserData.clear()
+                delKeys += key
+
+        convs --= delKeys
+
+        if convs.nonEmpty then convs.values.map(v => v.tstamp + 
v.conv.timeoutMs).min
+        else Long.MaxValue
+
+    /**
+      *
+      * @return
+      */
+    def start(): Unit =
+        gc = NCUtils.mkThread("conv-mgr-gc", cfg.getId) { t =>
+            while (!t.isInterrupted)
+                try
+                    convs.synchronized {
+                        val sleepTime = clearForTimeout() - NCUtils.now()
+                        if sleepTime > 0 then
+                            logger.trace(s"${t.getName} waits for $sleepTime 
ms.")
+                            convs.wait(sleepTime)
+                    }
+                catch
+                    case _: InterruptedException => // No-op.
+                    case e: Throwable => logger.error(s"Unexpected error for 
thread: ${t.getName}", e)
+        }
+        gc.start()
+
+    /**
+      *
+      */
+    def close(): Unit =
+        NCUtils.stopThread(gc)
+        gc = null
+        convs.clear()
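
The GC thread above sleeps until the earliest moment any conversation can expire, as computed by clearForTimeout(). A simplified standalone model of that wake-up calculation:

    // Earliest possible expiration across live conversations, or Long.MaxValue when there are none.
    def nextWakeUp(lastAccessMs: Map[String, Long], timeoutMs: Long): Long =
        if lastAccessMs.isEmpty then Long.MaxValue
        else lastAccessMs.values.map(_ + timeoutMs).min
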
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
new file mode 100644
index 0000000..adac2f1
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManager.scala
@@ -0,0 +1,174 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.dialogflow
+
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.ascii.NCAsciiTable
+
+import java.text.DateFormat
+import java.util
+import java.util.*
+import scala.collection.*
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.nlpcraft.internal.util.NCUtils
+
+import java.time.format.DateTimeFormatter
+
+/**
+ * Dialog flow manager.
+ */
+class NCDialogFlowManager(cfg: NCModelConfig) extends LazyLogging:
+    private final val flow = mutable.HashMap.empty[String, 
mutable.ArrayBuffer[NCDialogFlowItem]]
+
+    @volatile private var gc: Thread = _
+
+    /**
+      *  Gets next clearing time.
+      */
+    private def clearForTimeout(): Long =
+        require(Thread.holdsLock(flow))
+
+        val timeout = cfg.getConversationTimeout
+        val bound = NCUtils.now() - timeout
+        var next = Long.MaxValue
+
+        val delKeys = mutable.ArrayBuffer.empty[String]
+
+        for ((usrId, values) <- flow)
+            values --= values.filter(_.getRequest.getReceiveTimestamp < bound)
+
+            if values.nonEmpty then
+                val candidate = values.map(_.getRequest.getReceiveTimestamp).min + timeout
+                if next > candidate then next = candidate
+            else
+                delKeys += usrId
+
+        if delKeys.nonEmpty then flow --= delKeys
+
+        next
+
+    /**
+      *
+      * @return
+      */
+    def start(): Unit =
+        gc = NCUtils.mkThread("dialog-mgr-gc", cfg.getId) { t =>
+            while (!t.isInterrupted)
+                try
+                    flow.synchronized {
+                        val sleepTime = clearForTimeout() - NCUtils.now()
+
+                        if sleepTime > 0 then
+                            logger.trace(s"${t.getName} waits for $sleepTime 
ms.")
+                            flow.wait(sleepTime)
+                    }
+                catch
+                    case _: InterruptedException => // No-op.
+                    case e: Throwable => logger.error(s"Unexpected error for 
thread: ${t.getName}", e)
+        }
+
+        gc.start()
+    /**
+      *
+      */
+    def close(): Unit =
+        NCUtils.stopThread(gc)
+        gc = null
+        flow.clear()
+
+    /**
+      * Adds matched (winning) intent to the dialog flow.
+      *
+      * @param intentMatch
+      * @param res Intent callback result.
+      * @param ctx Original query context.
+      */
+    def addMatchedIntent(intentMatch: NCIntentMatch, res: NCResult, ctx: 
NCContext): Unit =
+        val item: NCDialogFlowItem = new NCDialogFlowItem:
+            override val getIntentMatch: NCIntentMatch = intentMatch
+            override val getRequest: NCRequest = ctx.getRequest
+            override val getResult: NCResult = res
+
+        flow.synchronized {
+            flow.getOrElseUpdate(ctx.getRequest.getUserId, 
mutable.ArrayBuffer.empty[NCDialogFlowItem]).append(item)
+            flow.notifyAll()
+        }
+
+    /**
+      * Gets sequence of dialog flow items sorted from oldest to newest (i.e. 
dialog flow) for given user ID.
+      *
+      * @param usrId User ID.
+      * @return Dialog flow.
+      */
+    def getDialogFlow(usrId: String): Seq[NCDialogFlowItem] =
+        // TODO: copy?
+        flow.synchronized { flow.get(usrId) } match
+            case Some(buf) => buf.toSeq
+            case None => Seq.empty
+
+    /**
+      * Prints out ASCII table for current dialog flow.
+      *
+      * @param usrId User ID.
+      */
+    def ack(usrId: String): Unit =
+        val tbl = NCAsciiTable(
+            "#",
+            "Intent ID",
+            "Request ID",
+            "Text",
+            "Received"
+        )
+
+        getDialogFlow(usrId).zipWithIndex.foreach { (itm, idx) =>
+            tbl += (
+                idx + 1,
+                itm.getIntentMatch.getIntentId,
+                itm.getRequest.getRequestId,
+                itm.getRequest.getText,
+                DateFormat.getDateTimeInstance.format(new 
Date(itm.getRequest.getReceiveTimestamp))
+            )
+        }
+
+        logger.info(s"""Current dialog flow (oldest first) for 
[mdlId=${cfg.getId}, usrId=$usrId]\n${tbl.toString()}""")
+
+    /**
+      * Clears dialog history for given user ID.
+      *
+      * @param usrId User ID.
+      */
+    def clear(usrId: String): Unit =
+        flow.synchronized {
+            flow -= usrId
+            flow.notifyAll()
+        }
+
+    /**
+      * Clears dialog history for given user ID and predicate.
+      *
+      * @param usrId User ID.
+      * @param pred Dialog flow item predicate.
+      */
+    def clear(usrId: String, pred: NCDialogFlowItem => Boolean): Unit =
+        flow.synchronized {
+            flow.get(usrId).foreach(items => flow(usrId) = items.filterNot(pred))
+            flow.notifyAll()
+        }
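
Besides wholesale clearing, the predicate-based clear(...) above allows dropping only selected items. A hedged sketch that removes the items produced by a single intent (the helper is illustrative):

    import org.apache.nlpcraft.internal.dialogflow.NCDialogFlowManager

    // Removes from the user's dialog flow only the items matched by the given intent ID.
    def forgetIntent(mgr: NCDialogFlowManager, usrId: String, intentId: String): Unit =
        mgr.clear(usrId, item => item.getIntentMatch.getIntentId == intentId)
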
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
index 5fc70bb..a4e6d62 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelClientImpl.scala
@@ -20,100 +20,172 @@ package org.apache.nlpcraft.internal.impl
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.internal.*
+import org.apache.nlpcraft.internal.ascii.NCAsciiTable
+import org.apache.nlpcraft.internal.conversation.*
+import org.apache.nlpcraft.internal.dialogflow.NCDialogFlowManager
+import org.apache.nlpcraft.internal.impl.*
+import org.apache.nlpcraft.internal.intent.matcher.*
 import org.apache.nlpcraft.internal.util.*
 
+import java.util
 import java.util.concurrent.*
-import java.util.concurrent.atomic.AtomicReference
-import java.util.{Objects, List as JList, Map as JMap}
-import scala.collection.mutable
+import java.util.concurrent.atomic.*
+import java.util.function.*
+import java.util.{ArrayList, Objects, UUID, Collections as JColls, List as 
JList, Map as JMap}
+import scala.collection.{immutable, mutable}
 import scala.concurrent.ExecutionContext
 import scala.jdk.CollectionConverters.*
+import scala.jdk.OptionConverters.*
 
 /**
   *
   * @param mdl
   */
 class NCModelClientImpl(mdl: NCModel) extends LazyLogging:
-    private val plProc = NCModelPipelineProcessor(mdl)
-    private var plSrvs: Seq[NCLifecycle] = _
+    verify()
 
-    init(mdl.getConfig, mdl.getPipeline)
+    private val intents = NCModelScanner.scan(mdl)
+    private val convMgr = NCConversationManager(mdl.getConfig)
+    private val dlgMgr = NCDialogFlowManager(mdl.getConfig)
+    private val plMgr = new NCModelPipelineManager(mdl.getConfig, 
mdl.getPipeline)
+    private val intentsMgr = NCIntentsManager(dlgMgr, intents.map(p => 
p.intent -> p.function).toMap)
+
+    init()
 
     /**
       *
       * @param cfg
       * @param pipeline
       */
-    private def init(cfg: NCModelConfig, pipeline: NCModelPipeline): Unit =
+    private def verify(): Unit =
+        Objects.requireNonNull(mdl, "Model cannot be null.")
+
+        val cfg = mdl.getConfig
+        val pipeline = mdl.getPipeline
+
         Objects.requireNonNull(cfg.getId, "Model ID cannot be null.")
         Objects.requireNonNull(cfg.getName, "Model name cannot be null.")
         Objects.requireNonNull(cfg.getVersion, "Model version cannot be null.")
         Objects.requireNonNull(pipeline.getTokenParser, "Token parser cannot 
be null.")
         Objects.requireNonNull(pipeline.getEntityParsers, "List of entity 
parsers in the pipeline cannot be null.")
+
         if pipeline.getEntityParsers.isEmpty then E(s"At least one entity 
parser must be specified in the pipeline.")
 
-        val buf = mutable.ArrayBuffer.empty[NCLifecycle] ++ 
pipeline.getEntityParsers.asScala
+    /**
+      *
+      */
+    private def init(): Unit =
+        convMgr.start()
+        dlgMgr.start()
+        plMgr.start()
+
+
+    /**
+      * @param txt
+      * @param data
+      * @param usrId
+      * @return
+      */
+    def ask(txt: String, data: JMap[String, AnyRef], usrId: String): NCResult =
+        val plData = plMgr.prepare(txt, data, usrId)
 
-        def add[T <: NCLifecycle](list: JList[T]): Unit = if list != null then 
buf ++= list.asScala
+        val userId = plData.request.getUserId
+        val convHldr = convMgr.getConversation(userId)
+        val allEnts = plData.variants.flatMap(_.getEntities.asScala)
 
-        add(pipeline.getTokenEnrichers)
-        add(pipeline.getTokenValidators)
-        add(pipeline.getEntityParsers)
-        add(pipeline.getEntityParsers)
-        add(pipeline.getEntityValidators)
-        add(pipeline.getTokenValidators)
+        val conv: NCConversation =
+            new NCConversation:
+                override val getSession: NCPropertyMap = convHldr.getUserData
+                override val getStm: JList[NCEntity] = 
convHldr.getEntities.asJava
+                override val getDialogFlow: JList[NCDialogFlowItem] = 
dlgMgr.getDialogFlow(userId).asJava
+                override def clearStm(filter: Predicate[NCEntity]): Unit = 
convHldr.clear(filter)
+                override def clearDialog(filter: Predicate[NCDialogFlowItem]): 
Unit = dlgMgr.clear(userId, (s: NCDialogFlowItem) => filter.test(s))
+
+        val ctx: NCContext =
+            new NCContext:
+                override def isOwnerOf(ent: NCEntity): Boolean = 
allEnts.contains(ent)
+                override val getModelConfig: NCModelConfig = mdl.getConfig
+                override val getRequest: NCRequest = plData.request
+                override val getConversation: NCConversation = conv
+                override val getVariants: util.Collection[NCVariant] = 
plData.variants.asJava
+                override val getTokens: JList[NCToken] = plData.tokens
+
+        intentsMgr.solve(NCIntentSolverInput(ctx, mdl))
 
-        plSrvs = buf.toSeq
-        processServices(_.onStart(cfg), "started")
 
     /**
       *
-      * @param act
-      * @param actVerb
+      * @param usrId
       */
-    private def processServices(act: NCLifecycle => Unit, actVerb: String): 
Unit =
-        NCUtils.execPar(plSrvs.map(p =>
-            () => {
-                act(p)
-                logger.info(s"Service $actVerb: '${p.getClass.getName}'")
-            }
-        )*)(ExecutionContext.Implicits.global)
+    def clearStm(usrId: String): Unit = convMgr.getConversation(usrId).clear(_ 
=> true)
 
     /**
       *
-      * @param txt
-      * @param data
       * @param usrId
-      * @return
+      * @param filter
       */
-    def ask(txt: String, data: JMap[String, AnyRef], usrId: String): 
CompletableFuture[NCResult] =
-        plProc.ask(txt, data, usrId)
+    def clearStm(usrId: String, filter: Predicate[NCEntity]): Unit = 
convMgr.getConversation(usrId).clear(filter)
 
     /**
       *
-      * @param txt
-      * @param data
       * @param usrId
-      * @return
       */
-    def askSync(txt: String, data: JMap[String, AnyRef], usrId: String): 
NCResult =
-        plProc.askSync(txt, data, usrId)
+    def clearDialog(usrId: String): Unit = dlgMgr.clear(usrId)
 
     /**
       *
       * @param usrId
       */
-    def clearConversation(usrId: String): Unit = ???
+    def clearDialog(usrId: String, filter: Predicate[NCDialogFlowItem]): Unit 
= dlgMgr.clear(usrId, (i: NCDialogFlowItem) => filter.test(i))
 
     /**
       *
-      * @param usrId
       */
-    def clearDialog(usrId: String): Unit = ???
+    def validateSamples(): Unit =
+        case class Result(intentId: String, text: String, pass: Boolean, 
error: Option[String], time: Long)
+
+        val userId = UUID.randomUUID().toString
+        val results = mutable.ArrayBuffer.empty[Result]
+
+        def now: Long = System.currentTimeMillis()
+
+        for (i <- intents; samples <- i.samples)
+            for (sample <- samples)
+                val t = now
+
+                try
+                    ask(sample, null, userId)
+
+                    results += Result(i.intent.id, sample, true, None, now - t)
+                catch
+                    case e: Throwable =>
+                        results += Result(i.intent.id, sample, false, Option(e.getMessage), now - t)
+
+            clearDialog(userId)
+            clearStm(userId)
+
+        val tbl = NCAsciiTable()
+
+        tbl #= ("Intent ID", "+/-", "Text", "Error", "ms.")
+
+        for (res <- results)
+            tbl += (
+                res.intentId,
+                if res.pass then "OK" else "FAIL",
+                res.text,
+                res.error.getOrElse(""),
+                res.time
+            )
+
+        val passCnt = results.count(_.pass)
+        val failCnt = results.count(!_.pass)
+
+        tbl.info(logger, Option(s"Model auto-validation results: OK $passCnt, 
FAIL $failCnt:"))
 
     /**
       *
       */
     def close(): Unit =
-        plProc.close()
-        processServices(_.onStop(mdl.getConfig), "stopped")
+        plMgr.close()
+        dlgMgr.close()
+        convMgr.close()
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineProcessor.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
similarity index 65%
rename from 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineProcessor.scala
rename to 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
index 14faacf..1a43827 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineProcessor.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManager.scala
@@ -18,62 +18,70 @@
 package org.apache.nlpcraft.internal.impl
 
 import com.typesafe.scalalogging.LazyLogging
-import org.apache.nlpcraft.*
+import org.apache.nlpcraft.{NCModelConfig, NCModelPipeline, *}
+import org.apache.nlpcraft.internal.conversation.*
+import org.apache.nlpcraft.internal.dialogflow.NCDialogFlowManager
 import org.apache.nlpcraft.internal.impl.*
+import org.apache.nlpcraft.internal.intent.matcher.*
 import org.apache.nlpcraft.internal.util.*
 
 import java.util
 import java.util.concurrent.*
 import java.util.concurrent.atomic.*
-import java.util.{ArrayList, UUID, List as JList, Map as JMap}
-import scala.collection.immutable
-import scala.jdk.OptionConverters.*
+import java.util.function.Predicate
+import java.util.{ArrayList, Objects, UUID, Collections as JColls, List as 
JList, Map as JMap}
+import scala.collection.{immutable, mutable}
 import scala.concurrent.ExecutionContext
 import scala.jdk.CollectionConverters.*
+import scala.jdk.OptionConverters.*
 
 /**
   *
-  * @param mdl
+  * @param request
+  * @param variants
+  * @param tokens
+  * @param checkCancel
   */
-class NCModelPipelineProcessor(mdl: NCModel) extends LazyLogging:
-    /**
-      *
-      * @param req
-      * @param vars
-      * @param checkCancel
-      */
-    case class VariantsHolder(req: NCRequest, vars: Seq[NCVariant], 
checkCancel: Option[() => Unit])
-
-    require(mdl != null)
-    require(mdl.getPipeline.getTokenParser != null)
-    require(mdl.getPipeline.getEntityParsers != null)
-    require(mdl.getPipeline.getEntityParsers.size() > 0)
+case class NCPipelineData(request: NCRequest, variants: Seq[NCVariant], 
tokens: JList[NCToken], checkCancel: Option[() => Unit])
 
-    private val pipeline = mdl.getPipeline
+/**
+  *
+  * @param cfg
+  * @param pipeline
+  */
+class NCModelPipelineManager(cfg: NCModelConfig, pipeline: NCModelPipeline) 
extends LazyLogging:
     private val pool = new java.util.concurrent.ForkJoinPool()
-    private val cfg = mdl.getConfig
     private val tokParser = pipeline.getTokenParser
     private val tokEnrichers = nvl(pipeline.getTokenEnrichers)
     private val entEnrichers = nvl(pipeline.getEntityEnrichers)
     private val entParsers = nvl(pipeline.getEntityParsers)
     private val tokVals = nvl(pipeline.getTokenValidators)
     private val entVals = nvl(pipeline.getEntityValidators)
-    private val varFilter = pipeline.getVariantFilter.toScala
+    private val varFilterOpt = pipeline.getVariantFilter.toScala
+
+    private val allSrvs: Seq[NCLifecycle] =
+        tokEnrichers ++ entEnrichers ++ entParsers ++ tokVals ++ entVals ++ 
varFilterOpt.toSeq
 
     /**
       *
-      * @param list
-      * @tparam T
-      * @return
+      * @param act
+      * @param actVerb
       */
-    private def nvl[T](list: JList[T]): Seq[T] = if list == null then 
Seq.empty else list.asScala.toSeq
+    private def processServices(act: NCLifecycle => Unit, actVerb: String): 
Unit =
+        NCUtils.execPar(allSrvs.map(p =>
+            () => {
+                act(p)
+                logger.info(s"Service $actVerb: '${p.getClass.getName}'")
+            }
+        )*)(ExecutionContext.Implicits.global)
 
     /**
       *
-      * @param h
+      * @param list
+      * @tparam T
       * @return
       */
-    private def matchIntent(h: VariantsHolder): NCResult = ???
+    private def nvl[T](list: JList[T]): Seq[T] = if list == null then 
Seq.empty else list.asScala.toSeq
 
     /**
       *
@@ -83,12 +91,7 @@ class NCModelPipelineProcessor(mdl: NCModel) extends 
LazyLogging:
       * @param checkCancel
       * @return
       */
-    private[internal] def prepVariants(
-        txt: String,
-        data: JMap[String, AnyRef],
-        usrId: String,
-        checkCancel: Option[() => Unit] = None
-    ): VariantsHolder =
+    def prepare(txt: String, data: JMap[String, AnyRef], usrId: String, 
checkCancel: Option[() => Unit] = None): NCPipelineData =
         require(txt != null && usrId != null)
 
         /**
@@ -147,49 +150,25 @@ class NCModelPipelineProcessor(mdl: NCModel) extends 
LazyLogging:
 
         var variants: JList[NCVariant] =
             if overlapEnts.nonEmpty then
-                
NCModelPipelineHelper.findCombinations(overlapEnts.map(_.asJava).asJava, pool)
-                    .asScala.map(_.asScala).map(delComb =>
+                NCModelPipelineHelper.
+                    findCombinations(overlapEnts.map(_.asJava).asJava, pool).
+                    asScala.map(_.asScala).map(delComb =>
                         val delSet = delComb.toSet
                         newVariant(entities.filter(!delSet.contains(_)))
                     ).asJava
             else
                 Seq(newVariant(entities)).asJava
 
-        if varFilter.isDefined then
+        if varFilterOpt.isDefined then
             check()
-            variants = varFilter.get.filter(req, cfg, variants)
-
-        VariantsHolder(req, variants.asScala.toSeq, checkCancel)
-
-    /**
-      *
-      * @param txt
-      * @param data
-      * @param usrId
-      * @return
-      * @throws NCRejection
-      * @throws NCCuration
-      * @throws NCException
-      */
-    def askSync(txt: String, data: JMap[String, AnyRef], usrId: String): 
NCResult =
-        matchIntent(prepVariants(txt, data, usrId))
-
-    /**
-      * TODO: explain all exceptions that are thrown by the future.
-      *
-      * @param txt
-      * @param data
-      * @param usrId
-      * @return
-      */
-    def ask(txt: String, data: JMap[String, AnyRef], usrId: String): 
CompletableFuture[NCResult] =
-        val fut = new CompletableFuture[NCResult]
-        val check = () => if fut.isCancelled then
-            E(s"Asynchronous ask is interrupted [txt=$txt, usrId=$usrId]")
+            variants = varFilterOpt.get.filter(req, cfg, variants)
 
-        fut.completeAsync(() => matchIntent(prepVariants(txt, data, usrId, 
Option(check))))
+        NCPipelineData(req, variants.asScala.toSeq, toks, checkCancel)
 
+    def start(): Unit = processServices(_.onStart(cfg), "started")
     /**
       *
       */
-    def close(): Unit = NCUtils.shutdownPool(pool)
+    def close(): Unit =
+        processServices(_.onStop(cfg), "stopped")
+        NCUtils.shutdownPool(pool)
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
index f10c577..76f3c78 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/NCIDLEntity.scala
@@ -26,7 +26,7 @@ import scala.jdk.CollectionConverters.*
   * @param idx
   */
 class NCIDLEntity(ent: NCEntity, idx: Int):
-    private lazy val txt = ent.getTokens.asScala.map(_.getText).mkString(" ")
+    private lazy val txt = ent.mkText()
 
     def getImpl: NCEntity = ent
     def getText: String = txt
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverInput.scala
similarity index 78%
copy from 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
copy to 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverInput.scala
index 5fde322..9a8587f 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverInput.scala
@@ -17,11 +17,17 @@
 
 package org.apache.nlpcraft.internal.intent.matcher
 
+import org.apache.nlpcraft.*
+
 /**
+  * Input data for the intent solver.
   *
+  * @param context
+  * @param model
+  * @param intentMatch
   */
-object NCIntentMatcher:
-    /**
-      *
-      */
-    def bestMatch(): Unit = ???
\ No newline at end of file
+case class NCIntentSolverInput(
+    context: NCContext,
+    model: NCModel,
+    var intentMatch: NCIntentMatch = null
+)
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverResult.scala
similarity index 61%
rename from 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
rename to 
nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverResult.scala
index 5fde322..59c2880 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentMatcher.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverResult.scala
@@ -17,11 +17,31 @@
 
 package org.apache.nlpcraft.internal.intent.matcher
 
+import org.apache.nlpcraft.*
+
+/**
+  * Intent solver engine result. A plain case class is used for easier Java interop.
+  *
+  * @param termId
+  * @param entities
+  */
+case class NCIntentEntitiesGroup(
+    termId: Option[String],
+    entities: Seq[NCEntity]
+)
+
 /**
   *
+  * @param intentId
+  * @param fn
+  * @param groups
+  * @param variant
+  * @param variantIdx
   */
-object NCIntentMatcher:
-    /**
-      *
-      */
-    def bestMatch(): Unit = ???
\ No newline at end of file
+case class NCIntentSolverResult(
+    intentId: String,
+    fn: NCIntentMatch => NCResult,
+    groups: Seq[NCIntentEntitiesGroup],
+    variant: NCIntentSolverVariant,
+    variantIdx: Int
+)
\ No newline at end of file
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverVariant.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverVariant.scala
new file mode 100644
index 0000000..bb7a3e2
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentSolverVariant.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.intent.matcher
+
+import org.apache.nlpcraft.*
+
+import java.util
+import scala.jdk.CollectionConverters.*
+
+/**
+  * Sentence variant & its weight.
+  *
+  * @param entities
+  */
+case class NCIntentSolverVariant(entities: Seq[NCEntity]) extends 
Ordered[NCIntentSolverVariant]:
+    private lazy val weights = calcWeight()
+
+    /**
+      *
+      * @param toks
+      */
+    private def calcSparsity(toks: Seq[NCToken]): Int =
+        val idxs = toks.map(_.getIndex)
+        idxs.zipWithIndex.tail.map { (v, i) => Math.abs(v - idxs(i - 1)) }.sum 
- idxs.length + 1
+
+    /**
+     * Calculates weight components sequence.
+     */
+    private def calcWeight(): Seq[Int] =
+        val toks: Seq[Seq[NCToken]] = entities.map(_.getTokens.asScala.toSeq)
+
+        val toksCnt = toks.map(_.size).sum
+        val avgToksPerEntity = if toksCnt > 0 then 
Math.round((entities.size.toFloat / toksCnt) * 100) else 0
+        val totalSparsity = -toks.map(calcSparsity).sum  // Less is better.
+
+        // Order is important.
+        Seq(toksCnt, avgToksPerEntity, totalSparsity)
+
+    override def compare(other: NCIntentSolverVariant): Int =
+        def compareWeight(weight1: Int, weight2: Int): Option[Int] =
+            val res = Integer.compare(weight1, weight2)
+            Option.when(res != 0)(res)
+
+        weights.zip(other.weights).flatMap { (w1, w2) => compareWeight(w1, w2) 
}.to(LazyList).headOption.getOrElse(0)
+
+    // TODO:
+    override def toString: String = s"Weights: ${weights.mkString("[", ",", 
"]")}"
\ No newline at end of file
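
The sparsity component above is zero for contiguous tokens and grows with the gaps between an entity's tokens; calcWeight() negates it so that denser variants weigh more. A short worked check using the same formula with illustrative token indices:

    // Same computation as calcSparsity() above.
    def sparsity(idxs: Seq[Int]): Int =
        idxs.zipWithIndex.tail.map((v, i) => math.abs(v - idxs(i - 1))).sum - idxs.length + 1

    // sparsity(Seq(2, 3, 4)) == 0   -- contiguous tokens
    // sparsity(Seq(1, 4, 5)) == 2   -- the first two tokens are 3 positions apart
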
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentsManager.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentsManager.scala
new file mode 100644
index 0000000..bc87ad9
--- /dev/null
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/intent/matcher/NCIntentsManager.scala
@@ -0,0 +1,655 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.intent.matcher
+
+import com.typesafe.scalalogging.LazyLogging
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.ascii.NCAsciiTable
+import org.apache.nlpcraft.internal.dialogflow.NCDialogFlowManager
+import org.apache.nlpcraft.internal.intent.*
+
+import java.util.{Collections, List as JList}
+import java.util.function.Function
+import scala.annotation.targetName
+import scala.collection.mutable
+import scala.collection.mutable.ArrayBuffer
+import scala.jdk.CollectionConverters.*
+import scala.language.postfixOps
+
+/**
+ * Intent solver that finds the best matching intent for a given user sentence.
+ */
+class NCIntentsManager(dialog: NCDialogFlowManager, intents: Map[NCIDLIntent, 
NCIntentMatch => NCResult]) extends LazyLogging:
+    /**
+     * NOTE: not thread-safe.
+     */
+    private class Weight(ws: Int*) extends Ordered[Weight]:
+        private val buf = mutable.ArrayBuffer[Int]() ++ ws
+
+        /**
+         * Adds given weight to this weight.
+         *
+         * @param that Weight to add.
+         * @return
+         */
+        @targetName("plusEqual")
+        def +=(that: Weight): Weight =
+            val tmp = mutable.ArrayBuffer[Int]()
+            for (i <- 0 until Math.max(buf.size, that.buf.size))
+                tmp.append(norm(i, buf) + norm(i, that.buf))
+            buf.clear()
+            buf ++= tmp
+            this
+
+        /**
+         * Appends new weight.
+         *
+         * @param w New weight to append.
+         * @return
+         */
+        def append(w: Int): Weight =
+            buf.append(w)
+            this
+
+        /**
+         * Prepends new weight.
+         *
+         * @param w New weight to prepend.
+         * @return
+         */
+        def prepend(w: Int): Weight =
+            buf.prepend(w)
+            this
+
+        /**
+         * Sets specific weight at a given index.
+         *
+         * @param idx
+         * @param w
+         */
+        def setWeight(idx: Int, w: Int): Unit =
+            buf(idx) = w
+
+        /**
+         * Gets element at given index or zero if index is out of bounds.
+         *
+         * @param i Index in collection.
+         * @param c Collection.
+         * @return
+         */
+        private def norm(i: Int, c: mutable.ArrayBuffer[Int]): Int = if i < 
c.size then c(i) else 0
+
+        /**
+         *
+         * @param that
+         * @return
+         */
+        override def compare(that: Weight): Int =
+            def compareWeight(idx: Int): Option[Int] =
+                val res = Integer.compare(norm(idx, buf), norm(idx, that.buf))
+                Option.when(res != 0)(res)
+
+            (0 until Math.max(buf.size, 
that.buf.size)).flatMap(compareWeight).to(LazyList).headOption.getOrElse(0)
+
+        def toSeq: Seq[Int] = buf.toSeq
+
+        override def toString: String = buf.mkString("[", ", ", "]")
+
+    /**
+     *
+     * @param used
+     * @param entity
+     */
+    private case class IntentEntity(
+        var used: Boolean,
+        var conv: Boolean,
+        entity: NCEntity
+    )
+
+    /**
+     * @param termId
+     * @param usedEntities
+     * @param weight
+     */
+    private case class TermMatch(termId: Option[String], usedEntities: 
Seq[IntentEntity], weight: Weight):
+        private lazy val maxIndex: Int = 
usedEntities.map(_.entity.getTokens.asScala.map(_.getIndex).max).max
+
+        def after(tm: TermMatch): Boolean = maxIndex > tm.maxIndex
+
+    /**
+      *
+      * @param entities
+      */
+    private case class PredicateMatch(entities: Seq[IntentEntity], weight: 
Weight)
+
+    /**
+     *
+     * @param term
+     * @param usedEntities
+     */
+    private case class TermEntitiesGroup(
+        term: NCIDLTerm,
+        usedEntities: Seq[IntentEntity]
+    )
+
+    /**
+     *
+     * @param entityGroups
+     * @param weight
+     * @param intent
+     */
+    private case class IntentMatch(
+        entityGroups: List[TermEntitiesGroup],
+        weight: Weight,
+        intent: NCIDLIntent
+    )
+
+    /**
+      *
+      * @param intentMatch
+      * @param callback
+      * @param variant
+      * @param variantIdx
+      */
+    private case class MatchHolder(
+        intentMatch: IntentMatch, // Match.
+        callback: NCIntentMatch => NCResult, // Callback function.
+        variant: NCIntentSolverVariant, // Variant used for the match.
+        variantIdx: Int // Variant index.
+    )
+
+    /**
+     * Main entry point for intent engine.
+     *
+     * @param ctx Query context.
+     * @param intents Intents to match for.
+     * @return
+     */
+    private def solveIntents(ctx: NCContext, intents: Map[NCIDLIntent, 
NCIntentMatch => NCResult]): List[NCIntentSolverResult] =
+        dialog.ack(ctx.getRequest.getUserId)
+
+        val matches = mutable.ArrayBuffer.empty[MatchHolder]
+
+        // Find all matches across all intents and sentence variants.
+        for (
+            (vrn, vrnIdx) <- ctx.getVariants.asScala.zipWithIndex;
+            ents = vrn.getEntities.asScala;
+            varEnts = ents.map(IntentEntity(false, false, _)).toSeq;
+            varEntsGroups = ents.map(t => if t.getGroups != null then 
t.getGroups.asScala else Set.empty[String]);
+            (intent, callback) <- intents
+        )
+            val convEnts: Seq[IntentEntity] =
+                if intent.terms.exists(_.conv) then
+                    // Do not mix entities with the same group coming from the conversation and from the given sentence.
+                    ctx.getConversation.getStm.asScala.toSeq.
+                        map(ent => ent -> (if ent.getGroups == null then 
Set.empty[String] else ent.getGroups.asScala)).
+                        filter { (ent, entGroups)  => 
!varEntsGroups.exists(_.subsetOf(entGroups)) }.
+                        map { (e, _) => IntentEntity(used = false, conv = 
true, e) }
+                else
+                    Seq.empty
+
+            // Solve intent in isolation.
+            solveIntent(ctx, intent, varEnts, convEnts, vrnIdx) match
+                case Some(intentMatch) => matches += MatchHolder(intentMatch, 
callback, NCIntentSolverVariant(vrn.getEntities.asScala.toSeq), vrnIdx)
+                case None => // No-op.
+
+        val sorted = matches.sortWith((m1: MatchHolder, m2: MatchHolder) =>
+            // 1. First with maximum weight.
+            m1.intentMatch.weight.compare(m2.intentMatch.weight) match { // Do 
not drop this bracket (IDE confused)
+                case x1 if x1 < 0 => false
+                case x1 if x1 > 0 => true
+                case x1 =>
+                    require(x1 == 0)
+
+                    logEqualMatches(m1, m2)
+
+                    // 2. First with maximum variant.
+                    m1.variant.compareTo(m2.variant) match
+                        case x2 if x2 < 0 => false
+                        case x2 if x2 > 0 => true
+                        case x2 =>
+                            require(x2 == 0)
+
+                            def calcHash(m: MatchHolder): Int =
+                                val variantPart =
+                                    m.variant.
+                                        entities.
+                                        map(t => 
s"${t.getId}${t.getGroups}${t.mkText()}").
+                                        mkString("")
+
+                                val intentPart = m.intentMatch.intent.toString
+
+                                (variantPart, intentPart).##
+
+                            // The order itself is not meaningful here - it just provides
+                            // a deterministic result for matches with equal weights.
+                            calcHash(m1) > calcHash(m2)
+            }
+        )
+
+        logMatches(sorted)
+
+        sorted.map(m =>
+            NCIntentSolverResult(
+                m.intentMatch.intent.id,
+                m.callback,
+                m.intentMatch.entityGroups.map(grp => 
NCIntentEntitiesGroup(grp.term.id, grp.usedEntities.map(_.entity))),
+                m.variant,
+                m.variantIdx
+            )
+        ).toList
+
+    /**
+      *
+      * @param matches
+      */
+    private def logMatches(matches: ArrayBuffer[MatchHolder]): Unit =
+        if matches.nonEmpty then
+            val tbl = NCAsciiTable("Variant", "Intent", "Term Entities", 
"Intent Match Weight")
+
+            for (m <- matches)
+                val im = m.intentMatch
+                val w = im.weight
+                val ents = mutable.ListBuffer.empty[String]
+
+                ents += s"intent=${im.intent.id}"
+                var grpIdx = 0
+
+                for (grp <- im.entityGroups)
+                    ents += s"  ${grp.term.toString}"
+                    grpIdx += 1
+
+                    if grp.usedEntities.nonEmpty then
+                        var entIdx = 0
+                        for (e <- grp.usedEntities)
+                            val conv = if e.conv then "(conv) " else ""
+                            ents += s"    #$entIdx: 
$conv${e.entity.getId}(${e.entity.mkText()})"
+                            entIdx += 1
+                    else
+                        ents += "    <empty>"
+
+                if m == matches.head then
+                    tbl += (
+                        Seq(s"#${m.variantIdx + 1}", "<|best match|>"), 
Seq(im.intent.id, "<|best match|>"), ents, w
+                    )
+                else
+                    tbl += (
+                        s"#${m.variantIdx + 1}", im.intent.id, ents, w
+                    )
+
+            tbl.info(
+                logger,
+                Option(s"Found ${matches.size} matching ${if matches.size > 1 then "intents" else "intent"} (sorted best to worst):")
+            )
+        else
+            logger.info(s"No matching intent found:")
+            logger.info(s"  +-- Turn on DEBUG log level to see more details.")
+
+    /**
+      *
+      * @param m1
+      * @param m2
+      */
+    private def logEqualMatches(m1: MatchHolder, m2: MatchHolder): Unit =
+        val mw1 = m1.intentMatch.weight
+        val mw2 = m2.intentMatch.weight
+        val v1 = m1.variant
+        val v2 = m2.variant
+
+        val tbl = new NCAsciiTable()
+
+        tbl += ("Intent ID", m1.intentMatch.intent.id, m2.intentMatch.intent.id)
+        tbl += ("Variant #", m1.variantIdx + 1, m2.variantIdx + 1)
+        tbl += ("Intent Match Weight", mw1.toString, mw2.toString)
+        tbl += ("Variant Weight", v1.toString, v2.toString)
+
+        logger.warn(s"""Two matching intents have the same weight for their 
matches (variants weight will be used further):${tbl.toString}""")
+
+    /**
+     * Solves a single intent in isolation against the given variant.
+     *
+     * @param ctx Query context.
+     * @param intent Intent to solve.
+     * @param senEnts Entities from the given sentence variant.
+     * @param convEnts Entities from the conversation (STM).
+     * @param varIdx Index of the variant.
+     * @return Intent match, if any.
+     */
+    private def solveIntent(
+        ctx: NCContext, intent: NCIDLIntent, senEnts: Seq[IntentEntity], 
convEnts: Seq[IntentEntity], varIdx: Int
+    ): Option[IntentMatch] =
+        val intentId = intent.id
+        val opts = intent.options
+        val flow = dialog.getDialogFlow(ctx.getRequest.getUserId)
+        val varStr = s"(variant #${varIdx + 1})"
+        val flowRegex = intent.flowRegex
+
+        // Check dialog flow regex first, if any.
+        val flowMatched: Boolean =
+            flowRegex match
+                case Some(regex) =>
+                    val flowStr = 
flow.map(_.getIntentMatch.getIntentId).mkString(" ")
+
+                    def process(matched: Boolean): Boolean =
+                        val s = if matched then "matched" else "did not match"
+                        logger.info(s"Intent '$intentId' $s regex dialog flow 
$varStr:")
+                        logger.info(s"  |-- ${"Intent IDs  :"} $flowStr")
+                        logger.info(s"  +-- ${"Match regex :"} 
${regex.toString}")
+
+                        matched
+
+                    process(regex.matcher(flowStr).find(0))
+                case None => true
+
+        if flowMatched then
+            val intentW = new Weight()
+            val intentGrps = mutable.ArrayBuffer.empty[TermEntitiesGroup]
+            var abort = false
+            var lastTermMatch: TermMatch = null
+            val sess = ctx.getConversation.getSession // Conversation metadata 
(shared across all terms).
+            val convMeta = sess.keysSet().asScala.map(k => k -> 
sess.get(k).asInstanceOf[Object]).toMap
+            val ents = senEnts.map(_.entity)
+
+            // Check terms.
+            for (term <- intent.terms if !abort)
+                // Fresh context for each term.
+                val idlCtx = NCIDLContext(
+                    ctx.getModelConfig,
+                    ents,
+                    intentMeta = intent.meta,
+                    convMeta = convMeta,
+                    req = ctx.getRequest,
+                    vars = mutable.HashMap.empty[String, NCIDLFunction] ++ 
term.decls
+                )
+
+                solveTerm(term, idlCtx, senEnts, if term.conv then convEnts 
else Seq.empty) match
+                    case Some(termMatch) =>
+                        if opts.ordered && lastTermMatch != null && 
!termMatch.after(lastTermMatch) then
+                            abort = true
+                        else
+                            // Term is found.
+                            // Add its weight and grab its entities.
+                            intentW += termMatch.weight
+                            intentGrps += TermEntitiesGroup(term, 
termMatch.usedEntities)
+                            lastTermMatch = termMatch
+
+                            logMatch(intent, term, termMatch)
+                    case None =>
+                        // Term is missing. Stop further processing for this 
intent. This intent cannot be matched.
+                        logger.debug(s"Intent '$intentId' did not match 
because of unmatched term '$term' $varStr.")
+
+                        abort = true
+
+            if abort then
+                None
+            else
+                val usedSenEnts = senEnts.filter(_.used)
+                val unusedSenEnts = senEnts.filter(!_.used)
+                val usedConvEnts = convEnts.filter(_.used)
+                val usedToks = usedSenEnts.flatMap(_.entity.getTokens.asScala)
+                val unusedToks = ctx.getTokens.asScala.filter(p => 
!usedToks.contains(p))
+
+                if !opts.allowStmEntityOnly && usedSenEnts.isEmpty && 
usedConvEnts.nonEmpty then
+                    logger.info(
+                        s"Intent '$intentId' did not match because all its matched entities came from STM $varStr. See intent 'allowStmEntityOnly' option."
+                    )
+
+                    None
+                else if !opts.ignoreUnusedFreeWords && unusedToks.nonEmpty then
+                    logger.info(
+                        s"Intent '$intentId' did not match because of unused 
free words $varStr. See intent 'ignoreUnusedFreeWords' option. Unused free 
words indexes: ${unusedToks.map(_.getIndex).mkString("{", ",", "}")}"
+                    )
+
+                    None
+                else
+                    if usedSenEnts.isEmpty && usedConvEnts.isEmpty then
+                        logger.warn(s"Intent '$intentId' matched but no 
entities were used $varStr.")
+
+                    // Number of remaining (unused) non-free words in the 
sentence is a measure of exactness of the match.
+                    // The match is exact when all non-free words are used in 
that match.
+                    // Negate to make sure the bigger (smaller negative 
number) is better.
+                    // TODO: check formula.
+                    val nonFreeWordNum = -(ctx.getTokens.size() - 
senEnts.map(_.entity.getTokens.size()).sum)
+
+                    intentW.prepend(nonFreeWordNum)
+
+                    Option(IntentMatch(entityGroups = intentGrps.toList, 
weight = intentW, intent = intent))
+        else
+            None
+
+    /**
+      *
+      * @param intent
+      * @param term
+      * @param termMatch
+      */
+    private def logMatch(intent: NCIDLIntent, term: NCIDLTerm, termMatch: 
TermMatch): Unit =
+        val tbl = NCAsciiTable()
+
+        val w = termMatch.weight.toSeq
+
+        tbl += ("Intent ID", s"${intent.id}")
+        tbl += ("Matched Term", term)
+        tbl += (
+            "Matched Entities",
+            termMatch.usedEntities.map(t =>
+                val txt = t.entity.mkText()
+                val idx = 
t.entity.getTokens.asScala.map(_.getIndex).mkString("{", ",", "}")
+
+                s"$txt${s"[$idx]"}").mkString(" ")
+        )
+        tbl += (
+            "Term Match Weight", s"<${w.head}, ${w(1)}, ${w(2)}, ${w(3)}, ${w(4)}, ${w(5)}>"
+        )
+
+        tbl.debug(logger, Option("Term match found:"))
+
+    /**
+     * Solves term.
+     *
+     * @param term Term to solve.
+     * @param idlCtx IDL context.
+     * @param senEnts Entities from the given sentence variant.
+     * @param convEnts Entities from the conversation (STM).
+     * @return Term match, if any.
+     */
+    private def solveTerm(
+        term: NCIDLTerm,
+        idlCtx: NCIDLContext,
+        senEnts: Seq[IntentEntity],
+        convEnts: Seq[IntentEntity]
+    ): Option[TermMatch] =
+        if senEnts.isEmpty && convEnts.isEmpty then
+            logger.warn(s"No entities available to match on for the term 
'$term'.")
+
+        try
+            solvePredicate(term, idlCtx, senEnts, convEnts) match
+                case Some(pm) =>
+                    Option(
+                        TermMatch(
+                            term.id,
+                            pm.entities,
+                            // If term match is non-empty we add the following 
weights:
+                            //   - min
+                            //   - delta between specified max and normalized 
max (how close the actual quantity was to the specified one).
+                            //   - normalized max
+                            // NOTE: 'usedEntities' can be empty.
+                            pm.weight.
+                                append(term.min).
+                                append(-(term.max - pm.entities.size)).
+                                // Normalize max quantifier in case of unbound 
max.
+                                append(if term.max == Integer.MAX_VALUE then 
pm.entities.size else term.max)
+                        )
+                    )
+                // Term not found at all.
+                case None => None
+        catch case e: Exception => throw new NCException(s"Runtime error 
processing IDL term: $term", e)
+
+    /**
+     * Solves term's predicate.
+     *
+     * @param term Term whose predicate to solve.
+     * @param idlCtx IDL context.
+     * @param senEnts Entities from the given sentence variant.
+     * @param convEnts Entities from the conversation (STM).
+     * @return Predicate match, if any.
+     */
+    private def solvePredicate(
+        term: NCIDLTerm,
+        idlCtx: NCIDLContext,
+        senEnts: Seq[IntentEntity],
+        convEnts: Seq[IntentEntity]
+    ): Option[PredicateMatch] =
+        // The algorithm is "greedy": it fetches all entities satisfying the term's predicate
+        // across the entire sentence, even if these entities are separated by other, already used entities.
+        // The conversation is used only to reach the term's 'max' quantifier.
+        val usedEnts = mutable.ArrayBuffer.empty[IntentEntity]
+        var usesSum = 0
+        var matchesCnt = 0
+
+        // Collect to the 'max' from sentence & conversation, if possible.
+        for (ents <- Seq(senEnts, convEnts); ent <- ents.filter(!_.used) if 
usedEnts.lengthCompare(term.max) < 0)
+            // TODO: idx == matchesCnt - ok?
+            val NCIDLStackItem(res, uses) = 
term.pred.apply(NCIDLEntity(ent.entity, matchesCnt), idlCtx)
+
+            res match
+                case b: java.lang.Boolean =>
+                    if b then
+                        matchesCnt += 1
+                        if uses > 0 then
+                            usesSum += uses
+                            usedEnts += ent
+
+                case _ => throw new NCException(s"Predicate returned 
non-boolean result: $res")
+
+        // We couldn't collect even 'min' matches.
+        if matchesCnt < term.min then
+            None
+        // Term is optional (min == 0) and no matches found (valid result).
+        else if matchesCnt == 0 then
+            require(term.min == 0)
+            require(usedEnts.isEmpty)
+
+            Option(PredicateMatch(List.empty, new Weight(0, 0, 0)))
+        // We've found some matches (and min > 0).
+        else
+            // Number of entities from the current sentence.
+            val senTokNum = usedEnts.count(e => !convEnts.contains(e))
+
+            // Sum of conversation depths for each entity taken from the conversation.
+            // Negated to make sure that bigger (smaller negative number) is 
better.
+            // TODO: check formula.
+            def getConversationDepth(e: IntentEntity): Option[Int] =
+                val depth = convEnts.indexOf(e)
+                Option.when(depth >= 0)(depth + 1)
+
+            val convDepthsSum = -usedEnts.flatMap(getConversationDepth).sum
+            
+            // Mark found entities as used.
+            for (e <- usedEnts) e.used = true
+
+            Option(PredicateMatch(usedEnts.toSeq, new Weight(senTokNum, 
convDepthsSum, usesSum)))
+
+    /**
+      *
+      * @param slvIn Intent solver input.
+      * @return Callback result of the winning intent, or 'None' if the model requested intent re-matching.
+      * @throws NCRejection Thrown when no matching intent is found.
+      */
+    private def solveIteration(slvIn: NCIntentSolverInput): Option[NCResult] =
+        // Should it be an assertion?
+        if intents.isEmpty then throw new NCRejection("Intent solver has no 
registered intents.")
+
+        val ctx = slvIn.context
+        val req = ctx.getRequest
+
+        val intentResults =
+            try solveIntents(ctx, intents)
+            catch case e: Exception => throw new NCRejection("Processing 
failed due to unexpected error.", e)
+
+        if intentResults.isEmpty then throw new NCRejection("No matching 
intent found.")
+
+        object Loop:
+            private var data: Option[Option[NCResult]] = None
+            private var stopped: Boolean = false
+
+            def hasNext: Boolean = !stopped
+            def finish(data: Option[NCResult]): Unit =
+                Loop.data = Option(data)
+                Loop.stopped = true
+            def result: Option[NCResult] = data.getOrElse(throw new 
NCRejection("No matching intent found - all intents were skipped."))
+
+        for (intentRes <- intentResults.filter(_ != null) if Loop.hasNext)
+            val intentMatch: NCIntentMatch =
+                new NCIntentMatch:
+                    override val getContext: NCContext = ctx
+                    override val getIntentId: String = intentRes.intentId
+                    override val getIntentEntities: JList[JList[NCEntity]] = 
intentRes.groups.map(_.entities).map(_.asJava).asJava
+                    override def getTermEntities(idx: Int): JList[NCEntity] = 
intentRes.groups(idx).entities.asJava
+                    override def getTermEntities(termId: String): 
JList[NCEntity] =
+                        intentRes.groups.find(_.termId === termId) match
+                            case Some(g) => g.entities.asJava
+                            case None => Collections.emptyList()
+                    override val getVariant: NCVariant =
+                        new NCVariant:
+                            override def getEntities: JList[NCEntity] = 
intentRes.variant.entities.asJava
+            try
+                if slvIn.model.onMatchedIntent(intentMatch) then
+                    // This can throw NCIntentSkip exception.
+                    val cbRes = intentRes.fn(intentMatch)
+
+                    // Store won intent match in the input.
+                    slvIn.intentMatch = intentMatch
+
+                    if cbRes.getIntentId == null then
+                        cbRes.setIntentId(intentRes.intentId)
+
+                    logger.info(s"Intent '${intentRes.intentId}' for variant 
#${intentRes.variantIdx + 1} selected as the <|best match|>")
+
+                    dialog.addMatchedIntent(intentMatch, cbRes, ctx)
+
+                    Loop.finish(Option(cbRes))
+                else
+                    logger.info(
+                        s"Model '${ctx.getModelConfig.getId}' triggered 
rematching of intents by intent '${intentRes.intentId}' on variant 
#${intentRes.variantIdx + 1}."
+                    )
+
+                    Loop.finish(None)
+            catch
+                case e: NCIntentSkip =>
+                    // No-op - just skipping this result.
+                    e.getMessage match
+                        case s if s != null => logger.info(s"Selected intent '${intentRes.intentId}' skipped: $s")
+                        case _ => logger.info(s"Selected intent '${intentRes.intentId}' skipped.")
+
+        Loop.result
+
+    /**
+      *
+      * @param in Intent solver input.
+      * @return Callback result of the winning intent.
+      * @throws NCRejection Thrown when no matching intent is found.
+      */
+    def solve(in: NCIntentSolverInput): NCResult =
+        var res: NCResult = null
+
+        while res == null do
+            solveIteration(in) match
+                case Some(iterRes) => res = iterRes
+                case None => // No-op.
+
+        res
\ No newline at end of file
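
The Weight class in the solver above orders weight vectors lexicographically, padding the shorter vector with zeroes. Below is a minimal standalone Scala sketch of that comparison rule; all names and values are illustrative only and not part of this commit:

    // Zero-padded, lexicographic weight comparison (illustrative re-implementation).
    def compareWeights(w1: Seq[Int], w2: Seq[Int]): Int =
        // Out-of-bounds components compare as zero.
        def norm(i: Int, w: Seq[Int]): Int = if i < w.length then w(i) else 0
        (0 until math.max(w1.length, w2.length))
            .map(i => Integer.compare(norm(i, w1), norm(i, w2)))
            .find(_ != 0)
            .getOrElse(0)

    @main def weightOrderDemo(): Unit =
        assert(compareWeights(Seq(1, 0, 5), Seq(1, 0, 3)) > 0) // Differs only at the 3rd component.
        assert(compareWeights(Seq(1, 0), Seq(1)) == 0)         // Shorter vector is padded with zeroes.
        assert(compareWeights(Seq(-1, 9), Seq(0, 0)) < 0)      // Earlier components dominate later ones.

This is why the intent match weight prepends the negated count of unused words: that component is compared first and dominates all term-level components.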
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
index 332139a..a385f90 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/makro/NCMacroCompiler.scala
@@ -115,7 +115,9 @@ object NCMacroCompiler extends LazyLogging:
 
             if ctx.minMaxShortcut() != null then
                 ctx.minMaxShortcut().getText match
-                    case "?" => min = 0; max = 1
+                    case "?" =>
+                        min = 0
+                        max = 1
                     case c => throw compilerError(s"Invalid min/max shortcut 
'$c' in: ${ctx.getText}")
             else if ctx.MINMAX() != null then
                 var s = ctx.MINMAX().getText
@@ -126,15 +128,11 @@ object NCMacroCompiler extends LazyLogging:
                 if comma == -1 || comma == 0 || comma == s.length - 1 then
                     throw compilerError(s"Invalid min/max quantifier: $orig")
 
-                try
-                    min = java.lang.Integer.parseInt(s.substring(0, 
comma).trim)
-                catch
-                    case _: NumberFormatException => throw 
compilerError(s"Invalid min quantifier: $orig")
+                try min = java.lang.Integer.parseInt(s.substring(0, 
comma).trim)
+                catch case _: NumberFormatException => throw 
compilerError(s"Invalid min quantifier: $orig")
 
-                try
-                    max = java.lang.Integer.parseInt(s.substring(comma + 
1).trim)
-                catch
-                    case _: NumberFormatException => throw 
compilerError(s"Invalid max quantifier: $orig")
+                try max = java.lang.Integer.parseInt(s.substring(comma + 
1).trim)
+                catch case _: NumberFormatException => throw 
compilerError(s"Invalid max quantifier: $orig")
 
             if min < 0 || max < 0 || min > max || max == 0 || max > MAX_QTY 
then
                 throw compilerError(s"[$min,$max] quantifiers should be 'max 
>= min, min >= 0, max > 0, max <= $MAX_QTY'.")
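
The quantifier handling above splits the min/max text on the comma and then validates the bounds. A self-contained sketch of that parse-and-validate flow follows; the limit of 100 and all names are assumptions for illustration only (the compiler defines its own MAX_QTY):

    // Illustrative only: parse "min,max" and apply the same bound checks as above.
    val MaxQty = 100 // Assumed limit for this sketch.

    def parseMinMax(orig: String): (Int, Int) =
        def fail(msg: String) = throw new IllegalArgumentException(msg)
        val comma = orig.indexOf(',')
        if comma == -1 || comma == 0 || comma == orig.length - 1 then fail(s"Invalid min/max quantifier: $orig")
        val min =
            try orig.substring(0, comma).trim.toInt
            catch case _: NumberFormatException => fail(s"Invalid min quantifier: $orig")
        val max =
            try orig.substring(comma + 1).trim.toInt
            catch case _: NumberFormatException => fail(s"Invalid max quantifier: $orig")
        if min < 0 || max < 0 || min > max || max == 0 || max > MaxQty then
            fail(s"[$min,$max] quantifiers should be 'max >= min, min >= 0, max > 0, max <= $MaxQty'.")
        (min, max)

For example, parseMinMax("2,5") yields (2, 5), while parseMinMax("5,2") is rejected by the bound check.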
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
index 73b898a..59b259e 100644
--- a/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
+++ b/nlpcraft/src/main/scala/org/apache/nlpcraft/internal/util/NCUtils.scala
@@ -20,12 +20,15 @@ package org.apache.nlpcraft.internal.util
 import com.typesafe.scalalogging.*
 import org.apache.nlpcraft.*
 import com.google.gson.*
+
 import java.io.*
 import java.net.*
-import java.util.concurrent.{CopyOnWriteArrayList, ExecutorService, TimeUnit} 
// Avoids conflicts.
+import java.time.{ZoneId, Instant, ZonedDateTime}
+import java.util.concurrent.{CopyOnWriteArrayList, ExecutorService, TimeUnit}
 import java.util.regex.Pattern
 import java.util.zip.*
-import java.util.{Random, UUID}
+import java.util.{Random, TimeZone}
+
 import scala.annotation.tailrec
 import scala.collection.{IndexedSeq, Seq, mutable}
 import scala.concurrent.*
@@ -41,6 +44,7 @@ import scala.util.Using
 object NCUtils extends LazyLogging:
     final val NL = System getProperty "line.separator"
     private val RND = new Random()
+    private final val UTC = ZoneId.of("UTC")
     private val sysProps = new SystemProperties
     private final lazy val GSON = new 
GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create()
 
@@ -199,6 +203,16 @@ object NCUtils extends LazyLogging:
         catch case e: Exception => E(s"Cannot extract JSON field '$field' 
from: '$json'", e)
 
     /**
+      * Gets now in UTC timezone.
+      */
+    def nowUtc(): ZonedDateTime = ZonedDateTime.now(UTC)
+
+    /**
+      * Gets now in UTC timezone in milliseconds representation.
+      */
+    def nowUtcMs(): Long = Instant.now().toEpochMilli
+
+    /**
       * Shortcut - current timestamp in milliseconds.
       */
     def now(): Long = System.currentTimeMillis()
@@ -333,7 +347,9 @@ object NCUtils extends LazyLogging:
             @volatile private var stopped = false
 
             override def isInterrupted: Boolean = super.isInterrupted || 
stopped
-            override def interrupt(): Unit =  stopped = true; super.interrupt()
+            override def interrupt(): Unit =
+                stopped = true
+                super.interrupt()
 
             override def run(): Unit =
                 logger.trace(s"Thread started: $name")
@@ -348,6 +364,15 @@ object NCUtils extends LazyLogging:
                     stopped = true
 
     /**
+      *
+      * @param prefix Thread name prefix.
+      * @param mdlId Model ID appended to the thread name.
+      * @param body Thread body.
+      * @return Created thread.
+      */
+    def mkThread(prefix: String, mdlId: String)(body: Thread => Unit): Thread 
= mkThread(s"$prefix-@$mdlId")(body)
+
+    /**
       * Gets resource existing flag.
       *
       * @param res Resource.
@@ -389,15 +414,6 @@ object NCUtils extends LazyLogging:
         else E(s"Source not found or unsupported: $src")
 
     /**
-      * Makes thread.
-      *
-      * @param name Name.
-      * @param body Thread body.
-      */
-    def mkThread(name: String, body: Runnable): Thread =
-        mkThread(name) { _ => body.run() }
-
-    /**
       * Sleeps number of milliseconds properly handling exceptions.
       *
       * @param delay Number of milliseconds to sleep.
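
The new mkThread(prefix, mdlId) overload above only derives the thread name from the prefix and the model ID before delegating to the existing factory. A hedged usage sketch, where the model ID and the thread body are made up for illustration:

    // Illustrative usage of the added overload; "my.model.id" is a made-up model ID.
    val worker: Thread = NCUtils.mkThread("housekeeping", "my.model.id") { t =>
        // Spin until the owning code interrupts this thread.
        while !t.isInterrupted do Thread.onSpinWait()
    }
    worker.start()
    worker.interrupt()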
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/en/impl/NCBracketsTokenEnricherImpl.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/en/impl/NCBracketsTokenEnricherImpl.scala
index 3a88225..cd5f3bf 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/en/impl/NCBracketsTokenEnricherImpl.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/enricher/en/impl/NCBracketsTokenEnricherImpl.scala
@@ -39,11 +39,21 @@ class NCBracketsTokenEnricherImpl extends NCTokenEnricher 
with LazyLogging:
 
         for (t <- toks.asScala if ok)
             t.getText match
-                case "(" | "{" | "[" | "<" => mark(t); stack.push(t.getText)
-                case ")" => check("("); mark(t)
-                case "}" => check("{"); mark(t)
-                case "]" => check("["); mark(t)
-                case ">" => check("<"); mark(t)
+                case "(" | "{" | "[" | "<" =>
+                    mark(t)
+                    stack.push(t.getText)
+                case ")" =>
+                    check("(")
+                    mark(t)
+                case "}" =>
+                    check("{")
+                    mark(t)
+                case "]" =>
+                    check("[")
+                    mark(t)
+                case ">" =>
+                    check("<")
+                    mark(t)
                 case _ => mark(t)
 
         if ok && stack.isEmpty then map.foreach { (tok, b) => 
tok.put("brackets", b) }
diff --git 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/impl/NCOpenNLPTokenParserImpl.scala
 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/impl/NCOpenNLPTokenParserImpl.scala
index 44adafd..c1074fb 100644
--- 
a/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/impl/NCOpenNLPTokenParserImpl.scala
+++ 
b/nlpcraft/src/main/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/impl/NCOpenNLPTokenParserImpl.scala
@@ -51,18 +51,18 @@ class NCOpenNLPTokenParserImpl(tokMdl: String,  posMdlSrc: 
String, lemmaDicSrc:
 
     private def init(): Unit =
         NCUtils.execPar(
-            () =>
-                tagger = new POSTaggerME(new 
POSModel(NCUtils.getStream(posMdlSrc)));
+            () => {
+                tagger = new POSTaggerME(new 
POSModel(NCUtils.getStream(posMdlSrc)))
                 logger.trace(s"Loaded resource: $posMdlSrc")
-            ,
-            () =>
-                lemmatizer = new 
DictionaryLemmatizer(NCUtils.getStream(lemmaDicSrc));
+            },
+            () => {
+                lemmatizer = new 
DictionaryLemmatizer(NCUtils.getStream(lemmaDicSrc))
                 logger.trace(s"Loaded resource: $lemmaDicSrc")
-            ,
-            () =>
-                tokenizer = new TokenizerME(new 
TokenizerModel(NCUtils.getStream(tokMdl)));
+            },
+            () => {
+                tokenizer = new TokenizerME(new 
TokenizerModel(NCUtils.getStream(tokMdl)))
                 logger.trace(s"Loaded resource: $tokMdl")
-
+            }
         )(ExecutionContext.Implicits.global)
 
     override def tokenize(text: String): JList[NCToken] =
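
The parser initialization above loads the three OpenNLP resources in parallel via NCUtils.execPar. A rough stand-in using plain Scala futures shows the shape of that pattern; this is not the actual NCUtils implementation, just a sketch:

    import scala.concurrent.{Await, ExecutionContext, Future}
    import scala.concurrent.duration.Duration

    // Runs the given no-arg tasks concurrently and waits for all of them to finish.
    def execParSketch(tasks: (() => Unit)*)(using ExecutionContext): Unit =
        Await.result(Future.sequence(tasks.map(t => Future(t()))), Duration.Inf)

    @main def execParDemo(): Unit =
        given ExecutionContext = ExecutionContext.global
        execParSketch(
            () => println("loading POS tagger model..."),
            () => println("loading lemmatizer dictionary..."),
            () => println("loading tokenizer model...")
        )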
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/conversation/NCConversationManagerSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/conversation/NCConversationManagerSpec.scala
new file mode 100644
index 0000000..c402a99
--- /dev/null
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/conversation/NCConversationManagerSpec.scala
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.conversation
+
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.nlp.util.*
+import org.junit.jupiter.api.Test
+
+import java.util.function.Predicate
+
+/**
+  *
+  */
+class NCConversationManagerSpec:
+    case class ModelConfigMock(timeout: Long = Long.MaxValue) extends 
NCModelConfig("testId", "test", "1.0", "Test description", "Test origin"):
+        override def getConversationTimeout: Long = timeout
+
+    @Test
+    def test(): Unit =
+        val mgr = NCConversationManager(ModelConfigMock())
+        val t = NCTestToken()
+        val reqId = "req1"
+
+        val conv = mgr.getConversation("user1")
+
+        def checkSize(size: Int): Unit =
+            require(conv.getEntities.sizeIs == size, s"Unexpected entities 
size: ${conv.getEntities.size}, expected: $size")
+
+        // Initial empty.
+        checkSize(0)
+
+        // Added. Still empty.
+        conv.addEntities(reqId, Seq(NCTestEntity("e1", reqId, tokens = t), 
NCTestEntity("e2", reqId, tokens = t)))
+        checkSize(0)
+
+        // Updated. Not empty.
+        conv.updateEntities()
+        checkSize(2)
+
+        // Partially cleared.
+        conv.clear(_.getId == "e1")
+        checkSize(1)
+        require(conv.getEntities.head.getId == "e2")
+
+    @Test
+    def testTimeout(): Unit =
+        val timeout = 1000
+
+        val mgr = NCConversationManager(ModelConfigMock(timeout))
+        val t = NCTestToken()
+        val reqId = "req1"
+
+        // TODO: Drop method and use saved conversation instead - error is 
thrown
+        def getConversation: NCConversationData = mgr.getConversation("user1")
+
+        def checkSize(size: Int): Unit =
+            val conv = getConversation
+            require(conv.getEntities.sizeIs == size, s"Unexpected entities 
size: ${conv.getEntities.size}, expected: $size")
+
+        // Initial empty.
+        checkSize(0)
+
+        // Added. Still empty.
+        getConversation.addEntities(reqId, Seq(NCTestEntity("e1", reqId, 
tokens = t), NCTestEntity("e2", reqId, tokens = t)))
+        checkSize(0)
+
+        // Updated. Not empty.
+        getConversation.updateEntities()
+        checkSize(2)
+
+        // Cleared by timeout.
+        try
+            mgr.start()
+            Thread.sleep(timeout * 2)
+            checkSize(0)
+        finally
+            mgr.close()
+
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManagerSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManagerSpec.scala
new file mode 100644
index 0000000..b8d0736
--- /dev/null
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/dialogflow/NCDialogFlowManagerSpec.scala
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.dialogflow
+
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.internal.util.NCUtils
+import org.apache.nlpcraft.nlp.util.NCTestRequest
+import org.junit.jupiter.api.*
+
+import java.util
+import java.util.function.Predicate
+
+/**
+  *
+  */
+class NCDialogFlowManagerSpec:
+    case class IntentMatchMock(intentId: String, ctx: NCContext) extends 
NCIntentMatch:
+        override val getContext: NCContext = ctx
+        override val getIntentId: String = intentId
+        override val getIntentEntities: util.List[util.List[NCEntity]] = null
+        override def getTermEntities(idx: Int): util.List[NCEntity] = null
+        override def getTermEntities(termId: String): util.List[NCEntity] = 
null
+        override val getVariant: NCVariant = null
+
+    case class ContextMock(userId: String, reqTs: Long = NCUtils.now()) 
extends NCContext:
+        override def isOwnerOf(ent: NCEntity): Boolean = false
+        override def getModelConfig: NCModelConfig = null
+        override def getRequest: NCRequest = NCTestRequest(txt = "Any", userId 
= userId, ts = reqTs)
+        override def getConversation: NCConversation = null
+        override def getVariants: util.Collection[NCVariant] = null
+        override def getTokens: util.List[NCToken] = null
+
+    case class ModelConfigMock(timeout: Long = Long.MaxValue) extends 
NCModelConfig("testId", "test", "1.0", "Test description", "Test origin"):
+        override def getConversationTimeout: Long = timeout
+
+    private var mgr: NCDialogFlowManager = _
+
+    /**
+      *
+      * @param expSizes
+      */
+    private def check(expSizes: (String, Int)*): Unit =
+        for ((usrId, expSize) <- expSizes)
+            val size = mgr.getDialogFlow(usrId).size
+            require(size == expSize, s"Expected: $expSize for '$usrId', but 
found: $size")
+
+    /**
+      *
+      * @param userIds
+      */
+    private def ask(userIds: String*): Unit = for (userId <- userIds) 
mgr.ack(userId)
+
+    /**
+      *
+      * @param id
+      * @param ctx
+      */
+    private def addMatchedIntent(id: String, ctx: NCContext): Unit = 
mgr.addMatchedIntent(IntentMatchMock(id, ctx), null, ctx)
+
+    /**
+      *
+      */
+    @AfterEach
+    def cleanUp(): Unit = if mgr != null then mgr.close()
+
+    @Test
+    def test(): Unit =
+        mgr = NCDialogFlowManager(ModelConfigMock())
+
+        val now = NCUtils.now()
+
+        addMatchedIntent("i11", ContextMock("user1"))
+        addMatchedIntent("i12", ContextMock("user1"))
+        addMatchedIntent("i21", ContextMock("user2"))
+        addMatchedIntent("i22", ContextMock("user2"))
+        addMatchedIntent("i31", ContextMock("user3"))
+
+        // Initial.
+        ask("user1", "user2", "user3", "user4")
+        check("user1" -> 2, "user2" -> 2, "user3" -> 1, "user4" -> 0)
+
+        mgr.clear(usrId = "user4")
+        check("user1" -> 2, "user2" -> 2, "user3" -> 1, "user4" -> 0)
+
+        mgr.clear(usrId = "user1")
+        check("user1" -> 0, "user2" -> 2, "user3" -> 1, "user4" -> 0)
+
+        mgr.clear(usrId = "user2", _.getIntentMatch.getIntentId == "i21")
+        check("user1" -> 0, "user2" -> 1, "user3" -> 1, "user4" -> 0)
+
+        mgr.clear(usrId = "user2")
+        mgr.clear(usrId = "user3")
+        check("user1" -> 0, "user2" -> 0, "user3" -> 0, "user4" -> 0)
+
+    @Test
+    def testTimeout(): Unit =
+        val delay = 10
+        val timeout = delay * 1000
+
+        mgr = NCDialogFlowManager(ModelConfigMock(timeout))
+
+        val now = NCUtils.now()
+
+        addMatchedIntent("any", ContextMock("user1", now))
+        addMatchedIntent("any", ContextMock("user1", now - timeout - delay))
+        addMatchedIntent("any", ContextMock("user2", now - timeout))
+
+        // Initial.
+        ask("user1", "user2")
+        check("user1" -> 2, "user2" -> 1)
+
+        mgr.start()
+
+        Thread.sleep(delay * 5)
+        check("user1" -> 1, "user2" -> 0)
+
+        mgr.close()
+        check("user1" -> 0, "user2" -> 0)
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelClientSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelClientSpec.scala
new file mode 100644
index 0000000..95a0222
--- /dev/null
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelClientSpec.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.nlpcraft.internal.impl
+
+import org.apache.nlpcraft.*
+import org.apache.nlpcraft.nlp.entity.parser.semantic.NCSemanticEntityParser
+import 
org.apache.nlpcraft.nlp.entity.parser.semantic.impl.en.NCEnSemanticPorterStemmer
+import org.apache.nlpcraft.nlp.util.NCTestModelAdapter
+import org.apache.nlpcraft.nlp.util.opennlp.*
+import org.junit.jupiter.api.Test
+
+import scala.jdk.CollectionConverters.*
+import scala.util.Using
+
+class NCModelClientSpec:
+    private def test0(mdl: NCTestModelAdapter): Unit =
+        mdl.getPipeline.getEntityParsers.add(
+            new NCSemanticEntityParser(
+                new NCEnSemanticPorterStemmer,
+                EN_PIPELINE.getTokenParser,
+                "models/lightswitch_model.yaml"
+            )
+        )
+
+        Using.resource(new NCModelClient(mdl)) { client =>
+            val res = client.ask("Lights on at second floor kitchen", null, 
"userId")
+
+            println(s"Intent: ${res.getIntentId}")
+            println(s"Body: ${res.getBody}")
+
+            client.validateSamples()
+        }
+    /**
+      *
+      */
+    @Test
+    def test(): Unit =
+        test0(
+            new NCTestModelAdapter():
+                @NCIntentSample(Array("Lights on at second floor kitchen", 
"Invalid sample"))
+                @NCIntent("intent=ls term(act)={# == 'ls:on'} term(loc)={# == 
'ls:loc'}*")
+                def onMatch(@NCIntentTerm("act") act: NCEntity, 
@NCIntentTerm("loc") locs: List[NCEntity]): NCResult = new NCResult()
+        )
+
+    @Test
+    def test2(): Unit =
+        test0(
+            new NCTestModelAdapter():
+                @NCIntent("intent=ls term(act)={has(ent_groups, 'act')} 
term(loc)={# == 'ls:loc'}*")
+                @NCIntentSample(Array("Lights on at second floor kitchen", 
"Invalid sample"))
+                def onMatch(@NCIntentTerm("act") act: NCEntity, 
@NCIntentTerm("loc") locs: List[NCEntity]): NCResult = new NCResult()
+        )
+
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/NCModelPipelineProcessorSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManagerSpec.scala
similarity index 54%
rename from 
nlpcraft/src/test/scala/org/apache/nlpcraft/internal/NCModelPipelineProcessorSpec.scala
rename to 
nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManagerSpec.scala
index 9a63b4b..76371c0 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/NCModelPipelineProcessorSpec.scala
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/NCModelPipelineManagerSpec.scala
@@ -15,10 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.nlpcraft.internal
+package org.apache.nlpcraft.internal.impl
 
 import org.apache.nlpcraft.*
-import org.apache.nlpcraft.internal.impl.NCModelPipelineProcessor
 import org.apache.nlpcraft.nlp.entity.parser.nlp.impl.NCNLPEntityParserImpl
 import org.apache.nlpcraft.nlp.entity.parser.semantic.*
 import 
org.apache.nlpcraft.nlp.entity.parser.semantic.impl.en.NCEnSemanticPorterStemmer
@@ -36,7 +35,7 @@ import scala.jdk.CollectionConverters.*
 /**
   *
   */
-class NCModelPipelineProcessorSpec:
+class NCModelPipelineManagerSpec:
     /**
       *
       */
@@ -49,61 +48,15 @@ class NCModelPipelineProcessorSpec:
             pipeline.getEntityParsers.clear()
             pipeline.getEntityParsers.add(parser)
 
-            val res = new NCModelPipelineProcessor(new NCModelAdapter(CFG, 
pipeline)).prepVariants(txt, null, "userId")
+            val res = new NCModelPipelineManager(CFG, pipeline).prepare(txt, 
null, "userId")
 
-            println(s"Variants count: ${res.vars.size}")
-            for ((v, idx) <- res.vars.zipWithIndex)
+            println(s"Variants count: ${res.variants.size}")
+            for ((v, idx) <- res.variants.zipWithIndex)
                 println(s"Variant: $idx")
                 NCTestUtils.printEntities(txt, v.getEntities.asScala.toSeq)
 
-            require(res.vars.sizeIs == variantCnt)
+            require(res.variants.sizeIs == variantCnt)
 
         test("t1 t2", 4, NCSemanticTestElement("t1", "t2"), 
NCSemanticTestElement("t2", "t1"))
         test("t1 t2", 2, NCSemanticTestElement("t1", "t2"), 
NCSemanticTestElement("t2"))
 
-    /**
-      *
-      * @param delayMs
-      * @param iterCnt
-      * @return
-      */
-    private def mkSlowPipelineProcessor(delayMs: Long, iterCnt: Int): 
NCModelPipelineProcessor =
-        val pipeline = EN_PIPELINE.clone()
-
-        pipeline.getEntityParsers.clear()
-
-        def mkSlowParser(i: Int) =
-            new NCEntityParser:
-                override def parse(req: NCRequest, cfg: NCModelConfig, toks: 
JList[NCToken]): JList[NCEntity] =
-                    println(s"Parser called: $i")
-                    Thread.sleep(delayMs)
-                    java.util.Collections.emptyList()
-
-        (0 until iterCnt).foreach(i => 
pipeline.getEntityParsers.add(mkSlowParser(i)))
-
-        NCModelPipelineProcessor(new NCModelAdapter(CFG, pipeline))
-
-    /**
-      *
-      */
-    @Test
-    def testCancel(): Unit =
-        val fut = mkSlowPipelineProcessor(1, 10000).ask("any", null, "userId")
-
-        Thread.sleep(20)
-        require(fut.cancel(true))
-        Thread.sleep(20)
-
-        Assertions.assertThrows(classOf[CancellationException], () => fut.get)
-
-    /**
-      *
-      */
-    @Test
-    def testTimeout(): Unit =
-        val fut = mkSlowPipelineProcessor(1, 10000).ask("any", null, "userId")
-
-        Thread.sleep(20)
-
-        try Assertions.assertThrows(classOf[TimeoutException], () => 
fut.get(1, TimeUnit.MILLISECONDS))
-        finally fut.cancel(true)
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsInvalidArgsSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsInvalidArgsSpec.scala
index 2ed745e..ebd5ed5 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsInvalidArgsSpec.scala
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsInvalidArgsSpec.scala
@@ -68,12 +68,13 @@ class NCModelIntentsInvalidArgsSpec:
         def col[T](t: T): util.List[T] = java.util.Collections.singletonList(t)
 
         new NCIntentMatch:
-            override def getIntentId: String = "intentId"
-            override def getIntentEntities: util.List[util.List[NCEntity]] = 
col(col(e))
+            override val getContext: NCContext = null
+            override val getIntentId: String = "intentId"
+            override val getIntentEntities: util.List[util.List[NCEntity]] = 
col(col(e))
             override def getTermEntities(idx: Int): util.List[NCEntity] = 
col(e)
             override def getTermEntities(termId: String): util.List[NCEntity] 
= col(e)
-            override def getVariant: NCVariant = new NCVariant:
-                override def getEntities: util.List[NCEntity] = col(e)
+            override val getVariant: NCVariant = new NCVariant:
+                override val getEntities: util.List[NCEntity] = col(e)
 
     private def mkResult0(obj: Any): NCResult =
         println(s"Result body: $obj, class=${obj.getClass}")
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsNestedSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsNestedSpec.scala
index 91e2f09..833832b 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsNestedSpec.scala
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCModelIntentsNestedSpec.scala
@@ -50,8 +50,7 @@ class NCModelIntentsNestedSpec:
 
     private val MDL_VALID2: NCModel = new NCTestModelAdapter:
         @NCIntent("import('scan/idl.idl')")
-        class RefClass:
-            ()
+        class RefClass
 
         @NCIntentObject
         val nested1: Object = new Object():
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCTestModelJava.java
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCTestModelJava.java
index 96560a6..1f2763e 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCTestModelJava.java
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/impl/scan/NCTestModelJava.java
@@ -17,15 +17,14 @@
 
 package org.apache.nlpcraft.internal.impl.scan;
 
+import org.apache.nlpcraft.NCEntity;
 import org.apache.nlpcraft.NCIntent;
-import org.apache.nlpcraft.NCIntentRef;
 import org.apache.nlpcraft.NCIntentSample;
 import org.apache.nlpcraft.NCIntentSampleRef;
 import org.apache.nlpcraft.NCIntentTerm;
 import org.apache.nlpcraft.NCModel;
 import org.apache.nlpcraft.NCModelAdapter;
 import org.apache.nlpcraft.NCResult;
-import org.apache.nlpcraft.NCEntity;
 import org.apache.nlpcraft.nlp.util.opennlp.NCTestConfigJava;
 
 import java.util.List;
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/intent/compiler/functions/NCIDLFunctionsModel.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/intent/compiler/functions/NCIDLFunctionsModel.scala
index af53525..3f9dd12 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/intent/compiler/functions/NCIDLFunctionsModel.scala
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/internal/intent/compiler/functions/NCIDLFunctionsModel.scala
@@ -18,8 +18,8 @@
 package org.apache.nlpcraft.internal.intent.compiler.functions
 
 import org.apache.nlpcraft.internal.intent.compiler.functions.NCIDLFunctions.*
-import org.junit.jupiter.api.Test
 import org.apache.nlpcraft.nlp.util.opennlp.*
+import org.junit.jupiter.api.Test
 
 import scala.language.implicitConversions
 
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/NCOpenNLPTokenParserSpec.scala
 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/NCOpenNLPTokenParserSpec.scala
index e417397..12b52d3 100644
--- 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/NCOpenNLPTokenParserSpec.scala
+++ 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/token/parser/opennlp/NCOpenNLPTokenParserSpec.scala
@@ -50,16 +50,17 @@ class NCOpenNLPTokenParserSpec:
         test(
             "Test requests!",
             toks =>
-                require(toks.sizeIs == 3);
-                require(!isStopWord(toks.head));
+                require(toks.sizeIs == 3)
+                require(!isStopWord(toks.head))
                 require(isStopWord(toks.last))
         )
         test(
             "Test requests !",
-            toks =>
-                require(toks.sizeIs == 3);
-                require(!isStopWord(toks.head));
+            toks => {
+                require(toks.sizeIs == 3)
+                require(!isStopWord(toks.head))
                 require(isStopWord(toks.last))
+            }
         )
         test(
             // First and last are stop words,
@@ -67,21 +68,23 @@ class NCOpenNLPTokenParserSpec:
             // Note that "a ` a a` a" parsed as 5 tokens ("a", "`", ""a, "a`", 
"a") because OpenNLP tokenizer logic,
             // So we use spaces around quotes to simplify test.
             "a ` a a ` a",
-            toks =>
-                require(toks.sizeIs == 6);
-                require(isStopWord(toks.head));
-                require(isStopWord(toks.last));
+            toks => {
+                require(toks.sizeIs == 6)
+                require(isStopWord(toks.head))
+                require(isStopWord(toks.last))
                 require(toks.drop(1).reverse.drop(1).forall(p => 
!isStopWord(p)))
+            }
         )
         test(
             // First and last are stop words,
             // Third and fourth are not because brackets.
             "a ( a a ) a",
-            toks =>
-                require(toks.sizeIs == 6);
-                require(isStopWord(toks.head));
-                require(isStopWord(toks.last));
+            toks => {
+                require(toks.sizeIs == 6)
+                require(isStopWord(toks.head))
+                require(isStopWord(toks.last))
                 require(toks.drop(1).reverse.drop(1).forall(p => 
!isStopWord(p)))
+            }
         )
         test(
             // Invalid brackets.
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestEntity.scala 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestEntity.scala
index 0640a80..945c667 100644
--- a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestEntity.scala
+++ b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestEntity.scala
@@ -19,9 +19,9 @@ package org.apache.nlpcraft.nlp.util
 
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.nlp.util.NCTestPipeline.*
+
+import java.util.{List as JList, Set as JSet}
 import scala.jdk.CollectionConverters.*
-import java.util.Set as JSet
-import java.util.List as JList
 
 /**
   * Entity test implementation.
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestPipeline.scala 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestPipeline.scala
index 395dc47..5a27e9e 100644
--- a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestPipeline.scala
+++ b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestPipeline.scala
@@ -17,9 +17,9 @@
 
 package org.apache.nlpcraft.nlp.util
 
-import org.apache.nlpcraft.nlp.util.NCTestPipeline.*
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.nlp.token.parser.opennlp.NCOpenNLPTokenParser
+import org.apache.nlpcraft.nlp.util.NCTestPipeline.*
 
 import java.util.{Optional, ArrayList as JList}
 
diff --git 
a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestRequest.scala 
b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestRequest.scala
index 29d08c8..89c81d9 100644
--- a/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestRequest.scala
+++ b/nlpcraft/src/test/scala/org/apache/nlpcraft/nlp/util/NCTestRequest.scala
@@ -19,9 +19,10 @@ package org.apache.nlpcraft.nlp.util
 
 import org.apache.nlpcraft.*
 import org.apache.nlpcraft.nlp.util.NCTestPipeline.*
-import scala.jdk.CollectionConverters.*
+
 import java.util
 import java.util.Map as JMap
+import scala.jdk.CollectionConverters.*
 
 /**
   * Request test implementation.
