This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git

commit 8111811bc6c47a02af8363d17e159800d08cae05
Author: WenjinXie <[email protected]>
AuthorDate: Wed Jan 14 15:26:32 2026 +0800

    [api][python] Provide constants to point to resources, to avoid importing 
the integration package.
    
    fix
    
    fix
    
    fix
---
 .../apache/flink/agents/api/resource/Constant.java |  28 +++-
 .../test/ChatModelIntegrationAgent.java            |  16 +--
 .../test/EmbeddingIntegrationAgent.java            |   4 +-
 .../agents/integration/test/ReActAgentTest.java    |   4 +-
 .../test/VectorStoreIntegrationAgent.java          |   4 +-
 .../resource/test/ChatModelCrossLanguageAgent.java |   9 +-
 .../resource/test/EmbeddingCrossLanguageAgent.java |   9 +-
 .../test/VectorStoreCrossLanguageAgent.java        |  23 ++-
 examples/pom.xml                                   |   5 -
 .../flink/agents/examples/ReActAgentExample.java   |   4 +-
 .../examples/agents/ProductSuggestionAgent.java    |   4 +-
 .../examples/agents/ReviewAnalysisAgent.java       |   4 +-
 python/flink_agents/api/agents/agent.py            |  12 +-
 python/flink_agents/api/agents/react_agent.py      |   2 +-
 python/flink_agents/api/execution_environment.py   |  12 +-
 python/flink_agents/api/resource.py                | 159 ++++++++++++++-------
 .../chat_model_integration_agent.py                |  35 ++---
 .../e2e_tests_mcp/mcp_test.py                      |  13 +-
 .../e2e_tests_integration/react_agent_test.py      |  28 ++--
 .../chat_model_cross_language_agent.py             |  20 ++-
 .../embedding_model_cross_language_agent.py        |  10 +-
 .../vector_store_cross_language_agent.py           |  23 +--
 .../e2e_tests/long_term_memory_test.py             |  23 ++-
 .../agents/custom_types_and_resources.py           |   7 +-
 .../quickstart/agents/product_suggestion_agent.py  |   7 +-
 .../quickstart/agents/review_analysis_agent.py     |   7 +-
 .../examples/quickstart/react_agent_example.py     |  13 +-
 .../quickstart/workflow_multiple_agent_example.py  |   2 +
 .../quickstart/workflow_single_agent_example.py    |   2 +
 .../flink_agents/examples/rag/rag_agent_example.py |  27 ++--
 python/flink_agents/plan/resource_provider.py      |   2 +-
 .../python_agent_plan_compatibility_test_agent.py  |   2 +-
 python/flink_agents/plan/tests/test_agent_plan.py  |  20 +--
 .../plan/tests/test_resource_provider.py           |   2 +-
 .../runtime/tests/test_built_in_actions.py         |   4 +-
 .../runtime/tests/test_get_resource_in_action.py   |   2 +-
 36 files changed, 284 insertions(+), 264 deletions(-)

diff --git 
a/api/src/main/java/org/apache/flink/agents/api/resource/Constant.java 
b/api/src/main/java/org/apache/flink/agents/api/resource/Constant.java
index 4f893f8d..bda3f200 100644
--- a/api/src/main/java/org/apache/flink/agents/api/resource/Constant.java
+++ b/api/src/main/java/org/apache/flink/agents/api/resource/Constant.java
@@ -26,38 +26,56 @@ package org.apache.flink.agents.api.resource;
  */
 public class Constant {
     // Built-in ChatModel
+    // python wrapper
+    public static String PYTHON_CHAT_MODEL_CONNECTION =
+            
"org.apache.flink.agents.api.chat.model.python.PythonChatModelConnection";
+    public static String PYTHON_CHAT_MODEL_SETUP =
+            
"org.apache.flink.agents.api.chat.model.python.PythonChatModelSetup";
+
     // ollama
     public static String OLLAMA_CHAT_MODEL_CONNECTION =
             
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection";
-    public static String OLLAMA_CHAT_MODEL =
+    public static String OLLAMA_CHAT_MODEL_SETUP =
             
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup";
 
     // anthropic
     public static String ANTHROPIC_CHAT_MODEL_CONNECTION =
             
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelConnection";
-    public static String ANTHROPIC_CHAT_MODEL =
+    public static String ANTHROPIC_CHAT_MODEL_SETUP =
             
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelSetup";
 
     // Azure
     public static String AZURE_CHAT_MODEL_CONNECTION =
             
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelConnection";
-    public static String AZURE_CHAT_MODEL =
+    public static String AZURE_CHAT_MODEL_SETUP =
             
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelSetup";
 
     // OpenAI
     public static String OPENAI_CHAT_MODEL_CONNECTION =
             
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelConnection";
-    public static String OPENAI_CHAT_MODEL =
+    public static String OPENAI_CHAT_MODEL_SETUP =
             
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelSetup";
 
     // Built-in EmbeddingModel
+    // python wrapper
+    public static String PYTHON_EMBEDDING_MODEL_CONNECTION =
+            
"org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelConnection";
+    public static String PYTHON_EMBEDDING_MODEL_SETUP =
+            
"org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelSetup";
+
     // ollama
     public static String OLLAMA_EMBEDDING_MODEL_CONNECTION =
             
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection";
-    public static String OLLAMA_EMBEDDING_MODEL =
+    public static String OLLAMA_EMBEDDING_MODEL_SETUP =
             
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup";
 
     // Built-in VectorStore
+    // python wrapper
+    public static String PYTHON_VECTOR_STORE =
+            
"org.apache.flink.agents.api.vectorstores.python.PythonVectorStore";
+    public static String PYTHON_COLLECTION_MANAGEABLE_VECTOR_STORE =
+            
"org.apache.flink.agents.api.vectorstores.python.PythonCollectionManageableVectorStore";
+
     // elasticsearch
     public static String ELASTICSEARCH_VECTOR_STORE =
             
"org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore";
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ChatModelIntegrationAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ChatModelIntegrationAgent.java
index 75b097f0..2de3f437 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ChatModelIntegrationAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ChatModelIntegrationAgent.java
@@ -39,14 +39,14 @@ import org.apache.flink.agents.api.resource.ResourceType;
 import java.util.Collections;
 import java.util.List;
 
-import static 
org.apache.flink.agents.api.resource.Constant.ANTHROPIC_CHAT_MODEL;
 import static 
org.apache.flink.agents.api.resource.Constant.ANTHROPIC_CHAT_MODEL_CONNECTION;
-import static org.apache.flink.agents.api.resource.Constant.AZURE_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.ANTHROPIC_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.api.resource.Constant.AZURE_CHAT_MODEL_CONNECTION;
-import static org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.AZURE_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_CONNECTION;
-import static org.apache.flink.agents.api.resource.Constant.OPENAI_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.api.resource.Constant.OPENAI_CHAT_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.OPENAI_CHAT_MODEL_SETUP;
 
 /**
  * Agent example that integrates an external Ollama chat model into Flink 
Agents.
@@ -108,7 +108,7 @@ public class ChatModelIntegrationAgent extends Agent {
         String provider = System.getProperty("MODEL_PROVIDER", "OLLAMA");
 
         if (provider.equals("OLLAMA")) {
-            return ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL_SETUP)
                     .addInitialArgument("connection", "chatModelConnection")
                     .addInitialArgument("model", OLLAMA_MODEL)
                     .addInitialArgument(
@@ -116,7 +116,7 @@ public class ChatModelIntegrationAgent extends Agent {
                             List.of("calculateBMI", "convertTemperature", 
"createRandomNumber"))
                     .build();
         } else if (provider.equals("AZURE")) {
-            return ResourceDescriptor.Builder.newBuilder(AZURE_CHAT_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(AZURE_CHAT_MODEL_SETUP)
                     .addInitialArgument("connection", "chatModelConnection")
                     .addInitialArgument("model", "gpt-4o")
                     .addInitialArgument(
@@ -124,7 +124,7 @@ public class ChatModelIntegrationAgent extends Agent {
                             List.of("calculateBMI", "convertTemperature", 
"createRandomNumber"))
                     .build();
         } else if (provider.equals("ANTHROPIC")) {
-            return ResourceDescriptor.Builder.newBuilder(ANTHROPIC_CHAT_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(ANTHROPIC_CHAT_MODEL_SETUP)
                     .addInitialArgument("connection", "chatModelConnection")
                     .addInitialArgument("model", "claude-sonnet-4-20250514")
                     .addInitialArgument(
@@ -132,7 +132,7 @@ public class ChatModelIntegrationAgent extends Agent {
                             List.of("calculateBMI", "convertTemperature", 
"createRandomNumber"))
                     .build();
         } else if (provider.equals("OPENAI")) {
-            return ResourceDescriptor.Builder.newBuilder(OPENAI_CHAT_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(OPENAI_CHAT_MODEL_SETUP)
                     .addInitialArgument("connection", "chatModelConnection")
                     .addInitialArgument("model", "gpt-4o-mini")
                     .addInitialArgument(
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/EmbeddingIntegrationAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/EmbeddingIntegrationAgent.java
index 8a7bcec0..7fb83d7f 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/EmbeddingIntegrationAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/EmbeddingIntegrationAgent.java
@@ -34,8 +34,8 @@ import 
org.apache.flink.agents.api.resource.ResourceDescriptor;
 import java.util.HashMap;
 import java.util.Map;
 
-import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL;
 import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_SETUP;
 
 /**
  * Integration test agent for verifying embedding functionality with Ollama 
models.
@@ -67,7 +67,7 @@ public class EmbeddingIntegrationAgent extends Agent {
     public static ResourceDescriptor embeddingModel() {
         String provider = System.getProperty("MODEL_PROVIDER", "OLLAMA");
         if (provider.equals("OLLAMA")) {
-            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL_SETUP)
                     .addInitialArgument("connection", "embeddingConnection")
                     .addInitialArgument("model", OLLAMA_MODEL)
                     .build();
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ReActAgentTest.java
 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ReActAgentTest.java
index 7d4a005d..8c61a7fe 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ReActAgentTest.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/ReActAgentTest.java
@@ -49,8 +49,8 @@ import java.util.List;
 
 import static 
org.apache.flink.agents.api.agents.AgentExecutionOptions.ERROR_HANDLING_STRATEGY;
 import static 
org.apache.flink.agents.api.agents.AgentExecutionOptions.MAX_RETRIES;
-import static org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL;
 import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.integration.test.OllamaPreparationUtils.pullModel;
 
 public class ReActAgentTest {
@@ -155,7 +155,7 @@ public class ReActAgentTest {
     // create ReAct agent.
     private static Agent getAgent() {
         ResourceDescriptor chatModelDescriptor =
-                ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL)
+                ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL_SETUP)
                         .addInitialArgument("connection", "ollama")
                         .addInitialArgument("model", OLLAMA_MODEL)
                         .addInitialArgument("tools", List.of("add", 
"multiply"))
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/VectorStoreIntegrationAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/VectorStoreIntegrationAgent.java
index a4752957..85a7205b 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/VectorStoreIntegrationAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-integration/src/test/java/org/apache/flink/agents/integration/test/VectorStoreIntegrationAgent.java
@@ -36,8 +36,8 @@ import java.util.List;
 import java.util.Map;
 
 import static 
org.apache.flink.agents.api.resource.Constant.ELASTICSEARCH_VECTOR_STORE;
-import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL;
 import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_SETUP;
 
 public class VectorStoreIntegrationAgent extends Agent {
     public static final String OLLAMA_MODEL = "nomic-embed-text";
@@ -59,7 +59,7 @@ public class VectorStoreIntegrationAgent extends Agent {
     public static ResourceDescriptor embeddingModel() {
         String provider = System.getProperty("MODEL_PROVIDER", "OLLAMA");
         if (provider.equals("OLLAMA")) {
-            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL)
+            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL_SETUP)
                     .addInitialArgument("connection", "embeddingConnection")
                     .addInitialArgument("model", OLLAMA_MODEL)
                     .build();
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/ChatModelCrossLanguageAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/ChatModelCrossLanguageAgent.java
index 9ea8f1b3..5f518f21 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/ChatModelCrossLanguageAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/ChatModelCrossLanguageAgent.java
@@ -40,6 +40,9 @@ import 
org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelCon
 import java.util.Collections;
 import java.util.List;
 
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_CHAT_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_CHAT_MODEL_SETUP;
+
 /**
  * Agent example that integrates an external Ollama chat model into Flink 
Agents.
  *
@@ -74,7 +77,7 @@ public class ChatModelCrossLanguageAgent extends Agent {
 
     @ChatModelConnection
     public static ResourceDescriptor pythonChatModelConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(PythonChatModelConnection.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(PYTHON_CHAT_MODEL_CONNECTION)
                 .addInitialArgument(
                         "module", 
"flink_agents.integrations.chat_models.ollama_chat_model")
                 .addInitialArgument("clazz", "OllamaChatModelConnection")
@@ -83,7 +86,7 @@ public class ChatModelCrossLanguageAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor temperatureChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(PythonChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(PYTHON_CHAT_MODEL_SETUP)
                 .addInitialArgument(
                         "module", 
"flink_agents.integrations.chat_models.ollama_chat_model")
                 .addInitialArgument("clazz", "OllamaChatModelSetup")
@@ -96,7 +99,7 @@ public class ChatModelCrossLanguageAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor chatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(PythonChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(PYTHON_CHAT_MODEL_SETUP)
                 .addInitialArgument(
                         "module", 
"flink_agents.integrations.chat_models.ollama_chat_model")
                 .addInitialArgument("clazz", "OllamaChatModelSetup")
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/EmbeddingCrossLanguageAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/EmbeddingCrossLanguageAgent.java
index a0c6d89b..98f53e39 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/EmbeddingCrossLanguageAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/EmbeddingCrossLanguageAgent.java
@@ -27,14 +27,15 @@ import 
org.apache.flink.agents.api.annotation.EmbeddingModelConnection;
 import org.apache.flink.agents.api.annotation.EmbeddingModelSetup;
 import org.apache.flink.agents.api.context.RunnerContext;
 import org.apache.flink.agents.api.embedding.model.BaseEmbeddingModelSetup;
-import 
org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelConnection;
-import 
org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelSetup;
 import org.apache.flink.agents.api.resource.ResourceDescriptor;
 
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_EMBEDDING_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_EMBEDDING_MODEL_SETUP;
+
 /**
  * Integration test agent for verifying embedding functionality with Python 
embedding model.
  *
@@ -50,7 +51,7 @@ public class EmbeddingCrossLanguageAgent extends Agent {
 
     @EmbeddingModelConnection
     public static ResourceDescriptor embeddingConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(PythonEmbeddingModelConnection.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(PYTHON_EMBEDDING_MODEL_CONNECTION)
                 .addInitialArgument(
                         "module",
                         
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
@@ -60,7 +61,7 @@ public class EmbeddingCrossLanguageAgent extends Agent {
 
     @EmbeddingModelSetup
     public static ResourceDescriptor embeddingModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(PythonEmbeddingModelSetup.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(PYTHON_EMBEDDING_MODEL_SETUP)
                 .addInitialArgument(
                         "module",
                         
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
diff --git 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/VectorStoreCrossLanguageAgent.java
 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/VectorStoreCrossLanguageAgent.java
index 6368bd13..62410527 100644
--- 
a/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/VectorStoreCrossLanguageAgent.java
+++ 
b/e2e-test/flink-agents-end-to-end-tests-resource-cross-language/src/test/java/org/apache/flink/agents/resource/test/VectorStoreCrossLanguageAgent.java
@@ -27,8 +27,6 @@ import 
org.apache.flink.agents.api.annotation.EmbeddingModelSetup;
 import org.apache.flink.agents.api.annotation.VectorStore;
 import org.apache.flink.agents.api.context.MemoryObject;
 import org.apache.flink.agents.api.context.RunnerContext;
-import 
org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelConnection;
-import 
org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelSetup;
 import org.apache.flink.agents.api.event.ContextRetrievalRequestEvent;
 import org.apache.flink.agents.api.event.ContextRetrievalResponseEvent;
 import org.apache.flink.agents.api.resource.ResourceDescriptor;
@@ -36,8 +34,6 @@ import org.apache.flink.agents.api.resource.ResourceType;
 import 
org.apache.flink.agents.api.vectorstores.CollectionManageableVectorStore;
 import org.apache.flink.agents.api.vectorstores.Document;
 import 
org.apache.flink.agents.api.vectorstores.python.PythonCollectionManageableVectorStore;
-import 
org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection;
-import 
org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup;
 import org.junit.jupiter.api.Assertions;
 import pemja.core.PythonException;
 
@@ -45,6 +41,12 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_EMBEDDING_MODEL_SETUP;
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_COLLECTION_MANAGEABLE_VECTOR_STORE;
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_EMBEDDING_MODEL_CONNECTION;
+import static 
org.apache.flink.agents.api.resource.Constant.PYTHON_EMBEDDING_MODEL_SETUP;
+
 /**
  * Integration test agent for verifying vector store functionality with Python 
vector store
  * implementation.
@@ -63,16 +65,14 @@ public class VectorStoreCrossLanguageAgent extends Agent {
     @EmbeddingModelConnection
     public static ResourceDescriptor embeddingConnection() {
         if (System.getProperty("EMBEDDING_TYPE", "PYTHON").equals("PYTHON")) {
-            return ResourceDescriptor.Builder.newBuilder(
-                            PythonEmbeddingModelConnection.class.getName())
+            return 
ResourceDescriptor.Builder.newBuilder(PYTHON_EMBEDDING_MODEL_CONNECTION)
                     .addInitialArgument(
                             "module",
                             
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
                     .addInitialArgument("clazz", 
"OllamaEmbeddingModelConnection")
                     .build();
         } else {
-            return ResourceDescriptor.Builder.newBuilder(
-                            OllamaEmbeddingModelConnection.class.getName())
+            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL_CONNECTION)
                     .addInitialArgument("host", "http://localhost:11434";)
                     .addInitialArgument("timeout", 60)
                     .build();
@@ -82,7 +82,7 @@ public class VectorStoreCrossLanguageAgent extends Agent {
     @EmbeddingModelSetup
     public static ResourceDescriptor embeddingModel() {
         if (System.getProperty("EMBEDDING_TYPE", "PYTHON").equals("PYTHON")) {
-            return 
ResourceDescriptor.Builder.newBuilder(PythonEmbeddingModelSetup.class.getName())
+            return 
ResourceDescriptor.Builder.newBuilder(PYTHON_EMBEDDING_MODEL_SETUP)
                     .addInitialArgument(
                             "module",
                             
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
@@ -91,7 +91,7 @@ public class VectorStoreCrossLanguageAgent extends Agent {
                     .addInitialArgument("model", OLLAMA_MODEL)
                     .build();
         } else {
-            return 
ResourceDescriptor.Builder.newBuilder(OllamaEmbeddingModelSetup.class.getName())
+            return 
ResourceDescriptor.Builder.newBuilder(OLLAMA_EMBEDDING_MODEL_SETUP)
                     .addInitialArgument("connection", "embeddingConnection")
                     .addInitialArgument("model", OLLAMA_MODEL)
                     .build();
@@ -100,8 +100,7 @@ public class VectorStoreCrossLanguageAgent extends Agent {
 
     @VectorStore
     public static ResourceDescriptor vectorStore() {
-        return ResourceDescriptor.Builder.newBuilder(
-                        PythonCollectionManageableVectorStore.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(PYTHON_COLLECTION_MANAGEABLE_VECTOR_STORE)
                 .addInitialArgument(
                         "module",
                         
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store")
diff --git a/examples/pom.xml b/examples/pom.xml
index 1e1b2298..96ee9b71 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -66,11 +66,6 @@ under the License.
             <artifactId>flink-clients</artifactId>
             <version>${flink.version}</version>
         </dependency>
-        <dependency>
-            <groupId>org.apache.flink</groupId>
-            
<artifactId>flink-agents-integrations-chat-models-ollama</artifactId>
-            <version>${project.version}</version>
-        </dependency>
 
     </dependencies>
 
diff --git 
a/examples/src/main/java/org/apache/flink/agents/examples/ReActAgentExample.java
 
b/examples/src/main/java/org/apache/flink/agents/examples/ReActAgentExample.java
index f3b7f5f2..e0460dfb 100644
--- 
a/examples/src/main/java/org/apache/flink/agents/examples/ReActAgentExample.java
+++ 
b/examples/src/main/java/org/apache/flink/agents/examples/ReActAgentExample.java
@@ -38,7 +38,7 @@ import java.io.File;
 import java.time.Duration;
 import java.util.Collections;
 
-import static org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.examples.WorkflowSingleAgentExample.copyResource;
 
 /**
@@ -144,7 +144,7 @@ public class ReActAgentExample {
     // Create ReAct agent.
     private static ReActAgent getReActAgent() {
         return new ReActAgent(
-                ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL)
+                ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL_SETUP)
                         .addInitialArgument("connection", 
"ollamaChatModelConnection")
                         .addInitialArgument("model", "qwen3:8b")
                         .addInitialArgument(
diff --git 
a/examples/src/main/java/org/apache/flink/agents/examples/agents/ProductSuggestionAgent.java
 
b/examples/src/main/java/org/apache/flink/agents/examples/agents/ProductSuggestionAgent.java
index aca3138a..b7742f20 100644
--- 
a/examples/src/main/java/org/apache/flink/agents/examples/agents/ProductSuggestionAgent.java
+++ 
b/examples/src/main/java/org/apache/flink/agents/examples/agents/ProductSuggestionAgent.java
@@ -38,7 +38,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import static org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.examples.agents.CustomTypesAndResources.PRODUCT_SUGGESTION_PROMPT;
 
 /**
@@ -57,7 +57,7 @@ public class ProductSuggestionAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor generateSuggestionModel() {
-        return ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL)
+        return ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("extract_reasoning", "true")
diff --git 
a/examples/src/main/java/org/apache/flink/agents/examples/agents/ReviewAnalysisAgent.java
 
b/examples/src/main/java/org/apache/flink/agents/examples/agents/ReviewAnalysisAgent.java
index 246d67ef..275b27c7 100644
--- 
a/examples/src/main/java/org/apache/flink/agents/examples/agents/ReviewAnalysisAgent.java
+++ 
b/examples/src/main/java/org/apache/flink/agents/examples/agents/ReviewAnalysisAgent.java
@@ -40,7 +40,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import static org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL;
+import static 
org.apache.flink.agents.api.resource.Constant.OLLAMA_CHAT_MODEL_SETUP;
 import static 
org.apache.flink.agents.examples.agents.CustomTypesAndResources.REVIEW_ANALYSIS_PROMPT;
 
 /**
@@ -61,7 +61,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewAnalysisModel() {
-        return ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL)
+        return ResourceDescriptor.Builder.newBuilder(OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("prompt", "reviewAnalysisPrompt")
diff --git a/python/flink_agents/api/agents/agent.py 
b/python/flink_agents/api/agents/agent.py
index bef4f10c..7aab9e77 100644
--- a/python/flink_agents/api/agents/agent.py
+++ b/python/flink_agents/api/agents/agent.py
@@ -133,7 +133,7 @@ class Agent(ABC):
         return self
 
     def add_resource(
-        self, name: str, instance: SerializableResource | ResourceDescriptor
+        self, name: str, resource_type: ResourceType, instance: 
SerializableResource | ResourceDescriptor
     ) -> "Agent":
         """Add resource to agent instance.
 
@@ -141,6 +141,8 @@ class Agent(ABC):
         ----------
         name : str
             The name of the prompt, should be unique in the same Agent.
+        resource_type: ResourceType
+            The type of the resource.
         instance: SerializableResource | ResourceDescriptor
             The serializable resource instance, or the descriptor of resource.
 
@@ -149,14 +151,6 @@ class Agent(ABC):
         Agent
             The agent to add the resource.
         """
-        if isinstance(instance, SerializableResource):
-            resource_type = instance.resource_type()
-        elif isinstance(instance, ResourceDescriptor):
-            resource_type = instance.clazz.resource_type()
-        else:
-            err_msg = f"Unexpected resource {instance}"
-            raise TypeError(err_msg)
-
         if name in self._resources[resource_type]:
             msg = f"{resource_type.value} {name} already defined"
             raise ValueError(msg)
diff --git a/python/flink_agents/api/agents/react_agent.py 
b/python/flink_agents/api/agents/react_agent.py
index f540953a..fbb4b93f 100644
--- a/python/flink_agents/api/agents/react_agent.py
+++ b/python/flink_agents/api/agents/react_agent.py
@@ -116,7 +116,7 @@ class ReActAgent(Agent):
             response format of llm, and add output parser according to the 
schema.
         """
         super().__init__()
-        self.add_resource(_DEFAULT_CHAT_MODEL, chat_model)
+        self.add_resource(_DEFAULT_CHAT_MODEL, ResourceType.CHAT_MODEL, 
chat_model)
 
         if output_schema:
             if isinstance(output_schema, type) and issubclass(output_schema, 
BaseModel):
diff --git a/python/flink_agents/api/execution_environment.py 
b/python/flink_agents/api/execution_environment.py
index 21e6175a..4b4c80b6 100644
--- a/python/flink_agents/api/execution_environment.py
+++ b/python/flink_agents/api/execution_environment.py
@@ -226,7 +226,7 @@ class AgentsExecutionEnvironment(ABC):
         """Execute agent individually."""
 
     def add_resource(
-        self, name: str, instance: SerializableResource | ResourceDescriptor
+        self, name: str, resource_type: ResourceType, instance: 
SerializableResource | ResourceDescriptor
     ) -> "AgentsExecutionEnvironment":
         """Register resource to agent execution environment.
 
@@ -234,6 +234,8 @@ class AgentsExecutionEnvironment(ABC):
         ----------
         name : str
             The name of the prompt, should be unique in the same Agent.
+        resource_type: ResourceType
+            The type of the resource.
         instance: SerializableResource | ResourceDescriptor
             The serializable resource instance, or the descriptor of resource.
 
@@ -242,14 +244,6 @@ class AgentsExecutionEnvironment(ABC):
         AgentsExecutionEnvironment
             The environment to register the resource.
         """
-        if isinstance(instance, SerializableResource):
-            resource_type = instance.resource_type()
-        elif isinstance(instance, ResourceDescriptor):
-            resource_type = instance.clazz.resource_type()
-        else:
-            err_msg = f"Unexpected resource {instance}"
-            raise TypeError(err_msg)
-
         if name in self._resources[resource_type]:
             msg = f"{resource_type.value} {name} already defined"
             raise ValueError(msg)
diff --git a/python/flink_agents/api/resource.py 
b/python/flink_agents/api/resource.py
index ff9be303..f7a83e46 100644
--- a/python/flink_agents/api/resource.py
+++ b/python/flink_agents/api/resource.py
@@ -20,7 +20,7 @@ from abc import ABC, abstractmethod
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Callable, Dict, Type
 
-from pydantic import BaseModel, Field, PrivateAttr, model_serializer, 
model_validator
+from pydantic import BaseModel, Field, PrivateAttr, model_validator
 
 if TYPE_CHECKING:
     from flink_agents.api.metric_group import MetricGroup
@@ -109,66 +109,68 @@ class ResourceDescriptor(BaseModel):
     instantiation.
 
     Attributes:
-        clazz: The Python Resource class name.
+        target_module: The module name of the resource class.
+        target_clazz: The class name of the resource.
         arguments: Dictionary containing resource initialization parameters.
     """
-    clazz: Type[Resource] | None = None
+
+    _clazz: Type[Resource] | None = None
+    target_module: str
+    target_clazz: str
     arguments: Dict[str, Any]
 
-    def __init__(self, /,
-                 *,
-                 clazz: Type[Resource] | None = None,
-                 **arguments: Any) -> None:
+    def __init__(
+        self,
+        /,
+        *,
+        clazz: str | None = None,
+        target_module: str | None = None,
+        target_clazz: str | None = None,
+        arguments: Dict[str, Any] | None = None,
+        **kwargs: Any,
+    ) -> None:
         """Initialize ResourceDescriptor.
 
         Args:
-            clazz: The Resource class type to create a descriptor for.
-            **arguments: Additional arguments for resource initialization.
+            clazz: The fully qualified name of the resource implementation, 
including
+                    module and class.
+            target_module: The module name of the resource class.
+            target_clazz: The class name of the resource.
+            arguments: Dictionary containing resource initialization 
parameters.
+            **kwargs: Additional keyword arguments for resource initialization,
+                which will be merged into arguments.
 
         Usage:
-            descriptor = ResourceDescriptor(clazz=YourResourceClass,
+            descriptor = 
ResourceDescriptor(clazz="flink_agents.integrations.chat_models
+                                            
.ollama_chat_model.OllamaChatModelConnection",
                                             param1="value1",
                                             param2="value2")
         """
-        super().__init__(clazz=clazz, arguments=arguments)
-
-    @model_serializer
-    def __custom_serializer(self) -> dict[str, Any]:
-        """Serialize ResourceDescriptor to dictionary.
-
-        Returns:
-            Dictionary containing python_clazz, python_module, java_clazz, and
-            arguments.
-        """
-        return {
-            "target_clazz": self.clazz.__name__,
-            "target_module": self.clazz.__module__,
-            "arguments": self.arguments,
-        }
-
-    @model_validator(mode="before")
-    @classmethod
-    def __custom_deserialize(cls, data: dict[str, Any]) -> dict[str, Any]:
-        """Deserialize data to ResourceDescriptor fields.
-
-        Handles both new format (with python_module) and legacy format
-        (full path in python_clazz).
-
-        Args:
-            data: Dictionary or other data to deserialize.
-
-        Returns:
-            Dictionary with normalized field structure.
-        """
-        if "clazz" in data and data["clazz"] is not None:
-            return data
+        if clazz is not None:
+            parts = clazz.split(".")
+            target_module = ".".join(parts[:-1])
+            target_clazz = parts[-1]
+
+        if target_clazz is None or target_module is None:
+            msg = "The fully qualified name of the resource must be specified"
+            raise ValueError(msg)
+
+        args = {}
+        if arguments is not None:
+            args.update(arguments)
+        args.update(kwargs)
+
+        super().__init__(
+            target_module=target_module, target_clazz=target_clazz, 
arguments=args
+        )
 
-        args = data["arguments"]
-        python_clazz = args.pop("target_clazz")
-        python_module = args.pop("target_module")
-        data["clazz"] = get_resource_class(python_module, python_clazz)
-        data["arguments"] = args["arguments"]
-        return data
+    @property
+    def clazz(self) -> Type[Resource]:
+        """Get the class of the resource."""
+        if self._clazz is None:
+            module = importlib.import_module(self.target_module)
+            self._clazz = getattr(module, self.target_clazz)
+        return self._clazz
 
     def __eq__(self, other: object) -> bool:
         """Compare ResourceDescriptor objects, ignoring private _clazz field.
@@ -180,13 +182,20 @@ class ResourceDescriptor(BaseModel):
         if not isinstance(other, ResourceDescriptor):
             return False
         return (
-            self.clazz == other.clazz
+            self.target_module == other.target_module
+            and self.target_clazz == other.target_clazz
             and self.arguments == other.arguments
         )
 
     def __hash__(self) -> int:
         """Generate hash for ResourceDescriptor."""
-        return hash((self.clazz, tuple(sorted(self.arguments.items()))))
+        return hash(
+            (
+                self.target_module,
+                self.target_clazz,
+                tuple(sorted(self.arguments.items())),
+            )
+        )
 
 
 def get_resource_class(module_path: str, class_name: str) -> Type[Resource]:
@@ -201,3 +210,53 @@ def get_resource_class(module_path: str, class_name: str) 
-> Type[Resource]:
     """
     module = importlib.import_module(module_path)
     return getattr(module, class_name)
+
+
+class Constant:
+    """Constant strings pointing to built-in resource implementations."""
+
+    # Built-in ChatModel
+    # java wrapper
+    JAVA_CHAT_MODEL_CONNECTION = (
+        "flink_agents.api.chat_models.java_chat_model.JavaChatModelConnection"
+    )
+    JAVA_CHAT_MODEL_SETUP = (
+        "flink_agents.api.chat_models.java_chat_model.JavaChatModelSetup"
+    )
+    # ollama
+    OLLAMA_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection"
+    OLLAMA_CHAT_MODEL_SETUP = (
+        
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup"
+    )
+    # anthropic
+    ANTHROPIC_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection"
+    ANTHROPIC_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup"
+    # tongyi
+    TONGYI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection"
+    TONGYI_CHAT_MODEL_SETUP = (
+        
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup"
+    )
+    # OpenAI
+    OPENAI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection"
+    OPENAI_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup"
+
+    # Built-in EmbeddingModel
+    # java wrapper
+    JAVA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelConnection"
+    JAVA_EMBEDDING_MODEL_SETUP = (
+        
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelSetup"
+    )
+    # ollama
+    OLLAMA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection"
+    OLLAMA_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup"
+
+    # OpenAI
+    OPENAI_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection"
+    OPENAI_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup"
+
+    # Built-in VectorStore
+    # java wrapper
+    JAVA_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaVectorStore"
+    JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaCollectionManageableVectorStore"
+    # chroma
+    CHROMA_VECTOR_STORE = 
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore"
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
index 92d2d07c..b68a2f1a 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
@@ -27,20 +27,11 @@ from flink_agents.api.decorators import (
 )
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
-from flink_agents.api.resource import ResourceDescriptor
-from flink_agents.api.runner_context import RunnerContext
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
-from flink_agents.integrations.chat_models.openai.openai_chat_model import (
-    OpenAIChatModelConnection,
-    OpenAIChatModelSetup,
-)
-from flink_agents.integrations.chat_models.tongyi_chat_model import (
-    TongyiChatModelConnection,
-    TongyiChatModelSetup,
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
 )
+from flink_agents.api.runner_context import RunnerContext
 
 
 class ChatModelTestAgent(Agent):
@@ -51,21 +42,21 @@ class ChatModelTestAgent(Agent):
     def openai_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for openai model service 
connection."""
         return ResourceDescriptor(
-            clazz=OpenAIChatModelConnection, 
api_key=os.environ.get("OPENAI_API_KEY")
+            clazz=Constant.OPENAI_CHAT_MODEL_CONNECTION, 
api_key=os.environ.get("OPENAI_API_KEY")
         )
 
     @chat_model_connection
     @staticmethod
     def tongyi_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for tongyi model service 
connection."""
-        return ResourceDescriptor(clazz=TongyiChatModelConnection)
+        return ResourceDescriptor(clazz=Constant.TONGYI_CHAT_MODEL_CONNECTION)
 
     @chat_model_connection
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection, request_timeout=240.0
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
         )
 
     @chat_model_setup
@@ -75,14 +66,14 @@ class ChatModelTestAgent(Agent):
         model_provider = os.environ.get("MODEL_PROVIDER")
         if model_provider == "Tongyi":
             return ResourceDescriptor(
-                clazz=TongyiChatModelSetup,
+                clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
                 connection="tongyi_connection",
                 model=os.environ.get("TONGYI_CHAT_MODEL", "qwen-plus"),
                 tools=["add"],
             )
         elif model_provider == "Ollama":
             return ResourceDescriptor(
-                clazz=OllamaChatModelSetup,
+                clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
                 connection="ollama_connection",
                 model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
                 tools=["add"],
@@ -90,7 +81,7 @@ class ChatModelTestAgent(Agent):
             )
         elif model_provider == "OpenAI":
             return ResourceDescriptor(
-                clazz=OpenAIChatModelSetup,
+                clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
                 connection="openai_connection",
                 model=os.environ.get("OPENAI_CHAT_MODEL", "gpt-3.5-turbo"),
                 tools=["add"],
@@ -106,20 +97,20 @@ class ChatModelTestAgent(Agent):
         model_provider = os.environ.get("MODEL_PROVIDER")
         if model_provider == "Tongyi":
             return ResourceDescriptor(
-                clazz=TongyiChatModelSetup,
+                clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
                 connection="tongyi_connection",
                 model=os.environ.get("TONGYI_CHAT_MODEL", "qwen-plus"),
             )
         elif model_provider == "Ollama":
             return ResourceDescriptor(
-                clazz=TongyiChatModelSetup,
+                clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
                 connection="ollama_connection",
                 model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
                 extract_reasoning=True,
             )
         elif model_provider == "OpenAI":
             return ResourceDescriptor(
-                clazz=OpenAIChatModelSetup,
+                clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
                 connection="openai_connection",
                 model=os.environ.get("OPENAI_CHAT_MODEL", "gpt-3.5-turbo"),
             )
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py 
b/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
index df00cce7..7c5e7448 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
@@ -46,13 +46,12 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
+)
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.e2e_tests.test_utils import pull_model
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
 from flink_agents.integrations.mcp.mcp import MCPServer
 
 OLLAMA_MODEL = os.environ.get("MCP_OLLAMA_CHAT_MODEL", "qwen3:1.7b")
@@ -80,7 +79,7 @@ class MyMCPAgent(Agent):
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection for Ollama."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection, request_timeout=240.0
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
         )
 
     @chat_model_setup
@@ -88,7 +87,7 @@ class MyMCPAgent(Agent):
     def math_chat_model() -> ResourceDescriptor:
         """ChatModel using MCP prompt and tool."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_connection",
             model=OLLAMA_MODEL,
             prompt="ask_sum",  # MCP prompt registered from my_mcp_server
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py 
b/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
index 7e688b5c..dee3bcea 100644
--- a/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
+++ b/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
@@ -34,14 +34,14 @@ from flink_agents.api.chat_message import ChatMessage, 
MessageRole
 from flink_agents.api.core_options import AgentConfigOptions, 
ErrorHandlingStrategy
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
+    ResourceType,
+)
 from flink_agents.api.tools.tool import Tool
 from flink_agents.e2e_tests.e2e_tests_integration.react_agent_tools import 
add, multiply
 from flink_agents.e2e_tests.test_utils import pull_model
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
 
 current_dir = Path(__file__).parent
 
@@ -86,10 +86,11 @@ def test_react_agent_on_local_runner() -> None:  # noqa: 
D103
     (
         env.add_resource(
             "ollama",
-            ResourceDescriptor(clazz=OllamaChatModelConnection, 
request_timeout=240.0),
+            ResourceType.CHAT_MODEL_CONNECTION,
+            ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=240.0),
         )
-        .add_resource("add", Tool.from_callable(add))
-        .add_resource("multiply", Tool.from_callable(multiply))
+        .add_resource("add", ResourceType.TOOL, Tool.from_callable(add))
+        .add_resource("multiply", ResourceType.TOOL, 
Tool.from_callable(multiply))
     )
 
     # prepare prompt
@@ -106,7 +107,7 @@ def test_react_agent_on_local_runner() -> None:  # noqa: 
D103
     # create ReAct agent.
     agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama",
             model=OLLAMA_MODEL,
             tools=["add", "multiply"],
@@ -164,10 +165,11 @@ def test_react_agent_on_remote_runner(tmp_path: Path) -> 
None:  # noqa: D103
     (
         env.add_resource(
             "ollama",
-            ResourceDescriptor(clazz=OllamaChatModelConnection, 
request_timeout=240.0),
+            ResourceType.CHAT_MODEL_CONNECTION,
+            ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=240.0),
         )
-        .add_resource("add", Tool.from_callable(add))
-        .add_resource("multiply", Tool.from_callable(multiply))
+        .add_resource("add", ResourceType.TOOL, Tool.from_callable(add))
+        .add_resource("multiply", ResourceType.TOOL, 
Tool.from_callable(multiply))
     )
 
     # prepare prompt
@@ -189,7 +191,7 @@ def test_react_agent_on_remote_runner(tmp_path: Path) -> 
None:  # noqa: D103
     # create ReAct agent.
     agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama",
             model=OLLAMA_MODEL,
             tools=["add", "multiply"],
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
index 929500f6..44ab63ce 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
@@ -19,10 +19,6 @@ import os
 
 from flink_agents.api.agents.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
-from flink_agents.api.chat_models.java_chat_model import (
-    JavaChatModelConnection,
-    JavaChatModelSetup,
-)
 from flink_agents.api.decorators import (
     action,
     chat_model_connection,
@@ -33,11 +29,11 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor
-from flink_agents.api.runner_context import RunnerContext
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
 )
+from flink_agents.api.runner_context import RunnerContext
 
 
 class ChatModelCrossLanguageAgent(Agent):
@@ -73,7 +69,7 @@ class ChatModelCrossLanguageAgent(Agent):
     def ollama_connection_python() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection, request_timeout=240.0
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
         )
 
     @chat_model_connection
@@ -81,7 +77,7 @@ class ChatModelCrossLanguageAgent(Agent):
     def ollama_connection_java() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=JavaChatModelConnection,
+            clazz=Constant.JAVA_CHAT_MODEL_CONNECTION,
             
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection",
             endpoint="http://localhost:11434";,
             requestTimeout=120,
@@ -92,7 +88,7 @@ class ChatModelCrossLanguageAgent(Agent):
     def math_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on math, and reuse ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=JavaChatModelSetup,
+            clazz=Constant.JAVA_CHAT_MODEL_SETUP,
             connection="ollama_connection_python",
             
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup",
             model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
@@ -106,7 +102,7 @@ class ChatModelCrossLanguageAgent(Agent):
     def creative_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on text generate, and reuse 
ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=JavaChatModelSetup,
+            clazz=Constant.JAVA_CHAT_MODEL_SETUP,
             connection="ollama_connection_java",
             
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup",
             model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
index ad5c0ebe..ba701898 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
@@ -23,12 +23,8 @@ from flink_agents.api.decorators import (
     embedding_model_connection,
     embedding_model_setup,
 )
-from flink_agents.api.embedding_models.java_embedding_model import (
-    JavaEmbeddingModelConnection,
-    JavaEmbeddingModelSetup,
-)
 from flink_agents.api.events.event import InputEvent, OutputEvent
-from flink_agents.api.resource import ResourceDescriptor, ResourceType
+from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
 from flink_agents.api.runner_context import RunnerContext
 
 
@@ -40,7 +36,7 @@ class EmbeddingModelCrossLanguageAgent(Agent):
     def embedding_model_connection() -> ResourceDescriptor:
         """EmbeddingModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=JavaEmbeddingModelConnection,
+            clazz=Constant.JAVA_EMBEDDING_MODEL_CONNECTION,
             
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection",
             host="http://localhost:11434";,
         )
@@ -50,7 +46,7 @@ class EmbeddingModelCrossLanguageAgent(Agent):
     def embedding_model() -> ResourceDescriptor:
         """EmbeddingModel which focus on math, and reuse 
ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=JavaEmbeddingModelSetup,
+            clazz=Constant.JAVA_EMBEDDING_MODEL_SETUP,
             
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup",
             connection="embedding_model_connection",
             model=os.environ.get("OLLAMA_EMBEDDING_MODEL", 
"nomic-embed-text:latest"),
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
index 831945b7..576dc244 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
@@ -27,28 +27,17 @@ from flink_agents.api.decorators import (
     embedding_model_setup,
     vector_store,
 )
-from flink_agents.api.embedding_models.java_embedding_model import (
-    JavaEmbeddingModelConnection,
-    JavaEmbeddingModelSetup,
-)
 from flink_agents.api.events.context_retrieval_event import (
     ContextRetrievalRequestEvent,
     ContextRetrievalResponseEvent,
 )
 from flink_agents.api.events.event import InputEvent, OutputEvent
-from flink_agents.api.resource import ResourceDescriptor, ResourceType
+from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
 from flink_agents.api.runner_context import RunnerContext
-from flink_agents.api.vector_stores.java_vector_store import (
-    JavaCollectionManageableVectorStore,
-)
 from flink_agents.api.vector_stores.vector_store import (
     CollectionManageableVectorStore,
     Document,
 )
-from flink_agents.integrations.embedding_models.local.ollama_embedding_model 
import (
-    OllamaEmbeddingModelConnection,
-    OllamaEmbeddingModelSetup,
-)
 
 TEST_COLLECTION = "test_collection"
 MAX_RETRIES_TIMES = 10
@@ -62,12 +51,12 @@ class VectorStoreCrossLanguageAgent(Agent):
         """EmbeddingModelConnection responsible for ollama model service 
connection."""
         if os.environ.get("EMBEDDING_TYPE") == "JAVA":
             return ResourceDescriptor(
-                clazz=JavaEmbeddingModelConnection,
+                clazz=Constant.JAVA_EMBEDDING_MODEL_CONNECTION,
                 
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection",
                 host="http://localhost:11434";,
             )
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelConnection,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION,
             host="http://localhost:11434";,
         )
 
@@ -77,7 +66,7 @@ class VectorStoreCrossLanguageAgent(Agent):
         """EmbeddingModel which focus on math, and reuse 
ChatModelConnection."""
         if os.environ.get("EMBEDDING_TYPE") == "JAVA":
             return ResourceDescriptor(
-                clazz=JavaEmbeddingModelSetup,
+                clazz=Constant.JAVA_EMBEDDING_MODEL_SETUP,
                 
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup",
                 connection="embedding_model_connection",
                 model=os.environ.get(
@@ -85,7 +74,7 @@ class VectorStoreCrossLanguageAgent(Agent):
                 ),
             )
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelSetup,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
             connection="embedding_model_connection",
             model=os.environ.get("OLLAMA_EMBEDDING_MODEL", 
"nomic-embed-text:latest"),
         )
@@ -95,7 +84,7 @@ class VectorStoreCrossLanguageAgent(Agent):
     def vector_store() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=JavaCollectionManageableVectorStore,
+            clazz=Constant.JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE,
             
java_clazz="org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore",
             embedding_model="embedding_model",
             host=os.environ.get("ES_HOST"),
diff --git a/python/flink_agents/e2e_tests/long_term_memory_test.py 
b/python/flink_agents/e2e_tests/long_term_memory_test.py
index 8c068e35..b75265f0 100644
--- a/python/flink_agents/e2e_tests/long_term_memory_test.py
+++ b/python/flink_agents/e2e_tests/long_term_memory_test.py
@@ -53,17 +53,12 @@ from flink_agents.api.memory.long_term_memory import (
     MemorySetItem,
     SummarizationStrategy,
 )
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
+)
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.e2e_tests.test_utils import pull_model
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
-from flink_agents.integrations.embedding_models.local.ollama_embedding_model 
import (
-    OllamaEmbeddingModelConnection,
-    OllamaEmbeddingModelSetup,
-)
 from flink_agents.integrations.vector_stores.chroma.chroma_vector_store import 
(
     ChromaVectorStore,
 )
@@ -128,7 +123,7 @@ class LongTermMemoryAgent(Agent):
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection, request_timeout=240.0
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
         )
 
     @chat_model_setup
@@ -136,7 +131,7 @@ class LongTermMemoryAgent(Agent):
     def ollama_qwen3() -> ResourceDescriptor:
         """ChatModel which focus on math, and reuse ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_connection",
             model=OLLAMA_CHAT_MODEL,
             extract_reasoning=True,
@@ -146,14 +141,14 @@ class LongTermMemoryAgent(Agent):
     @staticmethod
     def ollama_embedding_connection() -> ResourceDescriptor:  # noqa D102
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelConnection, request_timeout=240.0
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION, 
request_timeout=240.0
         )
 
     @embedding_model_setup
     @staticmethod
     def ollama_nomic_embed_text() -> ResourceDescriptor:  # noqa D102
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelSetup,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
             connection="ollama_embedding_connection",
             model=OLLAMA_EMBEDDING_MODEL,
         )
@@ -163,7 +158,7 @@ class LongTermMemoryAgent(Agent):
     def chroma_vector_store() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=ChromaVectorStore,
+            clazz=Constant.CHROMA_VECTOR_STORE,
             embedding_model="ollama_nomic_embed_text",
             persist_directory=chromadb_path,
         )
diff --git 
a/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py 
b/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
index fc23f7b1..a150604b 100644
--- 
a/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
+++ 
b/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
@@ -21,10 +21,7 @@ from pydantic import BaseModel
 
 from flink_agents.api.chat_message import ChatMessage, MessageRole
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-)
+from flink_agents.api.resource import Constant, ResourceDescriptor
 
 # Prompt for review analysis agent.
 review_analysis_system_prompt_str = """
@@ -201,5 +198,5 @@ class ProductReviewAnalysisRes(BaseModel):
 
 # ollama chat model connection descriptor
 ollama_server_descriptor = ResourceDescriptor(
-    clazz=OllamaChatModelConnection, request_timeout=120
+    clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=120
 )
diff --git 
a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py 
b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
index 57655dca..e82c8130 100644
--- a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
@@ -29,15 +29,12 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import Constant, ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductSuggestion,
     product_suggestion_prompt,
 )
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelSetup,
-)
 
 if TYPE_CHECKING:
     from flink_agents.examples.quickstart.agents.custom_types_and_resources 
import (
@@ -68,7 +65,7 @@ class ProductSuggestionAgent(Agent):
     def generate_suggestion_model() -> ResourceDescriptor:
         """ChatModel which focus on generating product suggestions."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="generate_suggestion_prompt",
diff --git 
a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py 
b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
index d5b23458..b6abc284 100644
--- a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
@@ -30,16 +30,13 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import Constant, ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReviewAnalysisRes,
     notify_shipping_manager,
     review_analysis_prompt,
 )
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelSetup,
-)
 
 if TYPE_CHECKING:
     from flink_agents.examples.quickstart.agents.custom_types_and_resources 
import (
@@ -84,7 +81,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
diff --git a/python/flink_agents/examples/quickstart/react_agent_example.py 
b/python/flink_agents/examples/quickstart/react_agent_example.py
index eb638e90..9f883c15 100644
--- a/python/flink_agents/examples/quickstart/react_agent_example.py
+++ b/python/flink_agents/examples/quickstart/react_agent_example.py
@@ -23,7 +23,7 @@ from pyflink.datastream.connectors.file_system import 
FileSource, StreamFormat
 
 from flink_agents.api.agents.react_agent import ReActAgent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
-from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
 from flink_agents.api.tools.tool import Tool
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReview,
@@ -31,10 +31,6 @@ from 
flink_agents.examples.quickstart.agents.custom_types_and_resources import (
     notify_shipping_manager,
     review_analysis_react_prompt,
 )
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
 
 current_dir = Path(__file__).parent
 
@@ -57,9 +53,10 @@ def main() -> None:
     # by the Agent.
     agents_env.add_resource(
         "ollama_server",
-        ResourceDescriptor(clazz=OllamaChatModelConnection, 
request_timeout=120),
+        ResourceType.CHAT_MODEL_CONNECTION,
+        ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=120),
     ).add_resource(
-        "notify_shipping_manager", Tool.from_callable(notify_shipping_manager)
+        "notify_shipping_manager", ResourceType.TOOL, 
Tool.from_callable(notify_shipping_manager)
     )
 
     # Read product reviews from a text file as a streaming source.
@@ -82,7 +79,7 @@ def main() -> None:
     # Create react agent
     review_analysis_react_agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             tools=["notify_shipping_manager"],
diff --git 
a/python/flink_agents/examples/quickstart/workflow_multiple_agent_example.py 
b/python/flink_agents/examples/quickstart/workflow_multiple_agent_example.py
index aa55c19f..c133008f 100644
--- a/python/flink_agents/examples/quickstart/workflow_multiple_agent_example.py
+++ b/python/flink_agents/examples/quickstart/workflow_multiple_agent_example.py
@@ -32,6 +32,7 @@ from pyflink.datastream.window import (
 )
 
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
+from flink_agents.api.resource import ResourceType
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReview,
     ProductReviewSummary,
@@ -115,6 +116,7 @@ def main() -> None:
     # and ProductSuggestionAgent.
     agents_env.add_resource(
         "ollama_server",
+        ResourceType.CHAT_MODEL_CONNECTION,
         ollama_server_descriptor,
     )
 
diff --git 
a/python/flink_agents/examples/quickstart/workflow_single_agent_example.py 
b/python/flink_agents/examples/quickstart/workflow_single_agent_example.py
index 59039ee3..e338e81b 100644
--- a/python/flink_agents/examples/quickstart/workflow_single_agent_example.py
+++ b/python/flink_agents/examples/quickstart/workflow_single_agent_example.py
@@ -22,6 +22,7 @@ from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors.file_system import FileSource, StreamFormat
 
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
+from flink_agents.api.resource import ResourceType
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReview,
     ollama_server_descriptor,
@@ -49,6 +50,7 @@ def main() -> None:
     # Add Ollama chat model connection to be used by the ReviewAnalysisAgent.
     agents_env.add_resource(
         "ollama_server",
+        ResourceType.CHAT_MODEL_CONNECTION,
         ollama_server_descriptor,
     )
 
diff --git a/python/flink_agents/examples/rag/rag_agent_example.py 
b/python/flink_agents/examples/rag/rag_agent_example.py
index 11099056..ec31881d 100644
--- a/python/flink_agents/examples/rag/rag_agent_example.py
+++ b/python/flink_agents/examples/rag/rag_agent_example.py
@@ -34,20 +34,13 @@ from flink_agents.api.events.context_retrieval_event import 
(
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceDescriptor, ResourceType
+from flink_agents.api.resource import (
+    Constant,
+    ResourceDescriptor,
+    ResourceType,
+)
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.examples.rag.knowledge_base_setup import 
populate_knowledge_base
-from flink_agents.integrations.chat_models.ollama_chat_model import (
-    OllamaChatModelConnection,
-    OllamaChatModelSetup,
-)
-from flink_agents.integrations.embedding_models.local.ollama_embedding_model 
import (
-    OllamaEmbeddingModelConnection,
-    OllamaEmbeddingModelSetup,
-)
-from flink_agents.integrations.vector_stores.chroma.chroma_vector_store import 
(
-    ChromaVectorStore,
-)
 
 OLLAMA_CHAT_MODEL = os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:8b")
 OLLAMA_EMBEDDING_MODEL = os.environ.get("OLLAMA_EMBEDDING_MODEL", 
"nomic-embed-text")
@@ -85,7 +78,7 @@ Please provide a helpful answer based on the context 
provided."""
     def text_embedder() -> ResourceDescriptor:
         """Embedding model setup for generating text embeddings."""
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelSetup,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
             connection="ollama_embedding_connection",
             model=OLLAMA_EMBEDDING_MODEL,
         )
@@ -95,7 +88,7 @@ Please provide a helpful answer based on the context 
provided."""
     def knowledge_base() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=ChromaVectorStore,
+            clazz=Constant.CHROMA_VECTOR_STORE,
             embedding_model="text_embedder",
             collection="example_knowledge_base",
         )
@@ -105,7 +98,7 @@ Please provide a helpful answer based on the context 
provided."""
     def chat_model() -> ResourceDescriptor:
         """Chat model setup for generating responses."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_chat_connection",
             model=OLLAMA_CHAT_MODEL
         )
@@ -181,8 +174,8 @@ if __name__ == "__main__":
     agents_env = AgentsExecutionEnvironment.get_execution_environment()
 
     # Setup Ollama embedding and chat model connections
-    agents_env.add_resource("ollama_embedding_connection", 
ResourceDescriptor(clazz=OllamaEmbeddingModelConnection))
-    agents_env.add_resource("ollama_chat_connection", 
ResourceDescriptor(clazz=OllamaChatModelConnection))
+    agents_env.add_resource("ollama_embedding_connection", 
ResourceType.EMBEDDING_MODEL_CONNECTION, 
ResourceDescriptor(clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION))
+    agents_env.add_resource("ollama_chat_connection", 
ResourceType.CHAT_MODEL_CONNECTION, 
ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION))
 
     output_list = agents_env.from_list(input_list).apply(agent).to_list()
 
diff --git a/python/flink_agents/plan/resource_provider.py 
b/python/flink_agents/plan/resource_provider.py
index 0bd45de1..4894b998 100644
--- a/python/flink_agents/plan/resource_provider.py
+++ b/python/flink_agents/plan/resource_provider.py
@@ -99,7 +99,7 @@ class PythonResourceProvider(ResourceProvider):
         return PythonResourceProvider(
                     name=name,
                     type=clazz.resource_type(),
-                    descriptor= descriptor,
+                    descriptor=descriptor,
                 )
 
 
diff --git 
a/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
 
b/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
index 1379b874..ee007568 100644
--- 
a/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
+++ 
b/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
@@ -60,7 +60,7 @@ class PythonAgentPlanCompatibilityTestAgent(Agent):
     def chat_model() -> ResourceDescriptor:
         """ChatModel can be used in action."""
         return ResourceDescriptor(
-            clazz=MockChatModel, name="chat_model", prompt="prompt", 
tools=["add"]
+            clazz=f"{MockChatModel.__module__}.{MockChatModel.__name__}", 
name="chat_model", prompt="prompt", tools=["add"]
         )
 
     @tool
diff --git a/python/flink_agents/plan/tests/test_agent_plan.py 
b/python/flink_agents/plan/tests/test_agent_plan.py
index 1657a02e..6050c485 100644
--- a/python/flink_agents/plan/tests/test_agent_plan.py
+++ b/python/flink_agents/plan/tests/test_agent_plan.py
@@ -183,7 +183,7 @@ class MyAgent(Agent):  # noqa: D101
     @staticmethod
     def mock() -> ResourceDescriptor:  # noqa: D102
         return ResourceDescriptor(
-            clazz=MockChatModelImpl,
+            
clazz=f"{MockChatModelImpl.__module__}.{MockChatModelImpl.__name__}",
             host="8.8.8.8",
             desc="mock resource just for testing.",
             connection="mock",
@@ -193,14 +193,14 @@ class MyAgent(Agent):  # noqa: D101
     @staticmethod
     def mock_embedding_conn() -> ResourceDescriptor:  # noqa: D102
         return ResourceDescriptor(
-            clazz=MockEmbeddingModelConnection, api_key="mock-api-key"
+            
clazz=f"{MockEmbeddingModelConnection.__module__}.{MockEmbeddingModelConnection.__name__}",
 api_key="mock-api-key"
         )
 
     @embedding_model_setup
     @staticmethod
     def mock_embedding() -> ResourceDescriptor:  # noqa: D102
         return ResourceDescriptor(
-            clazz=MockEmbeddingModelSetup,
+            
clazz=f"{MockEmbeddingModelSetup.__module__}.{MockEmbeddingModelSetup.__name__}",
             model="test-model",
             connection="mock_embedding_conn",
         )
@@ -209,7 +209,7 @@ class MyAgent(Agent):  # noqa: D101
     @staticmethod
     def mock_vector_store() -> ResourceDescriptor:  # noqa: D102
         return ResourceDescriptor(
-            clazz=MockVectorStore,
+            clazz=f"{MockVectorStore.__module__}.{MockVectorStore.__name__}",
             embedding_model="mock_embedding",
             host="localhost",
             port=8000,
@@ -272,8 +272,9 @@ def test_add_action_and_resource_to_agent() -> None:  # 
noqa: D103
     )
     my_agent.add_resource(
         name="mock",
+        resource_type=ResourceType.CHAT_MODEL,
         instance=ResourceDescriptor(
-            clazz=MockChatModelImpl,
+            
clazz=f"{MockChatModelImpl.__module__}.{MockChatModelImpl.__name__}",
             host="8.8.8.8",
             desc="mock resource just for testing.",
             connection="mock",
@@ -282,22 +283,25 @@ def test_add_action_and_resource_to_agent() -> None:  # 
noqa: D103
 
     my_agent.add_resource(
         name="mock_embedding_conn",
+        resource_type=ResourceType.EMBEDDING_MODEL_CONNECTION,
         instance=ResourceDescriptor(
-            clazz=MockEmbeddingModelConnection, api_key="mock-api-key"
+            
clazz=f"{MockEmbeddingModelConnection.__module__}.{MockEmbeddingModelConnection.__name__}",
 api_key="mock-api-key"
         ),
     )
     my_agent.add_resource(
         name="mock_embedding",
+        resource_type=ResourceType.EMBEDDING_MODEL,
         instance=ResourceDescriptor(
-            clazz=MockEmbeddingModelSetup,
+            
clazz=f"{MockEmbeddingModelSetup.__module__}.{MockEmbeddingModelSetup.__name__}",
             model="test-model",
             connection="mock_embedding_conn",
         ),
     )
     my_agent.add_resource(
         name="mock_vector_store",
+        resource_type=ResourceType.VECTOR_STORE,
         instance=ResourceDescriptor(
-            clazz=MockVectorStore,
+            clazz=f"{MockVectorStore.__module__}.{MockVectorStore.__name__}",
             embedding_model="mock_embedding",
             host="localhost",
             port=8000,
diff --git a/python/flink_agents/plan/tests/test_resource_provider.py 
b/python/flink_agents/plan/tests/test_resource_provider.py
index 5c64f87a..8a0597f6 100644
--- a/python/flink_agents/plan/tests/test_resource_provider.py
+++ b/python/flink_agents/plan/tests/test_resource_provider.py
@@ -40,7 +40,7 @@ def resource_provider() -> ResourceProvider:  # noqa: D103
     return PythonResourceProvider(
         name="mock",
         type=MockChatModelImpl.resource_type(),
-        descriptor=ResourceDescriptor(clazz=MockChatModelImpl, host="8.8.8.8", 
desc="mock chat model"),
+        
descriptor=ResourceDescriptor(clazz=f"{MockChatModelImpl.__module__}.{MockChatModelImpl.__name__}",
 host="8.8.8.8", desc="mock chat model"),
     )
 
 
diff --git a/python/flink_agents/runtime/tests/test_built_in_actions.py 
b/python/flink_agents/runtime/tests/test_built_in_actions.py
index 240dbac3..618b0599 100644
--- a/python/flink_agents/runtime/tests/test_built_in_actions.py
+++ b/python/flink_agents/runtime/tests/test_built_in_actions.py
@@ -126,14 +126,14 @@ class MyAgent(Agent):
     @staticmethod
     def mock_connection() -> ResourceDescriptor:
         """Chat model server can be used by ChatModel."""
-        return ResourceDescriptor(clazz=MockChatModelConnection)
+        return 
ResourceDescriptor(clazz=f"{MockChatModelConnection.__module__}.{MockChatModelConnection.__name__}")
 
     @chat_model_setup
     @staticmethod
     def mock_chat_model() -> ResourceDescriptor:
         """Chat model can be used in action."""
         return ResourceDescriptor(
-            clazz=MockChatModel,
+            clazz=f"{MockChatModel.__module__}.{MockChatModel.__name__}",
             connection="mock_connection",
             prompt="prompt",
             tools=["add"],
diff --git a/python/flink_agents/runtime/tests/test_get_resource_in_action.py 
b/python/flink_agents/runtime/tests/test_get_resource_in_action.py
index e4cc9f82..bd02ca35 100644
--- a/python/flink_agents/runtime/tests/test_get_resource_in_action.py
+++ b/python/flink_agents/runtime/tests/test_get_resource_in_action.py
@@ -46,7 +46,7 @@ class MyAgent(Agent):  # noqa: D101
     @chat_model_setup
     @staticmethod
     def mock_chat_model() -> ResourceDescriptor:  # noqa: D102
-        return ResourceDescriptor(clazz=MockChatModelImpl, host="8.8.8.8",
+        return 
ResourceDescriptor(clazz=f"{MockChatModelImpl.__module__}.{MockChatModelImpl.__name__}",
 host="8.8.8.8",
                                   desc="mock chat model just for testing.", 
connection="mock")
 
     @tool

Reply via email to