This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git

commit fa5bb7eb7d8041e25c783525570f22b65c916816
Author: youjin <[email protected]>
AuthorDate: Mon Jan 26 12:02:40 2026 +0800

    [doc] Introduce Hierarchical ResourceClassName for Cross-Language Resource 
Creation
---
 docs/content/docs/development/chat_models.md       | 73 +++++++++++-----------
 docs/content/docs/development/embedding_models.md  | 46 ++++++--------
 docs/content/docs/development/mcp.md               | 18 +++---
 docs/content/docs/development/react_agent.md       |  4 +-
 docs/content/docs/development/vector_stores.md     | 51 +++++++--------
 docs/content/docs/development/workflow_agent.md    |  4 +-
 .../docs/get-started/quickstart/react_agent.md     |  6 +-
 .../docs/get-started/quickstart/workflow_agent.md  |  4 +-
 8 files changed, 97 insertions(+), 109 deletions(-)

diff --git a/docs/content/docs/development/chat_models.md 
b/docs/content/docs/development/chat_models.md
index 776beaa9..1bcbcd7f 100644
--- a/docs/content/docs/development/chat_models.md
+++ b/docs/content/docs/development/chat_models.md
@@ -65,7 +65,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=30.0
         )
@@ -74,7 +74,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_connection",
             model="qwen3:8b",
             temperature=0.7
@@ -106,14 +106,14 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor ollamaConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_CONNECTION)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_CONNECTION)
                 .addInitialArgument("endpoint", "http://localhost:11434";)
                 .build();
     }
 
     @ChatModelSetup
     public static ResourceDescriptor ollamaChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "ollamaConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .build();
@@ -196,7 +196,7 @@ Azure AI is only supported in Java currently. To use Azure 
AI from Python agents
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor azureAIConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.AZURE_CHAT_MODEL_CONNECTION)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.AZURE_CONNECTION)
                 .addInitialArgument("endpoint", 
"https://your-resource.inference.ai.azure.com";)
                 .addInitialArgument("apiKey", "your-api-key-here")
                 .build();
@@ -204,7 +204,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor azureAIChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.AZURE_CHAT_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.AZURE_SETUP)
                 .addInitialArgument("connection", "azureAIConnection")
                 .addInitialArgument("model", "gpt-4o")
                 .build();
@@ -314,7 +314,7 @@ class MyAgent(Agent):
     @staticmethod
     def anthropic_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.ANTHROPIC_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.ANTHROPIC_CONNECTION,
             api_key="<your-api-key>",
             max_retries=3,
             timeout=60.0
@@ -324,7 +324,7 @@ class MyAgent(Agent):
     @staticmethod
     def anthropic_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.ANTHROPIC_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.ANTHROPIC_SETUP,
             connection="anthropic_connection",
             model="claude-sonnet-4-20250514",
             max_tokens=2048,
@@ -340,7 +340,7 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor anthropicConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(AnthropicChatModelConnection.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.ANTHROPIC_CONNECTION)
                 .addInitialArgument("api_key", "<your-api-key>")
                 .addInitialArgument("timeout", 120)
                 .addInitialArgument("max_retries", 3)
@@ -349,7 +349,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor anthropicChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(AnthropicChatModelSetup.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.ANTHROPIC_SETUP)
                 .addInitialArgument("connection", "anthropicConnection")
                 .addInitialArgument("model", "claude-sonnet-4-20250514")
                 .addInitialArgument("temperature", 0.7d)
@@ -456,7 +456,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=120.0
         )
@@ -465,7 +465,7 @@ class MyAgent(Agent):
     @staticmethod
     def my_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_connection",
             model="qwen3:8b",
             temperature=0.7,
@@ -483,7 +483,7 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor ollamaConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_CONNECTION)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_CONNECTION)
                 .addInitialArgument("endpoint", "http://localhost:11434";)
                 .addInitialArgument("requestTimeout", 120)
                 .build();
@@ -491,7 +491,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor ollamaChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "ollamaConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .build();
@@ -614,7 +614,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.OPENAI_CONNECTION,
             api_key="<your-api-key>",
             api_base_url="https://api.openai.com/v1";,
             max_retries=3,
@@ -625,7 +625,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OPENAI_SETUP,
             connection="openai_connection",
             model="gpt-4",
             temperature=0.7,
@@ -641,7 +641,7 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor openaiConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(OpenAIChatModelConnection.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OPENAI_CONNECTION)
                 .addInitialArgument("api_key", "<your-api-key>")
                 .addInitialArgument("api_base_url", 
"https://api.openai.com/v1";)
                 .addInitialArgument("timeout", 60)
@@ -651,7 +651,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor openaiChatModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(OpenAIChatModelSetup.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OPENAI_SETUP)
                 .addInitialArgument("connection", "openaiConnection")
                 .addInitialArgument("model", "gpt-4")
                 .addInitialArgument("temperature", 0.7d)
@@ -719,7 +719,7 @@ class MyAgent(Agent):
     @staticmethod
     def tongyi_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.TONGYI_CHAT_MODEL_CONNECTION,
+            clazz=ResourceName.ChatModel.TONGYI_CONNECTION,
             api_key="your-api-key-here",  # Or set DASHSCOPE_API_KEY env var
             request_timeout=60.0
         )
@@ -728,7 +728,7 @@ class MyAgent(Agent):
     @staticmethod
     def tongyi_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.TONGYI_SETUP,
             connection="tongyi_connection",
             model="qwen-plus",
             temperature=0.7,
@@ -758,6 +758,7 @@ Flink Agents supports cross-language chat model 
integration, allowing you to use
 
 {{< hint warning >}}
 **Limitations:**
+
 - Cross-language resources are currently supported only when [running in 
Flink]({{< ref "docs/operations/deployment#run-in-flink" >}}), not in local 
development mode
 - Complex object serialization between languages may have limitations
 {{< /hint >}}
@@ -766,8 +767,8 @@ Flink Agents supports cross-language chat model 
integration, allowing you to use
 
 To leverage chat model supports provided in a different language, you need to 
declare the resource within a built-in cross-language wrapper, and specify the 
target provider as an argument:
 
-- **Using Java chat models in Python**: Use 
`Constant.JAVA_CHAT_MODEL_CONNECTION` and `Constant.JAVA_CHAT_MODEL_SETUP`, 
specifying the Java provider class via the `java_clazz` parameter
-- **Using Python chat models in Java**: Use 
`Constant.PYTHON_CHAT_MODEL_CONNECTION` and `Constant.PYTHON_CHAT_MODEL_SETUP`, 
specifying the Python provider via `module` and `clazz` parameters
+- **Using Java chat models in Python**: Use 
`ResourceName.ChatModel.JAVA_WRAPPER_CONNECTION` and 
`ResourceName.ChatModel.JAVA_WRAPPER_SETUP`, specifying the Java provider class 
via the `java_clazz` parameter
+- **Using Python chat models in Java**: Use 
`ResourceName.ChatModel.PYTHON_WRAPPER_CONNECTION` and 
`ResourceName.ChatModel.PYTHON_WRAPPER_SETUP`, specifying the Python provider 
via the `pythonClazz` parameter
 
 
 
@@ -784,13 +785,13 @@ class MyAgent(Agent):
     def java_chat_model_connection() -> ResourceDescriptor:
         # In pure Java, the equivalent ResourceDescriptor would be:
         # ResourceDescriptor.Builder
-        #     .newBuilder(Constant.OllamaChatModelConnection)
+        #     .newBuilder(ResourceName.ChatModel.OLLAMA_CONNECTION)
         #     .addInitialArgument("endpoint", "http://localhost:11434";)
         #     .addInitialArgument("requestTimeout", 120)
         #     .build();
         return ResourceDescriptor(
-            clazz=Constant.JAVA_CHAT_MODEL_CONNECTION,
-            
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection",
+            clazz=ResourceName.ChatModel.JAVA_WRAPPER_CONNECTION,
+            java_clazz=ResourceName.ChatModel.Java.OLLAMA_CONNECTION,
             endpoint="http://localhost:11434";,
             requestTimeout=120,
         )
@@ -801,7 +802,7 @@ class MyAgent(Agent):
     def java_chat_model() -> ResourceDescriptor:
         # In pure Java, the equivalent ResourceDescriptor would be:
         # ResourceDescriptor.Builder
-        #     .newBuilder(Constant.OllamaChatModelSetup)
+        #     .newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
         #     .addInitialArgument("connection", "java_chat_model_connection")
         #     .addInitialArgument("model", "qwen3:8b")
         #     .addInitialArgument("prompt", "my_prompt")
@@ -809,8 +810,8 @@ class MyAgent(Agent):
         #     .addInitialArgument("extractReasoning", true)
         #     .build();
         return ResourceDescriptor(
-            clazz=Constant.JAVA_CHAT_MODEL_SETUP,
-               
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup",
+            clazz=ResourceName.ChatModel.JAVA_WRAPPER_SETUP,
+               java_clazz=ResourceName.ChatModel.Java.OLLAMA_SETUP,
             connection="java_chat_model_connection",
             model="qwen3:8b",
             prompt="my_prompt",
@@ -847,13 +848,11 @@ public class MyAgent extends Agent {
     public static ResourceDescriptor pythonChatModelConnection() {
         // In pure Python, the equivalent ResourceDescriptor would be:
         // ResourceDescriptor(
-        //     clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
+        //     clazz=ResourceName.ChatModel.OLLAMA_CONNECTION,
         //     request_timeout=120.0
         // )
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.PYTHON_CHAT_MODEL_CONNECTION)
-                .addInitialArgument(
-                        "module", 
"flink_agents.integrations.chat_models.ollama_chat_model")
-                .addInitialArgument("clazz", "OllamaChatModelConnection")
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.PYTHON_WRAPPER_CONNECTION)
+                .addInitialArgument("pythonClazz", 
ResourceName.ChatModel.Python.OLLAMA_CONNECTION)
                 .addInitialArgument("request_timeout", 120.0)
                 .build();
     }
@@ -862,16 +861,14 @@ public class MyAgent extends Agent {
     public static ResourceDescriptor pythonChatModel() {
         // In pure Python, the equivalent ResourceDescriptor would be:
         // ResourceDescriptor(
-        //     clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+        //     clazz=ResourceName.ChatModel.OLLAMA_SETUP,
         //     connection="pythonChatModelConnection",
         //     model="qwen3:8b",
         //     tools=["tool1", "tool2"],
         //     extract_reasoning=True
         // )
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.PYTHON_CHAT_MODEL_SETUP)
-                .addInitialArgument(
-                        "module", 
"flink_agents.integrations.chat_models.ollama_chat_model")
-                .addInitialArgument("clazz", "OllamaChatModelSetup")
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.PYTHON_WRAPPER_SETUP)
+                .addInitialArgument("pythonClazz", 
ResourceName.ChatModel.Python.OLLAMA_SETUP)
                 .addInitialArgument("connection", "pythonChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("tools", List.of("tool1", "tool2"))
diff --git a/docs/content/docs/development/embedding_models.md 
b/docs/content/docs/development/embedding_models.md
index 400c477d..dc0af66b 100644
--- a/docs/content/docs/development/embedding_models.md
+++ b/docs/content/docs/development/embedding_models.md
@@ -65,7 +65,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OPENAI_CONNECTION,
             api_key="your-api-key-here",
             base_url="https://api.openai.com/v1";,
             request_timeout=30.0
@@ -75,7 +75,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OPENAI_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here"
         )
@@ -132,7 +132,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=30.0
         )
@@ -141,7 +141,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_SETUP,
             connection="ollama_connection",
             model="nomic-embed-text",
             truncate=True,
@@ -185,7 +185,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OPENAI_CONNECTION,
             api_key="your-api-key-here",
             base_url="https://api.openai.com/v1";,
             request_timeout=30.0,
@@ -196,7 +196,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OPENAI_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here",
             encoding_format="float"
@@ -252,8 +252,8 @@ Flink Agents supports cross-language embedding model 
integration, allowing you t
 
 To leverage embedding model supports provided in a different language, you 
need to declare the resource within a built-in cross-language wrapper, and 
specify the target provider as an argument:
 
-- **Using Java embedding models in Python**: Use 
`Constant.JAVA_EMBEDDING_MODEL_CONNECTION` and 
`Constant.JAVA_EMBEDDING_MODEL_SETUP`, specifying the Java provider class via 
the `java_clazz` parameter
-- **Using Python embedding models in Java**: Use 
`Constant.PYTHON_EMBEDDING_MODEL_CONNECTION` and 
`Constant.PYTHON_EMBEDDING_MODEL_SETUP`, specifying the Python provider via 
`module` and `clazz` parameters
+- **Using Java embedding models in Python**: Use 
`ResourceName.EmbeddingModel.JAVA_WRAPPER_CONNECTION` and 
`ResourceName.EmbeddingModel.JAVA_WRAPPER_SETUP`, specifying the Java provider 
class via the `java_clazz` parameter
+- **Using Python embedding models in Java**: Use 
`ResourceName.EmbeddingModel.PYTHON_WRAPPER_CONNECTION` and 
`ResourceName.EmbeddingModel.PYTHON_WRAPPER_SETUP`, specifying the Python 
provider via the `pythonClazz` parameter
 
 ### Usage Example
 
@@ -269,12 +269,12 @@ class MyAgent(Agent):
     def java_embedding_connection() -> ResourceDescriptor:
         # In pure Java, the equivalent ResourceDescriptor would be:
         # ResourceDescriptor.Builder
-        #     .newBuilder(Constant.OllamaEmbeddingModelConnection)
+        #     .newBuilder(ResourceName.EmbeddingModel.OLLAMA_CONNECTION)
         #     .addInitialArgument("host", "http://localhost:11434";)
         #     .build();
         return ResourceDescriptor(
-            clazz=Constant.JAVA_EMBEDDING_MODEL_CONNECTION,
-            
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection",
+            clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_CONNECTION,
+            java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_CONNECTION,
             host="http://localhost:11434";
         )
 
@@ -283,13 +283,13 @@ class MyAgent(Agent):
     def java_embedding_model() -> ResourceDescriptor:
         # In pure Java, the equivalent ResourceDescriptor would be:
         # ResourceDescriptor.Builder
-        #     .newBuilder(Constant.OllamaEmbeddingModelSetup)
+        #     .newBuilder(ResourceName.EmbeddingModel.OLLAMA_SETUP)
         #     .addInitialArgument("connection", "java_embedding_connection")
         #     .addInitialArgument("model", "nomic-embed-text")
         #     .build();
         return ResourceDescriptor(
-            clazz=Constant.JAVA_EMBEDDING_MODEL_SETUP,
-            
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup",
+            clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_SETUP,
+            java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_SETUP,
             connection="java_embedding_connection",
             model="nomic-embed-text"
         )
@@ -314,14 +314,11 @@ public class MyAgent extends Agent {
     public static ResourceDescriptor pythonEmbeddingConnection() {
         // In pure Python, the equivalent ResourceDescriptor would be:
         // ResourceDescriptor(
-        //     clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION,
+        //     clazz=ResourceName.EmbeddingModel.OLLAMA_CONNECTION,
         //     base_url="http://localhost:11434";
         // )
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.PYTHON_EMBEDDING_MODEL_CONNECTION)
-                .addInitialArgument(
-                        "module", 
-                        
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
-                .addInitialArgument("clazz", "OllamaEmbeddingModelConnection")
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.EmbeddingModel.PYTHON_WRAPPER_CONNECTION)
+                .addInitialArgument("pythonClazz", 
ResourceName.EmbeddingModel.Python.OLLAMA_CONNECTION)
                 .addInitialArgument("base_url", "http://localhost:11434";)
                 .build();
     }
@@ -330,15 +327,12 @@ public class MyAgent extends Agent {
     public static ResourceDescriptor pythonEmbeddingModel() {
         // In pure Python, the equivalent ResourceDescriptor would be:
         // ResourceDescriptor(
-        //     clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
+        //     clazz=ResourceName.EmbeddingModel.OLLAMA_SETUP,
         //     connection="ollama_connection",
         //     model="nomic-embed-text"
         // )
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.PYTHON_EMBEDDING_MODEL_SETUP)
-                .addInitialArgument(
-                        "module", 
-                        
"flink_agents.integrations.embedding_models.local.ollama_embedding_model")
-                .addInitialArgument("clazz", "OllamaEmbeddingModelSetup")
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.EmbeddingModel.PYTHON_WRAPPER_SETUP)
+                .addInitialArgument("pythonClazz", 
ResourceName.EmbeddingModel.Python.OLLAMA_SETUP)
                 .addInitialArgument("connection", "pythonEmbeddingConnection")
                 .addInitialArgument("model", "nomic-embed-text")
                 .build();
diff --git a/docs/content/docs/development/mcp.md 
b/docs/content/docs/development/mcp.md
index d27b3433..d7aae5ea 100644
--- a/docs/content/docs/development/mcp.md
+++ b/docs/content/docs/development/mcp.md
@@ -1,4 +1,4 @@
----
+---
 title: MCP
 weight: 9
 type: docs
@@ -40,7 +40,7 @@ class ReviewAnalysisAgent(Agent):
     @staticmethod
     def my_mcp_server() -> ResourceDescriptor:
         """Define MCP server connection."""
-        return ResourceDescriptor(clazz=Constant.MCP_SERVER, 
+        return ResourceDescriptor(clazz=ResourceName.MCP_SERVER, 
                                   endpoint="http://127.0.0.1:8000/mcp";)
 ```
 {{< /tab >}}
@@ -51,7 +51,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @MCPServer
     public static ResourceDescriptor myMcp() {
-        return ResourceDescriptor.Builder.newBuilder(Constant.MCP_SERVER)
+        return ResourceDescriptor.Builder.newBuilder(ResourceName.MCP_SERVER)
                     .addInitialArgument("endpoint", MCP_ENDPOINT)
                     .addInitialArgument("timeout", 30)
                     .build();
@@ -80,7 +80,7 @@ MCP servers can be configured with authentication:
 @staticmethod
 def authenticated_mcp_server() -> MCPServer:
     """Connect to MCP server with authentication."""
-    return ResourceDescriptor(clazz=Constant.MCP_SERVER, 
+    return ResourceDescriptor(clazz=ResourceName.MCP_SERVER, 
                               endpoint="http://api.example.com/mcp";,
                               headers={"Authorization": "Bearer your-token"})
     # Or using Basic Authentication
@@ -97,7 +97,7 @@ def authenticated_mcp_server() -> MCPServer:
 @MCPServer
 public static org.apache.flink.agents.integrations.mcp.MCPServer 
authenticatedMcpServer() {
     // Using Bearer Token Authentication
-    return ResourceDescriptor.Builder.newBuilder(Constant.MCP_SERVER)
+    return ResourceDescriptor.Builder.newBuilder(ResourceName.MCP_SERVER)
                     .addInitialArgument("endpoint", 
"http://api.example.com/mcp";)
                     .addInitialArgument("timeout", 30)
                     .addInitialArgument("auth", new 
BearerTokenAuth("your-oauth-token"))
@@ -134,14 +134,14 @@ class ReviewAnalysisAgent(Agent):
     @staticmethod
     def review_mcp_server() -> ResourceDescriptor:
         """Connect to MCP server."""
-        return ResourceDescriptor(clazz=Constant.MCP_SERVER, 
+        return ResourceDescriptor(clazz=ResourceName.MCP_SERVER, 
                                   endpoint="http://127.0.0.1:8000/mcp";)
 
     @chat_model_setup
     @staticmethod
     def review_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             # Reference MCP prompt by name like local prompt
@@ -158,7 +158,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @MCPServer
     public static ResourceDescriptor myMcp() {
-        return ResourceDescriptor.Builder.newBuilder(Constant.MCP_SERVER)
+        return ResourceDescriptor.Builder.newBuilder(ResourceName.MCP_SERVER)
                     .addInitialArgument("endpoint", 
"http://127.0.0.1:8000/mcp";)
                     .addInitialArgument("timeout", 30)
                     .build();
@@ -166,7 +166,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 // Reference MCP prompt by name like local prompt
diff --git a/docs/content/docs/development/react_agent.md 
b/docs/content/docs/development/react_agent.md
index 3bcd87c8..e559effc 100644
--- a/docs/content/docs/development/react_agent.md
+++ b/docs/content/docs/development/react_agent.md
@@ -69,7 +69,7 @@ We use `ResourceDescriptor` to describe the chat model, 
includes chat model type
 {{< tab "Python" >}}
 ```python
 chat_model_descriptor = ResourceDescriptor(
-    clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+    clazz=ResourceName.ChatModel.OLLAMA_SETUP,
     connection="my_ollama_connection",
     model="qwen3:8b",
     tools=["my_tool1, my_tool2"],
@@ -80,7 +80,7 @@ chat_model_descriptor = ResourceDescriptor(
 {{< tab "Java" >}}
 ```java
 ResourceDescriptor chatModelDescriptor =
-                
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+                
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                         .addInitialArgument("connection", "myOllamaConnection")
                         .addInitialArgument("model", "qwen3:8b")
                         .addInitialArgument(
diff --git a/docs/content/docs/development/vector_stores.md 
b/docs/content/docs/development/vector_stores.md
index a51a925c..2940da18 100644
--- a/docs/content/docs/development/vector_stores.md
+++ b/docs/content/docs/development/vector_stores.md
@@ -59,7 +59,7 @@ Flink Agents provides decorators/annotations to simplify 
vector store setup with
 @staticmethod
 def my_vector_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=Constant.CHROMA_VECTOR_STORE,
+        clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         embedding_model="embedding_model",
         collection="my_chroma_store"
     )
@@ -70,7 +70,7 @@ def my_vector_store() -> ResourceDescriptor:
 ```java
 @VectorStore
 public static ResourceDescriptor vectorStore() {
-    return 
ResourceDescriptor.Builder.newBuilder(Constant.ELASTICSEARCH_VECTOR_STORE)
+    return 
ResourceDescriptor.Builder.newBuilder(ResourceName.VectorStore.ELASTICSEARCH_VECTOR_STORE)
             .addInitialArgument("embedding_model", "embeddingModel")
             .addInitialArgument("host", "http://localhost:9200";)
             .addInitialArgument("index", "my_documents")
@@ -300,7 +300,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OPENAI_CONNECTION,
             api_key="your-api-key-here"
         )
 
@@ -308,7 +308,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OPENAI_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here"
         )
@@ -318,7 +318,7 @@ class MyAgent(Agent):
     @staticmethod
     def chroma_store() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.CHROMA_VECTOR_STORE,
+            clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
             embedding_model="openai_embedding",
             collection="my_chroma_store"
         )
@@ -352,14 +352,14 @@ public class MyAgent extends Agent {
 
     @EmbeddingModelConnection
     public static ResourceDescriptor embeddingConnection() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.EmbeddingModel.OLLAMA_CONNECTION)
                 .addInitialArgument("host", "http://localhost:11434";)
                 .build();
     }
 
     @EmbeddingModelSetup
     public static ResourceDescriptor embeddingModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_EMBEDDING_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.EmbeddingModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "embeddingConnection")
                 .addInitialArgument("model", "nomic-embed-text")
                 .build();
@@ -367,7 +367,7 @@ public class MyAgent extends Agent {
 
     @VectorStore
     public static ResourceDescriptor vectorStore() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.ELASTICSEARCH_VECTOR_STORE)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.VectorStore.ELASTICSEARCH_VECTOR_STORE)
                 .addInitialArgument("embedding_model", "embeddingModel")
                 .addInitialArgument("host", "http://localhost:9200";)
                 .addInitialArgument("index", "my_documents")
@@ -446,7 +446,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OPENAI_CONNECTION,
             api_key="your-api-key-here"
         )
 
@@ -454,7 +454,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OPENAI_SETUP,
             connection="openai_connection",
           model="your-embedding-model-here"
         )
@@ -464,7 +464,7 @@ class MyAgent(Agent):
     @staticmethod
     def chroma_store() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=Constant.CHROMA_VECTOR_STORE,
+            clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
             embedding_model="openai_embedding",
             persist_directory="/path/to/chroma/data",  # For persistent storage
             collection="my_documents",
@@ -491,7 +491,7 @@ ChromaDB supports multiple deployment modes:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=Constant.CHROMA_VECTOR_STORE,
+        clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         collection="my_documents"
         # No connection configuration needed for in-memory mode
@@ -504,7 +504,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=Constant.CHROMA_VECTOR_STORE,
+        clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         persist_directory="/path/to/chroma/data",
         collection="my_documents"
@@ -517,7 +517,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=Constant.CHROMA_VECTOR_STORE,
+        clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         host="your-chroma-server.com",
         port=8000,
@@ -531,7 +531,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=Constant.CHROMA_VECTOR_STORE,
+        clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         api_key="your-chroma-cloud-api-key",
         collection="my_documents"
@@ -585,7 +585,7 @@ Here's how to define an Elasticsearch vector store in your 
Java agent:
 ```java
 @VectorStore
 public static ResourceDescriptor vectorStore() {
-    return 
ResourceDescriptor.Builder.newBuilder(Constant.ELASTICSEARCH_VECTOR_STORE)
+    return 
ResourceDescriptor.Builder.newBuilder(ResourceName.VectorStore.ELASTICSEARCH_VECTOR_STORE)
             .addInitialArgument("embedding_model", "embeddingModel")
             .addInitialArgument("host", "http://localhost:9200";)
             .addInitialArgument("index", "my_documents")
@@ -616,8 +616,8 @@ Flink Agents supports cross-language vector store 
integration, allowing you to u
 
 To leverage vector store support provided in a different language, you need 
to declare the resource within a built-in cross-language wrapper, and specify 
the target provider as an argument:
 
-- **Using Java vector stores in Python**: Use 
`Constant.JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE`, specifying the Java 
provider class via the `java_clazz` parameter
-- **Using Python vector stores in Java**: Use 
`Constant.PYTHON_COLLECTION_MANAGEABLE_VECTOR_STORE`, specifying the Python 
provider via `module` and `clazz` parameters
+- **Using Java vector stores in Python**: Use 
`ResourceName.VectorStore.JAVA_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE`, 
specifying the Java provider class via the `java_clazz` parameter
+- **Using Python vector stores in Java**: Use 
`ResourceName.VectorStore.PYTHON_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE`, 
specifying the Python provider via the `pythonClazz` parameter
 
 ### Usage Example
 
@@ -647,15 +647,15 @@ class MyAgent(Agent):
     def java_vector_store() -> ResourceDescriptor:
         # In pure Java, the equivalent ResourceDescriptor would be:
         # ResourceDescriptor.Builder
-        #     .newBuilder(Constant.ElasticsearchVectorStore)
+        #     .newBuilder(ResourceName.VectorStore.ELASTICSEARCH_VECTOR_STORE)
         #     .addInitialArgument("embedding_model", "my_embedding_model")
         #     .addInitialArgument("host", "http://localhost:9200";)
         #     .addInitialArgument("index", "my_documents")
         #     .addInitialArgument("dims", 768)
         #     .build();
         return ResourceDescriptor(
-            clazz=Constant.JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE,
-            
java_clazz="org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore",
+            
clazz=ResourceName.VectorStore.JAVA_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE,
+            
java_clazz=ResourceName.VectorStore.Java.ELASTICSEARCH_VECTOR_STORE,
             embedding_model="my_embedding_model",
             host="http://localhost:9200";,
             index="my_documents",
@@ -699,14 +699,11 @@ public class MyAgent extends Agent {
     public static ResourceDescriptor pythonVectorStore() {
         // In pure Python, the equivalent ResourceDescriptor would be:
         // ResourceDescriptor(
-        //     clazz=Constant.CHROMA_VECTOR_STORE,
+        //     clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
         //     embedding_model="my_embedding_model",
         // )
-        return 
ResourceDescriptor.Builder.newBuilder(PYTHON_COLLECTION_MANAGEABLE_VECTOR_STORE)
-                .addInitialArgument(
-                        "module", 
-                        
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store")
-                .addInitialArgument("clazz", "ChromaVectorStore")
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.VectorStore.PYTHON_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE)
+                .addInitialArgument("pythonClazz", 
ResourceName.VectorStore.Python.CHROMA_VECTOR_STORE)
                 .addInitialArgument("embedding_model", "myEmbeddingModel")
                 .build();
     }
diff --git a/docs/content/docs/development/workflow_agent.md 
b/docs/content/docs/development/workflow_agent.md
index 230b655e..cea7f479 100644
--- a/docs/content/docs/development/workflow_agent.md
+++ b/docs/content/docs/development/workflow_agent.md
@@ -77,7 +77,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
@@ -143,7 +143,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewAnalysisModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("prompt", "reviewAnalysisPrompt")
diff --git a/docs/content/docs/get-started/quickstart/react_agent.md 
b/docs/content/docs/get-started/quickstart/react_agent.md
index e3fa22e1..48cfaff2 100644
--- a/docs/content/docs/get-started/quickstart/react_agent.md
+++ b/docs/content/docs/get-started/quickstart/react_agent.md
@@ -49,7 +49,7 @@ agents_env = 
AgentsExecutionEnvironment.get_execution_environment(env)
 agents_env.add_resource(
   "ollama_server",
   ResourceType.CHAT_MODEL_CONNECTION,
-  ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=120),
+  ResourceDescriptor(clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=120),
 ).add_resource(
   "notify_shipping_manager", ResourceType.TOOL, 
Tool.from_callable(notify_shipping_manager)
 )
@@ -90,7 +90,7 @@ Create the ReAct Agent instance, configure the chat model, 
prompt and the output
 ```python
 review_analysis_react_agent = ReActAgent(
   chat_model=ResourceDescriptor(
-      clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+      clazz=ResourceName.ChatModel.OLLAMA_SETUP,
       connection="ollama_server",
       model="qwen3:8b",
       tools=["notify_shipping_manager"],
@@ -108,7 +108,7 @@ ReActAgent reviewAnalysisReactAgent = getReActAgent();
 
  private static ReActAgent getReActAgent() {
      return new ReActAgent(
-             
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+             
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                      .addInitialArgument("connection", 
"ollamaChatModelConnection")
                      .addInitialArgument("model", "qwen3:8b")
                      .addInitialArgument(
diff --git a/docs/content/docs/get-started/quickstart/workflow_agent.md 
b/docs/content/docs/get-started/quickstart/workflow_agent.md
index 43c3e3fc..6dc3282c 100644
--- a/docs/content/docs/get-started/quickstart/workflow_agent.md
+++ b/docs/content/docs/get-started/quickstart/workflow_agent.md
@@ -119,7 +119,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
@@ -185,7 +185,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewAnalysisModel() {
-        return 
ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+        return 
ResourceDescriptor.Builder.newBuilder(ResourceName.ChatModel.OLLAMA_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("prompt", "reviewAnalysisPrompt")


Reply via email to