This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git

commit 72cac0448d4aa2fb0461b45c762e48e5616131c9
Author: youjin <[email protected]>
AuthorDate: Mon Jan 26 11:15:07 2026 +0800

    [api][python] Introduce Hierarchical ResourceClassName for Cross-Language 
Resource Creation
---
 python/flink_agents/api/resource.py                | 145 ++++++++++++++-------
 .../chat_model_integration_agent.py                |  20 +--
 .../e2e_tests_mcp/mcp_test.py                      |   8 +-
 .../e2e_tests_integration/long_term_memory_test.py |  12 +-
 .../e2e_tests_integration/react_agent_test.py      |  10 +-
 .../chat_model_cross_language_agent.py             |  16 +--
 .../embedding_model_cross_language_agent.py        |  14 +-
 .../vector_store_cross_language_agent.py           |  22 ++--
 .../agents/custom_types_and_resources.py           |   4 +-
 .../quickstart/agents/product_suggestion_agent.py  |   4 +-
 .../quickstart/agents/review_analysis_agent.py     |   4 +-
 .../examples/quickstart/react_agent_example.py     |  10 +-
 .../flink_agents/examples/rag/rag_agent_example.py |  12 +-
 13 files changed, 170 insertions(+), 111 deletions(-)

diff --git a/python/flink_agents/api/resource.py 
b/python/flink_agents/api/resource.py
index fa0e5b4b..f7f7aee8 100644
--- a/python/flink_agents/api/resource.py
+++ b/python/flink_agents/api/resource.py
@@ -211,55 +211,102 @@ def get_resource_class(module_path: str, class_name: 
str) -> Type[Resource]:
     module = importlib.import_module(module_path)
     return getattr(module, class_name)
 
+class ResourceName:
+    """Hierarchical resource class names for pointing to a resource implementation in
+    ResourceDescriptor.
 
-class Constant:
-    """Constant strings for pointing a built-in resource implementation."""
+    Structure:
+        - Python implementation: ResourceName.<ResourceType>.<PROVIDER>_<KIND>
+        - Java implementation: ResourceName.<ResourceType>.Java.<PROVIDER>_<KIND>
 
-    # Built-in ChatModel
-    # java wrapper
-    JAVA_CHAT_MODEL_CONNECTION = (
-        "flink_agents.api.chat_models.java_chat_model.JavaChatModelConnection"
-    )
-    JAVA_CHAT_MODEL_SETUP = (
-        "flink_agents.api.chat_models.java_chat_model.JavaChatModelSetup"
-    )
-    # ollama
-    OLLAMA_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection"
-    OLLAMA_CHAT_MODEL_SETUP = (
-        
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup"
-    )
-    # anthropic
-    ANTHROPIC_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection"
-    ANTHROPIC_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup"
-    # Azure
-    TONGYI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection"
-    TONGYI_CHAT_MODEL_SETUP = (
-        
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup"
-    )
-    # OpenAI
-    OPENAI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection"
-    OPENAI_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup"
-
-    # Built-in EmbeddingModel
-    # java wrapper
-    JAVA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelConnection"
-    JAVA_EMBEDDING_MODEL_SETUP = (
-        
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelSetup"
-    )
-    # ollama
-    OLLAMA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection"
-    OLLAMA_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup"
-
-    # OpenAI
-    OPENAI_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection"
-    OPENAI_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup"
-
-    # Built-in VectorStore
-    # java wrapper
-    JAVA_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaVectorStore"
-    JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaCollectionManageableVectorStore"
-    # chroma
-    CHROMA_VECTOR_STORE = 
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore"
-
-    # MCP
+    Example usage:
+        # Python implementation
+        ResourceName.ChatModel.OLLAMA_CONNECTION
+        ResourceName.ChatModel.OPENAI_SETUP
+
+        # Java implementation
+        ResourceName.ChatModel.Java.OLLAMA_CONNECTION
+    """
+
+    class ChatModel:
+        """ChatModel resource names."""
+
+        # Anthropic
+        ANTHROPIC_CONNECTION = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection"
+        ANTHROPIC_SETUP = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup"
+
+        # Ollama
+        OLLAMA_CONNECTION = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection"
+        OLLAMA_SETUP = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup"
+
+        # OpenAI
+        OPENAI_CONNECTION = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection"
+        OPENAI_SETUP = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup"
+
+        # Tongyi
+        TONGYI_CONNECTION = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection"
+        TONGYI_SETUP = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup"
+
+        # Java Wrapper
+        JAVA_WRAPPER_CONNECTION = 
"flink_agents.api.chat_models.java_chat_model.JavaChatModelConnection"
+        JAVA_WRAPPER_SETUP = 
"flink_agents.api.chat_models.java_chat_model.JavaChatModelSetup"
+
+        class Java:
+            """Java implementations of ChatModel."""
+
+            # Anthropic
+            ANTHROPIC_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelConnection"
+            ANTHROPIC_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelSetup"
+
+            # Azure
+            AZURE_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelConnection"
+            AZURE_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelSetup"
+
+            # Ollama
+            OLLAMA_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection"
+            OLLAMA_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup"
+
+            # OpenAI
+            OPENAI_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelConnection"
+            OPENAI_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelSetup"
+
+    class EmbeddingModel:
+        """EmbeddingModel resource names."""
+
+        # Ollama
+        OLLAMA_CONNECTION = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection"
+        OLLAMA_SETUP = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup"
+
+        # OpenAI
+        OPENAI_CONNECTION = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection"
+        OPENAI_SETUP = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup"
+
+        # Java Wrapper
+        JAVA_WRAPPER_CONNECTION = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelConnection"
+        JAVA_WRAPPER_SETUP = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelSetup"
+
+        class Java:
+            """Java implementations of EmbeddingModel."""
+
+            # Ollama
+            OLLAMA_CONNECTION = 
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection"
+            OLLAMA_SETUP = 
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup"
+
+    class VectorStore:
+        """VectorStore resource names."""
+
+        # Chroma
+        CHROMA_VECTOR_STORE = 
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore"
+
+        # Java Wrapper
+        JAVA_WRAPPER_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaVectorStore"
+        JAVA_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaCollectionManageableVectorStore"
+
+        class Java:
+            """Java implementations of VectorStore."""
+
+            # Elasticsearch
+            ELASTICSEARCH_VECTOR_STORE = 
"org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore"
+
+    # MCP resource names
     MCP_SERVER = "flink_agents.integrations.mcp.mcp.MCPServer"
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
index b68a2f1a..427f642e 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_integration/chat_model_integration_agent.py
@@ -28,8 +28,8 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
 )
 from flink_agents.api.runner_context import RunnerContext
 
@@ -42,21 +42,21 @@ class ChatModelTestAgent(Agent):
     def openai_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for openai model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.OPENAI_CHAT_MODEL_CONNECTION, 
api_key=os.environ.get("OPENAI_API_KEY")
+            clazz=ResourceName.ChatModel.OPENAI_CONNECTION, 
api_key=os.environ.get("OPENAI_API_KEY")
         )
 
     @chat_model_connection
     @staticmethod
     def tongyi_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for tongyi model service 
connection."""
-        return ResourceDescriptor(clazz=Constant.TONGYI_CHAT_MODEL_CONNECTION)
+        return 
ResourceDescriptor(clazz=ResourceName.ChatModel.TONGYI_CONNECTION)
 
     @chat_model_connection
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=240.0
         )
 
     @chat_model_setup
@@ -66,14 +66,14 @@ class ChatModelTestAgent(Agent):
         model_provider = os.environ.get("MODEL_PROVIDER")
         if model_provider == "Tongyi":
             return ResourceDescriptor(
-                clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.TONGYI_SETUP,
                 connection="tongyi_connection",
                 model=os.environ.get("TONGYI_CHAT_MODEL", "qwen-plus"),
                 tools=["add"],
             )
         elif model_provider == "Ollama":
             return ResourceDescriptor(
-                clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.OLLAMA_SETUP,
                 connection="ollama_connection",
                 model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
                 tools=["add"],
@@ -81,7 +81,7 @@ class ChatModelTestAgent(Agent):
             )
         elif model_provider == "OpenAI":
             return ResourceDescriptor(
-                clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.OPENAI_SETUP,
                 connection="openai_connection",
                 model=os.environ.get("OPENAI_CHAT_MODEL", "gpt-3.5-turbo"),
                 tools=["add"],
@@ -97,20 +97,20 @@ class ChatModelTestAgent(Agent):
         model_provider = os.environ.get("MODEL_PROVIDER")
         if model_provider == "Tongyi":
             return ResourceDescriptor(
-                clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.TONGYI_SETUP,
                 connection="tongyi_connection",
                 model=os.environ.get("TONGYI_CHAT_MODEL", "qwen-plus"),
             )
         elif model_provider == "Ollama":
             return ResourceDescriptor(
-                clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.OLLAMA_SETUP,
                 connection="ollama_connection",
                 model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
                 extract_reasoning=True,
             )
         elif model_provider == "OpenAI":
             return ResourceDescriptor(
-                clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
+                clazz=ResourceName.ChatModel.OPENAI_SETUP,
                 connection="openai_connection",
                 model=os.environ.get("OPENAI_CHAT_MODEL", "gpt-3.5-turbo"),
             )
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py 
b/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
index 37da6e26..a5f5efc4 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_integration/e2e_tests_mcp/mcp_test.py
@@ -47,8 +47,8 @@ from flink_agents.api.events.chat_event import 
ChatRequestEvent, ChatResponseEve
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
 )
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.e2e_tests.test_utils import pull_model
@@ -71,14 +71,14 @@ class MyMCPAgent(Agent):
     @staticmethod
     def my_mcp_server() -> ResourceDescriptor:
         """Define MCP server connection."""
-        return ResourceDescriptor(clazz=Constant.MCP_SERVER, 
endpoint=MCP_SERVER_ENDPOINT)
+        return ResourceDescriptor(clazz=ResourceName.MCP_SERVER, 
endpoint=MCP_SERVER_ENDPOINT)
 
     @chat_model_connection
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection for Ollama."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=240.0
         )
 
     @chat_model_setup
@@ -86,7 +86,7 @@ class MyMCPAgent(Agent):
     def math_chat_model() -> ResourceDescriptor:
         """ChatModel using MCP prompt and tool."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_connection",
             model=OLLAMA_MODEL,
             prompt="ask_sum",  # MCP prompt registered from my_mcp_server
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/long_term_memory_test.py 
b/python/flink_agents/e2e_tests/e2e_tests_integration/long_term_memory_test.py
index 339bf526..46ed4427 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_integration/long_term_memory_test.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_integration/long_term_memory_test.py
@@ -55,8 +55,8 @@ from flink_agents.api.memory.long_term_memory import (
     MemorySetItem,
 )
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
 )
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.e2e_tests.test_utils import pull_model
@@ -124,7 +124,7 @@ class LongTermMemoryAgent(Agent):
     def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=480.0
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=480.0
         )
 
     @chat_model_setup
@@ -132,7 +132,7 @@ class LongTermMemoryAgent(Agent):
     def ollama_qwen3() -> ResourceDescriptor:
         """ChatModel which focus on math, and reuse ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_connection",
             model=OLLAMA_CHAT_MODEL,
             extract_reasoning=True,
@@ -143,14 +143,14 @@ class LongTermMemoryAgent(Agent):
     @staticmethod
     def ollama_embedding_connection() -> ResourceDescriptor:  # noqa D102
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION, 
request_timeout=240.0
+            clazz=ResourceName.EmbeddingModel.OLLAMA_CONNECTION, 
request_timeout=240.0
         )
 
     @embedding_model_setup
     @staticmethod
     def ollama_nomic_embed_text() -> ResourceDescriptor:  # noqa D102
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_SETUP,
             connection="ollama_embedding_connection",
             model=OLLAMA_EMBEDDING_MODEL,
         )
@@ -160,7 +160,7 @@ class LongTermMemoryAgent(Agent):
     def chroma_vector_store() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=Constant.CHROMA_VECTOR_STORE,
+            clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
             embedding_model="ollama_nomic_embed_text",
             persist_directory=chromadb_path,
         )
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py 
b/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
index dee3bcea..f2ca48ea 100644
--- a/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
+++ b/python/flink_agents/e2e_tests/e2e_tests_integration/react_agent_test.py
@@ -35,8 +35,8 @@ from flink_agents.api.core_options import AgentConfigOptions, 
ErrorHandlingStrat
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
     ResourceType,
 )
 from flink_agents.api.tools.tool import Tool
@@ -87,7 +87,7 @@ def test_react_agent_on_local_runner() -> None:  # noqa: D103
         env.add_resource(
             "ollama",
             ResourceType.CHAT_MODEL_CONNECTION,
-            ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=240.0),
+            ResourceDescriptor(clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=240.0),
         )
         .add_resource("add", ResourceType.TOOL,  Tool.from_callable(add))
         .add_resource("multiply", ResourceType.TOOL, 
Tool.from_callable(multiply))
@@ -107,7 +107,7 @@ def test_react_agent_on_local_runner() -> None:  # noqa: 
D103
     # create ReAct agent.
     agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama",
             model=OLLAMA_MODEL,
             tools=["add", "multiply"],
@@ -166,7 +166,7 @@ def test_react_agent_on_remote_runner(tmp_path: Path) -> 
None:  # noqa: D103
         env.add_resource(
             "ollama",
             ResourceType.CHAT_MODEL_CONNECTION,
-            ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=240.0),
+            ResourceDescriptor(clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=240.0),
         )
         .add_resource("add", ResourceType.TOOL, Tool.from_callable(add))
         .add_resource("multiply", ResourceType.TOOL, 
Tool.from_callable(multiply))
@@ -191,7 +191,7 @@ def test_react_agent_on_remote_runner(tmp_path: Path) -> 
None:  # noqa: D103
     # create ReAct agent.
     agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama",
             model=OLLAMA_MODEL,
             tools=["add", "multiply"],
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
index 44ab63ce..709bb6a4 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/chat_model_cross_language_agent.py
@@ -30,8 +30,8 @@ from flink_agents.api.events.chat_event import 
ChatRequestEvent, ChatResponseEve
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
 )
 from flink_agents.api.runner_context import RunnerContext
 
@@ -69,7 +69,7 @@ class ChatModelCrossLanguageAgent(Agent):
     def ollama_connection_python() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=240.0
+            clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=240.0
         )
 
     @chat_model_connection
@@ -77,8 +77,8 @@ class ChatModelCrossLanguageAgent(Agent):
     def ollama_connection_java() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_CHAT_MODEL_CONNECTION,
-            
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection",
+            clazz=ResourceName.ChatModel.JAVA_WRAPPER_CONNECTION,
+            java_clazz=ResourceName.ChatModel.Java.OLLAMA_CONNECTION,
             endpoint="http://localhost:11434";,
             requestTimeout=120,
         )
@@ -88,9 +88,9 @@ class ChatModelCrossLanguageAgent(Agent):
     def math_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on math, and reuse ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.JAVA_WRAPPER_SETUP,
+            java_clazz=ResourceName.ChatModel.Java.OLLAMA_SETUP,
             connection="ollama_connection_python",
-            
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup",
             model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
             prompt="from_messages_prompt",
             tools=["add"],
@@ -102,9 +102,9 @@ class ChatModelCrossLanguageAgent(Agent):
     def creative_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on text generate, and reuse 
ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.JAVA_WRAPPER_SETUP,
+            java_clazz=ResourceName.ChatModel.Java.OLLAMA_SETUP,
             connection="ollama_connection_java",
-            
java_clazz="org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup",
             model=os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:1.7b"),
             prompt="from_text_prompt",
             extract_reasoning=True,
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
index ba701898..56a85ffc 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/embedding_model_cross_language_agent.py
@@ -24,7 +24,11 @@ from flink_agents.api.decorators import (
     embedding_model_setup,
 )
 from flink_agents.api.events.event import InputEvent, OutputEvent
-from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
+from flink_agents.api.resource import (
+    ResourceDescriptor,
+    ResourceName,
+    ResourceType,
+)
 from flink_agents.api.runner_context import RunnerContext
 
 
@@ -36,8 +40,8 @@ class EmbeddingModelCrossLanguageAgent(Agent):
     def embedding_model_connection() -> ResourceDescriptor:
         """EmbeddingModelConnection responsible for ollama model service 
connection."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_EMBEDDING_MODEL_CONNECTION,
-            
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection",
+            clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_CONNECTION,
+            java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_CONNECTION,
             host="http://localhost:11434";,
         )
 
@@ -46,8 +50,8 @@ class EmbeddingModelCrossLanguageAgent(Agent):
     def embedding_model() -> ResourceDescriptor:
         """EmbeddingModel which focus on math, and reuse 
ChatModelConnection."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_EMBEDDING_MODEL_SETUP,
-            
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup",
+            clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_SETUP,
+            java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_SETUP,
             connection="embedding_model_connection",
             model=os.environ.get("OLLAMA_EMBEDDING_MODEL", 
"nomic-embed-text:latest"),
         )
diff --git 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
index 576dc244..cc41e5a1 100644
--- 
a/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
+++ 
b/python/flink_agents/e2e_tests/e2e_tests_resource_cross_language/vector_store_cross_language_agent.py
@@ -32,7 +32,11 @@ from flink_agents.api.events.context_retrieval_event import (
     ContextRetrievalResponseEvent,
 )
 from flink_agents.api.events.event import InputEvent, OutputEvent
-from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
+from flink_agents.api.resource import (
+    ResourceDescriptor,
+    ResourceName,
+    ResourceType,
+)
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.api.vector_stores.vector_store import (
     CollectionManageableVectorStore,
@@ -51,12 +55,12 @@ class VectorStoreCrossLanguageAgent(Agent):
         """EmbeddingModelConnection responsible for ollama model service 
connection."""
         if os.environ.get("EMBEDDING_TYPE") == "JAVA":
             return ResourceDescriptor(
-                clazz=Constant.JAVA_EMBEDDING_MODEL_CONNECTION,
-                
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection",
+                clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_CONNECTION,
+                java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_CONNECTION,
                 host="http://localhost:11434";,
             )
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_CONNECTION,
             host="http://localhost:11434";,
         )
 
@@ -66,15 +70,15 @@ class VectorStoreCrossLanguageAgent(Agent):
         """EmbeddingModel which focus on math, and reuse 
ChatModelConnection."""
         if os.environ.get("EMBEDDING_TYPE") == "JAVA":
             return ResourceDescriptor(
-                clazz=Constant.JAVA_EMBEDDING_MODEL_SETUP,
-                
java_clazz="org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup",
+                clazz=ResourceName.EmbeddingModel.JAVA_WRAPPER_SETUP,
+                java_clazz=ResourceName.EmbeddingModel.Java.OLLAMA_SETUP,
                 connection="embedding_model_connection",
                 model=os.environ.get(
                     "OLLAMA_EMBEDDING_MODEL", "nomic-embed-text:latest"
                 ),
             )
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_SETUP,
             connection="embedding_model_connection",
             model=os.environ.get("OLLAMA_EMBEDDING_MODEL", 
"nomic-embed-text:latest"),
         )
@@ -84,8 +88,8 @@ class VectorStoreCrossLanguageAgent(Agent):
     def vector_store() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=Constant.JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE,
-            
java_clazz="org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore",
+            
clazz=ResourceName.VectorStore.JAVA_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE,
+            
java_clazz=ResourceName.VectorStore.Java.ELASTICSEARCH_VECTOR_STORE,
             embedding_model="embedding_model",
             host=os.environ.get("ES_HOST"),
             index="my_documents",
diff --git 
a/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py 
b/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
index a150604b..3bd463f9 100644
--- 
a/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
+++ 
b/python/flink_agents/examples/quickstart/agents/custom_types_and_resources.py
@@ -21,7 +21,7 @@ from pydantic import BaseModel
 
 from flink_agents.api.chat_message import ChatMessage, MessageRole
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import Constant, ResourceDescriptor
+from flink_agents.api.resource import ResourceDescriptor, ResourceName
 
 # Prompt for review analysis agent.
 review_analysis_system_prompt_str = """
@@ -198,5 +198,5 @@ class ProductReviewAnalysisRes(BaseModel):
 
 # ollama chat model connection descriptor
 ollama_server_descriptor = ResourceDescriptor(
-    clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=120
+    clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, request_timeout=120
 )
diff --git 
a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py 
b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
index e82c8130..2bd5a116 100644
--- a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
@@ -29,7 +29,7 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import Constant, ResourceDescriptor
+from flink_agents.api.resource import ResourceDescriptor, ResourceName
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductSuggestion,
@@ -65,7 +65,7 @@ class ProductSuggestionAgent(Agent):
     def generate_suggestion_model() -> ResourceDescriptor:
         """ChatModel which focus on generating product suggestions."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="generate_suggestion_prompt",
diff --git 
a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py 
b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
index b6abc284..18f0326e 100644
--- a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
@@ -30,7 +30,7 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import Constant, ResourceDescriptor
+from flink_agents.api.resource import ResourceDescriptor, ResourceName
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReviewAnalysisRes,
@@ -81,7 +81,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
diff --git a/python/flink_agents/examples/quickstart/react_agent_example.py 
b/python/flink_agents/examples/quickstart/react_agent_example.py
index 9f883c15..ca309975 100644
--- a/python/flink_agents/examples/quickstart/react_agent_example.py
+++ b/python/flink_agents/examples/quickstart/react_agent_example.py
@@ -23,7 +23,11 @@ from pyflink.datastream.connectors.file_system import 
FileSource, StreamFormat
 
 from flink_agents.api.agents.react_agent import ReActAgent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
-from flink_agents.api.resource import Constant, ResourceDescriptor, 
ResourceType
+from flink_agents.api.resource import (
+    ResourceDescriptor,
+    ResourceName,
+    ResourceType,
+)
 from flink_agents.api.tools.tool import Tool
 from flink_agents.examples.quickstart.agents.custom_types_and_resources import 
(
     ProductReview,
@@ -54,7 +58,7 @@ def main() -> None:
     agents_env.add_resource(
         "ollama_server",
         ResourceType.CHAT_MODEL_CONNECTION,
-        ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, 
request_timeout=120),
+        ResourceDescriptor(clazz=ResourceName.ChatModel.OLLAMA_CONNECTION, 
request_timeout=120),
     ).add_resource(
         "notify_shipping_manager", ResourceType.TOOL, 
Tool.from_callable(notify_shipping_manager)
     )
@@ -79,7 +83,7 @@ def main() -> None:
     # Create react agent
     review_analysis_react_agent = ReActAgent(
         chat_model=ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             tools=["notify_shipping_manager"],
diff --git a/python/flink_agents/examples/rag/rag_agent_example.py 
b/python/flink_agents/examples/rag/rag_agent_example.py
index 0e44f909..8d9352a2 100644
--- a/python/flink_agents/examples/rag/rag_agent_example.py
+++ b/python/flink_agents/examples/rag/rag_agent_example.py
@@ -35,8 +35,8 @@ from flink_agents.api.events.event import InputEvent, 
OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
 from flink_agents.api.resource import (
-    Constant,
     ResourceDescriptor,
+    ResourceName,
     ResourceType,
 )
 from flink_agents.api.runner_context import RunnerContext
@@ -78,7 +78,7 @@ Please provide a helpful answer based on the context 
provided."""
     def text_embedder() -> ResourceDescriptor:
         """Embedding model setup for generating text embeddings."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
+            clazz=ResourceName.EmbeddingModel.OLLAMA_SETUP,
             connection="ollama_embedding_connection",
             model=OLLAMA_EMBEDDING_MODEL,
         )
@@ -88,7 +88,7 @@ Please provide a helpful answer based on the context 
provided."""
     def knowledge_base() -> ResourceDescriptor:
         """Vector store setup for knowledge base."""
         return ResourceDescriptor(
-            clazz=Constant.CHROMA_VECTOR_STORE,
+            clazz=ResourceName.VectorStore.CHROMA_VECTOR_STORE,
             embedding_model="text_embedder",
             collection="example_knowledge_base",
         )
@@ -98,7 +98,7 @@ Please provide a helpful answer based on the context 
provided."""
     def chat_model() -> ResourceDescriptor:
         """Chat model setup for generating responses."""
         return ResourceDescriptor(
-            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+            clazz=ResourceName.ChatModel.OLLAMA_SETUP,
             connection="ollama_chat_connection",
             model=OLLAMA_CHAT_MODEL
         )
@@ -174,8 +174,8 @@ if __name__ == "__main__":
     agents_env = AgentsExecutionEnvironment.get_execution_environment()
 
     # Setup Ollama embedding and chat model connections
-    agents_env.add_resource("ollama_embedding_connection", 
ResourceType.EMBEDDING_MODEL_CONNECTION, 
ResourceDescriptor(clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION))
-    agents_env.add_resource("ollama_chat_connection", 
ResourceType.EMBEDDING_MODEL, 
ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION))
+    agents_env.add_resource("ollama_embedding_connection", 
ResourceType.EMBEDDING_MODEL_CONNECTION, 
ResourceDescriptor(clazz=ResourceName.EmbeddingModel.OLLAMA_CONNECTION))
+    agents_env.add_resource("ollama_chat_connection", 
ResourceType.EMBEDDING_MODEL, 
ResourceDescriptor(clazz=ResourceName.ChatModel.OLLAMA_CONNECTION))
 
     output_list = agents_env.from_list(input_list).apply(agent).to_list()
 


Reply via email to