This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git

commit 545743cd2881d069cd0d18bcfc73d9d1bfb1f054
Author: WenjinXie <[email protected]>
AuthorDate: Wed Sep 24 16:13:08 2025 +0800

    [api] Extract ResourceDescriptor abstraction.
---
 python/flink_agents/api/agent.py                   | 301 ++++-----------------
 python/flink_agents/api/agents/react_agent.py      |  52 ++--
 python/flink_agents/api/chat_models/chat_model.py  |   4 +-
 python/flink_agents/api/execution_environment.py   | 248 ++---------------
 python/flink_agents/api/resource.py                |  24 +-
 python/flink_agents/api/tools/mcp.py               |   4 +-
 python/flink_agents/api/tools/tool.py              |  22 +-
 python/flink_agents/examples/chat_model_example.py |  77 +++---
 .../integrate_table_with_react_agent_example.py    |  18 +-
 .../quickstart/agents/product_suggestion_agent.py  |  15 +-
 .../quickstart/agents/review_analysis_agent.py     |  14 +-
 .../quickstart/product_improve_suggestion.py       |  10 +-
 .../examples/quickstart/product_review_analysis.py |   9 +-
 .../flink_agents/examples/react_agent_example.py   |  16 +-
 .../chat_models/anthropic/anthropic_chat_model.py  |   4 +-
 .../anthropic/tests/test_anthropic_chat_model.py   |   2 +-
 .../integrations/chat_models/ollama_chat_model.py  |   4 +-
 .../chat_models/openai/openai_chat_model.py        |   4 +-
 .../openai/tests/test_openai_chat_model.py         |   2 +-
 .../chat_models/tests/test_ollama_chat_model.py    |   2 +-
 .../chat_models/tests/test_tongyi_chat_model.py    |   2 +-
 .../integrations/chat_models/tongyi_chat_model.py  |   4 +-
 python/flink_agents/plan/agent_plan.py             | 176 ++----------
 python/flink_agents/plan/resource_provider.py      |  14 +
 .../python_agent_plan_compatibility_test_agent.py  |  13 +-
 .../plan/tests/resources/agent_plan.json           |   5 -
 python/flink_agents/plan/tests/test_agent_plan.py  | 133 ++++-----
 .../plan/tests/tools/test_function_tool.py         |   2 +-
 python/flink_agents/plan/tools/function_tool.py    |  16 +-
 .../runtime/tests/test_built_in_actions.py         |  24 +-
 .../runtime/tests/test_get_resource_in_action.py   |  14 +-
 31 files changed, 372 insertions(+), 863 deletions(-)

diff --git a/python/flink_agents/api/agent.py b/python/flink_agents/api/agent.py
index 6e65c36..0324dac 100644
--- a/python/flink_agents/api/agent.py
+++ b/python/flink_agents/api/agent.py
@@ -18,22 +18,13 @@
 from abc import ABC
 from typing import Any, Callable, Dict, List, Tuple, Type
 
-from flink_agents.api.chat_models.chat_model import (
-    BaseChatModelConnection,
-    BaseChatModelSetup,
-)
-from flink_agents.api.embedding_models.embedding_model import (
-    BaseEmbeddingModelConnection,
-    BaseEmbeddingModelSetup,
-)
 from flink_agents.api.events.event import Event
-from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceType
-from flink_agents.api.tools.mcp import MCPServer
-from flink_agents.api.vector_stores.vector_store import (
-    BaseVectorStoreConnection,
-    BaseVectorStoreSetup,
+from flink_agents.api.resource import (
+    ResourceDescriptor,
+    ResourceType,
+    SerializableResource,
 )
+from flink_agents.api.tools.mcp import MCPServer
 
 
 class Agent(ABC):
@@ -54,17 +45,17 @@ class Agent(ABC):
 
                 @chat_model_connection
                 @staticmethod
-                def my_connection() -> Tuple[Type[BaseChatModelConnection],
-                                       Dict[str, Any]]:
-                    return OllamaChatModelConnection, {"name": "my_connection",
-                                                       "model": "qwen2:7b",
-                                                       "base_url": 
"http://localhost:11434"}
+                def my_connection() -> ResourceDescriptor:
+                    return ResourceDescriptor(clazz=OllamaChatModelConnection,
+                                              model="qwen2:7b",
+                                              
base_url="http://localhost:11434")
 
                 @chat_model_setup
                 @staticmethod
-                def my_chat_model() -> Tuple[Type[ChatModel], Dict[str, Any]]:
-                    return OllamaChatModel, {"name": "model",
-                                             "connection": "my_connection"}
+                def my_chat_model() -> ResourceDescriptor:
+                    return ResourceDescriptor(clazz=OllamaChatModel,
+                                              connection="my_connection")
+
         * Add actions and resources to an Agent instance
         ::
 
@@ -72,12 +63,25 @@ class Agent(ABC):
             my_agent.add_action(name="my_action",
                                 events=[InputEvent],
                                 func=action_function)
-                    .add_chat_model_connection(name="my_connection",
-                                               
connection=OllamaChatModelConnection,
-                                               arg1=xxx)
-                    .add_chat_model_setup(name="my_chat_model",
-                                          chat_model=OllamaChatModelSetup,
-                                          connection="my_connection")
+                    .add_resource(
+                        name="my_connection",
+                        instance=ResourceDescriptor(
+                            clazz=OllamaChatModelConnection,
+                            arg1=xxx
+                        )
+                    )
+                    .add_resource(
+                        name="my_chat_model",
+                        instance=ResourceDescriptor(
+                            clazz=OllamaChatModelSetup,
+                            connection="my_connection"
+                        )
+                    )
     """
 
     _actions: Dict[str, Tuple[List[Type[Event]], Callable, Dict[str, Any]]]
@@ -128,239 +132,34 @@ class Agent(ABC):
         self._actions[name] = (events, func, config if config else None)
         return self
 
-    def add_prompt(self, name: str, prompt: Prompt) -> "Agent":
-        """Add prompt to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the prompt, should be unique in the same Agent.
-        prompt: Prompt
-            The prompt to be used in the agent.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.PROMPT not in self._resources:
-            self._resources[ResourceType.PROMPT] = {}
-        if name in self._resources[ResourceType.PROMPT]:
-            msg = f"Prompt {name} already defined"
-            raise ValueError(msg)
-        self._resources[ResourceType.PROMPT][name] = prompt
-        return self
-
-    def add_tool(self, name: str, func: Callable) -> "Agent":
-        """Add function tool to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the tool, should be unique in the same Agent.
-        func: Callable
-            The execution function of the tool.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.TOOL not in self._resources:
-            self._resources[ResourceType.TOOL] = {}
-        if name in self._resources[ResourceType.TOOL]:
-            msg = f"Function tool {name} already defined"
-            raise ValueError(msg)
-        self._resources[ResourceType.TOOL][name] = func
-        return self
-
-    def add_chat_model_connection(
-        self, name: str, connection: Type[BaseChatModelConnection], **kwargs: 
Any
+    def add_resource(
+        self, name: str, instance: SerializableResource | ResourceDescriptor
     ) -> "Agent":
-        """Add chat model connection to agent.
+        """Add resource to agent instance.
 
         Parameters
         ----------
         name : str
-            The name of the chat model connection, should be unique in the 
same Agent.
-        connection: Type[BaseChatModelConnection]
-            The type of chat model connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the chat model connection.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.CHAT_MODEL_CONNECTION not in self._resources:
-            self._resources[ResourceType.CHAT_MODEL_CONNECTION] = {}
-        if name in self._resources[ResourceType.CHAT_MODEL_CONNECTION]:
-            msg = f"Chat model connection {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.CHAT_MODEL_CONNECTION][name] = 
(connection, kwargs)
-        return self
-
-    def add_chat_model_setup(
-        self, name: str, chat_model: Type[BaseChatModelSetup], **kwargs: Any
-    ) -> "Agent":
-        """Add chat model setup to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the chat model, should be unique in the same Agent.
-        chat_model: Type[BaseChatModel]
-            The type of chat model.
-        **kwargs: Any
-            Initialize keyword arguments passed to the chat model setup.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.CHAT_MODEL not in self._resources:
-            self._resources[ResourceType.CHAT_MODEL] = {}
-        if name in self._resources[ResourceType.CHAT_MODEL]:
-            msg = f"Chat model setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.CHAT_MODEL][name] = (chat_model, kwargs)
-        return self
-
-    def add_embedding_model_connection(
-        self, name: str, connection: Type[BaseEmbeddingModelConnection], 
**kwargs: Any
-    ) -> "Agent":
-        """Add embedding model connection to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the embedding model connection, should be unique in 
the same
-            Agent.
-        connection: Type[BaseEmbeddingModelConnection]
-            The type of embedding model connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the embedding model 
connection.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.EMBEDDING_MODEL_CONNECTION not in self._resources:
-            self._resources[ResourceType.EMBEDDING_MODEL_CONNECTION] = {}
-        if name in self._resources[ResourceType.EMBEDDING_MODEL_CONNECTION]:
-            msg = f"Embedding model connection {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.EMBEDDING_MODEL_CONNECTION][name] = 
(connection, kwargs)
-        return self
-
-    def add_embedding_model_setup(
-        self, name: str, embedding_model: Type[BaseEmbeddingModelSetup], 
**kwargs: Any
-    ) -> "Agent":
-        """Add embedding model setup to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the embedding model, should be unique in the same 
Agent.
-        embedding_model: Type[BaseEmbeddingModelSetup]
-            The type of embedding model.
-        **kwargs: Any
-            Initialize keyword arguments passed to the embedding model setup.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.EMBEDDING_MODEL not in self._resources:
-            self._resources[ResourceType.EMBEDDING_MODEL] = {}
-        if name in self._resources[ResourceType.EMBEDDING_MODEL]:
-            msg = f"Embedding model setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.EMBEDDING_MODEL][name] = 
(embedding_model, kwargs)
-        return self
-
-    def add_mcp_server(self, name: str, mcp_server: MCPServer) -> "Agent":
-        """Add an MCP server to the agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the MCP server, should be unique in the same Agent.
-        mcp_server : MCPServer
-            The MCP server resource instance.
+            The name of the resource, should be unique in the same Agent.
+        instance: SerializableResource | ResourceDescriptor
+            The serializable resource instance, or the descriptor of resource.
 
         Returns:
         -------
         Agent
-            The modified Agent instance.
+            The agent to add the resource.
         """
-        if name in self._resources[ResourceType.MCP_SERVER]:
-            msg = f"MCP server {name} already defined"
+        if isinstance(instance, SerializableResource):
+            resource_type = instance.resource_type()
+        elif isinstance(instance, ResourceDescriptor):
+            resource_type = instance.clazz.resource_type()
+        else:
+            err_msg = f"Unexpected resource {instance}"
+            raise TypeError(err_msg)
+
+        if name in self._resources[resource_type]:
+            msg = f"{resource_type.value} {name} already defined"
             raise ValueError(msg)
-        self._resources[ResourceType.MCP_SERVER][name] = mcp_server
-        return self
-
-    def add_vector_store_connection(
-            self, name: str, connection: Type[BaseVectorStoreConnection], 
**kwargs: Any
-    ) -> "Agent":
-        """Add vector store connection to agent.
 
-        Parameters
-        ----------
-        name : str
-            The name of the vector store connection, should be unique in the 
same
-            Agent.
-        connection: Type[BaseVectorStoreConnection]
-            The type of vector store connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the vector store connection.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.VECTOR_STORE_CONNECTION not in self._resources:
-            self._resources[ResourceType.VECTOR_STORE_CONNECTION] = {}
-        if name in self._resources[ResourceType.VECTOR_STORE_CONNECTION]:
-            msg = f"Vector store connection {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.VECTOR_STORE_CONNECTION][name] = 
(connection, kwargs)
-        return self
-
-    def add_vector_store_setup(
-            self, name: str, vector_store: Type[BaseVectorStoreSetup], 
**kwargs: Any
-    ) -> "Agent":
-        """Add vector store setup to agent.
-
-        Parameters
-        ----------
-        name : str
-            The name of the vector store, should be unique in the same Agent.
-        vector_store: Type[BaseVectorStoreSetup]
-            The type of vector store.
-        **kwargs: Any
-            Initialize keyword arguments passed to the vector store setup.
-
-        Returns:
-        -------
-        Agent
-            The modified Agent instance.
-        """
-        if ResourceType.VECTOR_STORE not in self._resources:
-            self._resources[ResourceType.VECTOR_STORE] = {}
-        if name in self._resources[ResourceType.VECTOR_STORE]:
-            msg = f"Vector store setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.VECTOR_STORE][name] = (vector_store, 
kwargs)
+        self._resources[resource_type][name] = instance
         return self
diff --git a/python/flink_agents/api/agents/react_agent.py 
b/python/flink_agents/api/agents/react_agent.py
index 825cc7e..67cb0df 100644
--- a/python/flink_agents/api/agents/react_agent.py
+++ b/python/flink_agents/api/agents/react_agent.py
@@ -17,7 +17,7 @@
 
#################################################################################
 import importlib
 import json
-from typing import Any, List, cast
+from typing import Any, cast
 
 from pydantic import BaseModel, ConfigDict, model_serializer, model_validator
 from pyflink.common import Row
@@ -25,12 +25,11 @@ from pyflink.common.typeinfo import BasicType, 
BasicTypeInfo, RowTypeInfo
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
-from flink_agents.api.chat_models.chat_model import BaseChatModelSetup
 from flink_agents.api.decorators import action
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceType
+from flink_agents.api.resource import ResourceDescriptor, ResourceType
 from flink_agents.api.runner_context import RunnerContext
 
 _DEFAULT_CHAT_MODEL = "_default_chat_model"
@@ -106,11 +105,12 @@ class ReActAgent(Agent):
 
             # register resource to execution environment
             (
-                env.add_chat_model_connection(
-                    name="ollama", connection=OllamaChatModelConnection, 
model=model
+                env.add_resource(
+                    "ollama",
+                    ResourceDescriptor(clazz=OllamaChatModelConnection, 
model=model),
                 )
-                .add_tool("add", add)
-                .add_tool("multiply", multiply)
+                .add_resource("add", add)
+                .add_resource("multiply", multiply)
             )
 
             # prepare prompt
@@ -128,57 +128,39 @@ class ReActAgent(Agent):
 
             # create ReAct agent.
             agent = ReActAgent(
-                chat_model=OllamaChatModelSetup,
-                connection="ollama",
+                chat_model=ResourceDescriptor(
+                    clazz=OllamaChatModelSetup,
+                    connection="ollama",
+                    tools=["add", "multiply"],
+                ),
                 prompt=prompt,
-                tools=["add", "multiply"],
-                output_schema=OutputData,
+                output_schema=OutputData
             )
     """
 
     def __init__(
         self,
         *,
-        chat_model_setup: type[BaseChatModelSetup],
-        connection: str,
+        chat_model: ResourceDescriptor,
         prompt: Prompt | None = None,
-        tools: List[str] | None = None,
         output_schema: type[BaseModel] | RowTypeInfo | None = None,
-        **kwargs: Any,
     ) -> None:
         """Init method of ReActAgent.
 
         Parameters
         ----------
-        chat_model_setup : BaseChatModelSetup
-            The type of the chat model setup used in this ReAct agent.
-        connection: str
-            The name of the chat model connection used in chat model setup. The
-            connection should be registered in environment.
+        chat_model : ResourceDescriptor
+            The descriptor of the chat model used in this ReAct agent.
         prompt : Optional[Prompt] = None
             Prompt to instruct the llm, could include input and output example,
             task and so on.
-        tools : Optional[List[str]]
-            Tools names can be used in this ReAct agent. The tools should be 
registered
-            in environment.
         output_schema : Optional[Union[type[BaseModel], RowTypeInfo]] = None
             The schema should be RowTypeInfo or subclass of BaseModel. When 
user
             provide output schema, ReAct agent will add system prompt to 
instruct
             response format of llm, and add output parser according to the 
schema.
-        **kwargs: Any
-            The initialize arguments of chat_model_setup.
         """
         super().__init__()
-        settings = {
-            "name": _DEFAULT_CHAT_MODEL,
-            "connection": connection,
-            "tools": tools,
-        }
-        settings.update(kwargs)
-        self._resources[ResourceType.CHAT_MODEL][_DEFAULT_CHAT_MODEL] = (
-            chat_model_setup,
-            settings,
-        )
+        self.add_resource(_DEFAULT_CHAT_MODEL, chat_model)
 
         if output_schema:
             if isinstance(output_schema, type) and issubclass(output_schema, 
BaseModel):
diff --git a/python/flink_agents/api/chat_models/chat_model.py 
b/python/flink_agents/api/chat_models/chat_model.py
index fb56cf9..09c2f39 100644
--- a/python/flink_agents/api/chat_models/chat_model.py
+++ b/python/flink_agents/api/chat_models/chat_model.py
@@ -25,7 +25,7 @@ from typing_extensions import override
 from flink_agents.api.chat_message import ChatMessage
 from flink_agents.api.prompts.prompt import Prompt
 from flink_agents.api.resource import Resource, ResourceType
-from flink_agents.api.tools.tool import BaseTool
+from flink_agents.api.tools.tool import Tool
 
 
 class BaseChatModelConnection(Resource, ABC):
@@ -96,7 +96,7 @@ class BaseChatModelConnection(Resource, ABC):
     def chat(
         self,
         messages: Sequence[ChatMessage],
-        tools: List[BaseTool] | None = None,
+        tools: List[Tool] | None = None,
         **kwargs: Any,
     ) -> ChatMessage:
         """Direct communication with model service for chat conversation.
diff --git a/python/flink_agents/api/execution_environment.py 
b/python/flink_agents/api/execution_environment.py
index cda5e41..9ccc4a4 100644
--- a/python/flink_agents/api/execution_environment.py
+++ b/python/flink_agents/api/execution_environment.py
@@ -17,7 +17,7 @@
 
#################################################################################
 import importlib
 from abc import ABC, abstractmethod
-from typing import Any, Callable, Dict, List, Type
+from typing import Any, Callable, Dict, List
 
 from importlib_resources import files
 from pyflink.common import TypeInformation
@@ -25,21 +25,11 @@ from pyflink.datastream import DataStream, KeySelector, 
StreamExecutionEnvironme
 from pyflink.table import Schema, StreamTableEnvironment, Table
 
 from flink_agents.api.agent import Agent
-from flink_agents.api.chat_models.chat_model import (
-    BaseChatModelConnection,
-    BaseChatModelSetup,
-)
 from flink_agents.api.configuration import Configuration
-from flink_agents.api.embedding_models.embedding_model import (
-    BaseEmbeddingModelConnection,
-    BaseEmbeddingModelSetup,
-)
-from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceType
-from flink_agents.api.tools.mcp import MCPServer
-from flink_agents.api.vector_stores.vector_store import (
-    BaseVectorStoreConnection,
-    BaseVectorStoreSetup,
+from flink_agents.api.resource import (
+    ResourceDescriptor,
+    ResourceType,
+    SerializableResource,
 )
 
 
@@ -214,228 +204,34 @@ class AgentsExecutionEnvironment(ABC):
     def execute(self) -> None:
         """Execute agent individually."""
 
-    def add_prompt(self, name: str, prompt: Prompt) -> 
"AgentsExecutionEnvironment":
-        """Register prompt to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the prompt, should be unique in the same Agent.
-        prompt: Prompt
-            The prompt to be used in the agent.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered prompt.
-        """
-        if name in self._resources[ResourceType.PROMPT]:
-            msg = f"Prompt {name} already defined"
-            raise ValueError(msg)
-        self._resources[ResourceType.PROMPT][name] = prompt
-        return self
-
-    def add_tool(self, name: str, func: Callable) -> 
"AgentsExecutionEnvironment":
-        """Register function tool to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the tool, should be unique in the same Agent.
-        func: Callable
-            The execution function of the tool.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered tool.
-        """
-        if name in self._resources[ResourceType.TOOL]:
-            msg = f"Function tool {name} already defined"
-            raise ValueError(msg)
-        self._resources[ResourceType.TOOL][name] = func
-        return self
-
-    def add_chat_model_connection(
-        self, name: str, connection: Type[BaseChatModelConnection], **kwargs: 
Any
-    ) -> "AgentsExecutionEnvironment":
-        """Register chat model connection to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the chat model connection, should be unique in the 
same Agent.
-        connection: Type[BaseChatModelConnection]
-            The type of chat model connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the chat model connection.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered chat model connection.
-        """
-        if name in self._resources[ResourceType.CHAT_MODEL_CONNECTION]:
-            msg = f"Chat model connection {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.CHAT_MODEL_CONNECTION][name] = 
(connection, kwargs)
-        return self
-
-    def add_chat_model_setup(
-        self, name: str, chat_model: Type[BaseChatModelSetup], **kwargs: Any
-    ) -> "AgentsExecutionEnvironment":
-        """Register chat model setup to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the chat model, should be unique in the same Agent.
-        chat_model: Type[BaseChatModel]
-            The type of chat model.
-        **kwargs: Any
-            Initialize keyword arguments passed to the chat model.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered chat model setup.
-        """
-        if name in self._resources[ResourceType.CHAT_MODEL]:
-            msg = f"Chat model setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.CHAT_MODEL][name] = (chat_model, kwargs)
-        return self
-
-    def add_embedding_model_connection(
-        self, name: str, connection: Type[BaseEmbeddingModelConnection], 
**kwargs: Any
+    def add_resource(
+        self, name: str, instance: SerializableResource | ResourceDescriptor
     ) -> "AgentsExecutionEnvironment":
-        """Register embedding model connection to agent execution environment.
+        """Register resource to agent execution environment.
 
         Parameters
         ----------
         name : str
-            The name of the embedding model connection, should be unique in 
the same
-            Agent.
-        connection: Type[BaseEmbeddingModelConnection]
-            The type of embedding model connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the embedding model 
connection.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered embedding model connection.
-        """
-        if name in self._resources[ResourceType.EMBEDDING_MODEL_CONNECTION]:
-            msg = f"Embedding model connection {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.EMBEDDING_MODEL_CONNECTION][name] = (
-            connection,
-            kwargs,
-        )
-        return self
-
-    def add_embedding_model_setup(
-        self, name: str, embedding_model: Type[BaseEmbeddingModelSetup], 
**kwargs: Any
-    ) -> "AgentsExecutionEnvironment":
-        """Register embedding model setup to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the embedding model, should be unique in the same 
Agent.
-        embedding_model: Type[BaseEmbeddingModelSetup]
-            The type of embedding model.
-        **kwargs: Any
-            Initialize keyword arguments passed to the embedding model.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered embedding model setup.
-        """
-        if name in self._resources[ResourceType.EMBEDDING_MODEL]:
-            msg = f"Embedding model setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.EMBEDDING_MODEL][name] = 
(embedding_model, kwargs)
-        return self
-
-    def add_mcp_server(
-        self, name: str, mcp_server: MCPServer
-    ) -> "AgentsExecutionEnvironment":
-        """Add an MCP server to the agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the MCP server, should be unique in the same Agent.
-        mcp_server : MCPServer
-            The MCP server resource instance.
+            The name of the resource, should be unique in the same Agent.
+        instance: SerializableResource | ResourceDescriptor
+            The serializable resource instance, or the descriptor of resource.
 
         Returns:
         -------
         AgentsExecutionEnvironment
-            The environment contains registered embedding model setup.
+            The environment to register the resource.
         """
-        if name in self._resources[ResourceType.MCP_SERVER]:
-            msg = f"MCP server {name} already defined"
-            raise ValueError(msg)
-        self._resources[ResourceType.MCP_SERVER][name] = mcp_server
-        return self
-
-    def add_vector_store_connection(
-            self, name: str, connection: Type[BaseVectorStoreConnection], 
**kwargs: Any
-    ) -> "AgentsExecutionEnvironment":
-        """Register vector store connection to agent execution environment.
-
-        Parameters
-        ----------
-        name : str
-            The name of the vector store connection, should be unique in the 
same
-            Agent.
-        connection: Type[BaseVectorStoreConnection]
-            The type of vector store connection.
-        **kwargs: Any
-            Initialize keyword arguments passed to the vector store connection.
+        if isinstance(instance, SerializableResource):
+            resource_type = instance.resource_type()
+        elif isinstance(instance, ResourceDescriptor):
+            resource_type = instance.clazz.resource_type()
+        else:
+            err_msg = f"Unexpected resource {instance}"
+            raise TypeError(err_msg)
 
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered vector store connection.
-        """
-        if name in self._resources[ResourceType.VECTOR_STORE_CONNECTION]:
-            msg = f"Vector store connection {name} already defined"
+        if name in self._resources[resource_type]:
+            msg = f"{resource_type.value} {name} already defined"
             raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.VECTOR_STORE_CONNECTION][name] = 
(connection, kwargs)
-        return self
-
-    def add_vector_store_setup(
-            self, name: str, vector_store: Type[BaseVectorStoreSetup], 
**kwargs: Any
-    ) -> "AgentsExecutionEnvironment":
-        """Register vector store setup to agent execution environment.
 
-        Parameters
-        ----------
-        name : str
-            The name of the vector store, should be unique in the same Agent.
-        vector_store: Type[BaseVectorStoreSetup]
-            The type of vector store.
-        **kwargs: Any
-            Initialize keyword arguments passed to the vector store.
-
-        Returns:
-        -------
-        AgentsExecutionEnvironment
-            The environment contains registered vector store setup.
-        """
-        if name in self._resources[ResourceType.VECTOR_STORE]:
-            msg = f"Vector store setup {name} already defined"
-            raise ValueError(msg)
-        kwargs["name"] = name
-        self._resources[ResourceType.VECTOR_STORE][name] = (vector_store, 
kwargs)
+        self._resources[resource_type][name] = instance
         return self
diff --git a/python/flink_agents/api/resource.py 
b/python/flink_agents/api/resource.py
index c07de2e..5659e58 100644
--- a/python/flink_agents/api/resource.py
+++ b/python/flink_agents/api/resource.py
@@ -17,7 +17,7 @@
 
#################################################################################
 from abc import ABC, abstractmethod
 from enum import Enum
-from typing import Callable
+from typing import Any, Callable, Dict, Type
 
 from pydantic import BaseModel, Field, model_validator
 
@@ -72,3 +72,25 @@ class SerializableResource(Resource, ABC):
         """Ensure resource is serializable."""
         self.model_dump_json()
         return self
+
+
+class ResourceDescriptor:
+    """Descriptor of resource, includes the class and the initialize 
arguments."""
+
+    _clazz: Type[Resource]
+    _arguments: Dict[str, Any]
+
+    def __init__(self, *, clazz: Type[Resource], **arguments: Any) -> None:
+        """Init method."""
+        self._clazz = clazz
+        self._arguments = arguments
+
+    @property
+    def clazz(self) -> Type[Resource]:
+        """Get the class of the resource."""
+        return self._clazz
+
+    @property
+    def arguments(self) -> Dict[str, Any]:
+        """Get the initialize arguments of the resource."""
+        return self._arguments
diff --git a/python/flink_agents/api/tools/mcp.py 
b/python/flink_agents/api/tools/mcp.py
index 147a3c4..d59648c 100644
--- a/python/flink_agents/api/tools/mcp.py
+++ b/python/flink_agents/api/tools/mcp.py
@@ -39,11 +39,11 @@ from typing_extensions import override
 from flink_agents.api.chat_message import ChatMessage, MessageRole
 from flink_agents.api.prompts.prompt import Prompt
 from flink_agents.api.resource import ResourceType, SerializableResource
-from flink_agents.api.tools.tool import BaseTool, ToolMetadata, ToolType
+from flink_agents.api.tools.tool import Tool, ToolMetadata, ToolType
 from flink_agents.api.tools.utils import extract_mcp_content_item
 
 
-class MCPTool(BaseTool):
+class MCPTool(Tool):
     """MCP tool definition that can be called directly.
 
     This represents a single tool from an MCP server.
diff --git a/python/flink_agents/api/tools/tool.py 
b/python/flink_agents/api/tools/tool.py
index a410ec2..c42897f 100644
--- a/python/flink_agents/api/tools/tool.py
+++ b/python/flink_agents/api/tools/tool.py
@@ -20,7 +20,7 @@ from abc import ABC, abstractmethod
 from enum import Enum
 from typing import Any, Type
 
-from pydantic import BaseModel, field_serializer, model_validator
+from pydantic import BaseModel, Field, field_serializer, model_validator
 from typing_extensions import override
 
 from flink_agents.api.resource import ResourceType, SerializableResource
@@ -100,7 +100,20 @@ class ToolMetadata(BaseModel):
         return parameters
 
 
-class BaseTool(SerializableResource, ABC):
+class FunctionTool(SerializableResource):
+    """Tool container keeps a callable, mainly used to represent
+    a function which will be converted to Tool after compile.
+    """
+
+    func: typing.Callable = Field(exclude=True)
+
+    @classmethod
+    def resource_type(cls) -> ResourceType:
+        """Get the resource type."""
+        return ResourceType.TOOL
+
+
+class Tool(SerializableResource, ABC):
     """Base abstract class of all kinds of tools.
 
     Attributes:
@@ -111,6 +124,11 @@ class BaseTool(SerializableResource, ABC):
 
     metadata: ToolMetadata
 
+    @staticmethod
+    def from_callable(func: typing.Callable) -> FunctionTool:
+        """Create a function tool from a callable."""
+        return FunctionTool(func=func)
+
     @property
     def name(self) -> str:
         """Get the name of the tool."""
diff --git a/python/flink_agents/examples/chat_model_example.py 
b/python/flink_agents/examples/chat_model_example.py
index 04b2f7b..e657c43 100644
--- a/python/flink_agents/examples/chat_model_example.py
+++ b/python/flink_agents/examples/chat_model_example.py
@@ -16,14 +16,10 @@
 # limitations under the License.
 
#################################################################################
 import os
-from typing import Any, Dict, List, Tuple, Type
+from typing import List
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
-from flink_agents.api.chat_models.chat_model import (
-    BaseChatModelConnection,
-    BaseChatModelSetup,
-)
 from flink_agents.api.decorators import (
     action,
     chat_model_connection,
@@ -36,6 +32,7 @@ from flink_agents.api.events.event import (
     OutputEvent,
 )
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.integrations.chat_models.ollama_chat_model import (
     OllamaChatModelConnection,
@@ -50,57 +47,57 @@ TONGYI_MODEL = os.environ.get("TONGYI_CHAT_MODEL", 
"qwen-plus")
 OLLAMA_MODEL = os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:0.6b")
 BACKENDS_TO_RUN: List[str] = ["Tongyi", "Ollama"]
 
+
 class MyAgent(Agent):
     """Example agent demonstrating the new ChatModel architecture."""
 
     @chat_model_connection
     @staticmethod
-    def tongyi_connection() -> Tuple[Type[BaseChatModelConnection], Dict[str, 
Any]]:
+    def tongyi_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for tongyi model service 
connection."""
         if not os.environ.get("DASHSCOPE_API_KEY"):
             msg = "Please set the 'DASHSCOPE_API_KEY' environment variable."
             raise ValueError(msg)
-        return TongyiChatModelConnection, {
-            "model": TONGYI_MODEL,
-        }
+        return ResourceDescriptor(clazz=TongyiChatModelConnection, 
model=TONGYI_MODEL)
 
     @chat_model_connection
     @staticmethod
-    def ollama_connection() -> Tuple[Type[BaseChatModelConnection], Dict[str, 
Any]]:
+    def ollama_connection() -> ResourceDescriptor:
         """ChatModelConnection responsible for ollama model service 
connection."""
-        return OllamaChatModelConnection, {
-            "model": OLLAMA_MODEL,
-        }
+        return ResourceDescriptor(clazz=OllamaChatModelConnection, 
model=OLLAMA_MODEL)
 
     @chat_model_setup
     @staticmethod
-    def math_chat_model() -> Tuple[Type[BaseChatModelSetup], Dict[str, Any]]:
+    def math_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on math, and reuse ChatModelConnection."""
         if CURRENT_BACKEND == "Tongyi":
-            return TongyiChatModelSetup, {
-                "connection": "tongyi_connection",
-                "tools": ["add"],
-            }
+            return ResourceDescriptor(
+                clazz=TongyiChatModelSetup,
+                connection="tongyi_connection",
+                tools=["add"],
+            )
         else:
-            return OllamaChatModelSetup, {
-                "connection": "ollama_connection",
-                "tools": ["add"],
-                "extract_reasoning": True,
-            }
+            return ResourceDescriptor(
+                clazz=OllamaChatModelSetup,
+                connection="ollama_connection",
+                tools=["add"],
+                extract_reasoning=True,
+            )
 
     @chat_model_setup
     @staticmethod
-    def creative_chat_model() -> Tuple[Type[BaseChatModelSetup], Dict[str, 
Any]]:
+    def creative_chat_model() -> ResourceDescriptor:
         """ChatModel which focus on text generate, and reuse 
ChatModelConnection."""
         if CURRENT_BACKEND == "Tongyi":
-            return TongyiChatModelSetup, {
-                "connection": "tongyi_connection",
-            }
+            return ResourceDescriptor(
+                clazz=TongyiChatModelSetup, connection="tongyi_connection"
+            )
         else:
-            return OllamaChatModelSetup, {
-                "connection": "ollama_connection",
-                "extract_reasoning": True,
-            }
+            return ResourceDescriptor(
+                clazz=OllamaChatModelSetup,
+                connection="ollama_connection",
+                extract_reasoning=True,
+            )
 
     @tool
     @staticmethod
@@ -129,8 +126,17 @@ class MyAgent(Agent):
         In this action, we will send ChatRequestEvent to trigger built-in 
actions.
         """
         input_text = event.input.lower()
-        model_name = "math_chat_model" if ("calculate" in input_text or "sum" 
in input_text) else "creative_chat_model"
-        ctx.send_event(ChatRequestEvent(model=model_name, 
messages=[ChatMessage(role=MessageRole.USER, content=event.input)]))
+        model_name = (
+            "math_chat_model"
+            if ("calculate" in input_text or "sum" in input_text)
+            else "creative_chat_model"
+        )
+        ctx.send_event(
+            ChatRequestEvent(
+                model=model_name,
+                messages=[ChatMessage(role=MessageRole.USER, 
content=event.input)],
+            )
+        )
 
     @action(ChatResponseEvent)
     @staticmethod
@@ -150,7 +156,9 @@ if __name__ == "__main__":
             print("[SKIP] TongyiChatModel because DASHSCOPE_API_KEY is not 
set.")
             continue
 
-        print(f"\nRunning {backend}ChatModel while the using model is 
{CURRENT_MODEL}...")
+        print(
+            f"\nRunning {backend}ChatModel while the using model is 
{CURRENT_MODEL}..."
+        )
 
         env = AgentsExecutionEnvironment.get_execution_environment()
         input_list = []
@@ -166,4 +174,3 @@ if __name__ == "__main__":
         for output in output_list:
             for key, value in output.items():
                 print(f"{key}: {value}")
-
diff --git 
a/python/flink_agents/examples/integrate_table_with_react_agent_example.py 
b/python/flink_agents/examples/integrate_table_with_react_agent_example.py
index 8d3d0a5..ad8ec0a 100644
--- a/python/flink_agents/examples/integrate_table_with_react_agent_example.py
+++ b/python/flink_agents/examples/integrate_table_with_react_agent_example.py
@@ -30,6 +30,8 @@ from flink_agents.api.agents.react_agent import ReActAgent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
+from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.tools.tool import Tool
 from flink_agents.examples.common_tools import add, multiply
 from flink_agents.integrations.chat_models.ollama_chat_model import (
     OllamaChatModelConnection,
@@ -49,7 +51,7 @@ class MyKeySelector(KeySelector):
 
 current_dir = Path(__file__).parent
 
-#TODO: Currently, this example may cause core dump when being executed, the 
root cause
+# TODO: Currently, this example may cause core dump when being executed, the 
root cause
 # may be known issue of pemja incorrect reference counting:
 # https://github.com/apache/flink-agents/issues/83
 if __name__ == "__main__":
@@ -74,11 +76,11 @@ if __name__ == "__main__":
 
     # register resource to execution environment
     (
-        env.add_chat_model_connection(
-            name="ollama", connection=OllamaChatModelConnection, model=model
+        env.add_resource(
+            "ollama", ResourceDescriptor(clazz=OllamaChatModelConnection, 
model=model)
         )
-        .add_tool("add", add)
-        .add_tool("multiply", multiply)
+        .add_resource("add", Tool.from_callable(add))
+        .add_resource("multiply", Tool.from_callable(multiply))
     )
 
     # prepare prompt
@@ -99,10 +101,10 @@ if __name__ == "__main__":
 
     # create ReAct agent.
     agent = ReActAgent(
-        chat_model_setup=OllamaChatModelSetup,
-        connection="ollama",
+        chat_model=ResourceDescriptor(
+            clazz=OllamaChatModelSetup, connection="ollama", tools=["add", 
"multiply"]
+        ),
         prompt=prompt,
-        tools=["add", "multiply"],
         output_schema=output_type_info,
     )
 
diff --git 
a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py 
b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
index 31babc3..7e7eb06 100644
--- a/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/product_suggestion_agent.py
@@ -17,15 +17,12 @@
 
#################################################################################
 import json
 import logging
-from typing import Any, Dict, List, Tuple, Type
+from typing import List
 
 from pydantic import BaseModel
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
-from flink_agents.api.chat_models.chat_model import (
-    BaseChatModelSetup,
-)
 from flink_agents.api.decorators import (
     action,
     chat_model_setup,
@@ -34,6 +31,7 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.integrations.chat_models.ollama_chat_model import (
     OllamaChatModelSetup,
@@ -112,13 +110,10 @@ class ProductSuggestionAgent(Agent):
 
     @chat_model_setup
     @staticmethod
-    def generate_suggestion_model() -> Tuple[Type[BaseChatModelSetup], 
Dict[str, Any]]:
+    def generate_suggestion_model() -> ResourceDescriptor:
         """ChatModel which focus on generating product suggestions."""
-        return OllamaChatModelSetup, {
-            "connection": "ollama_server",
-            "prompt": "generate_suggestion_prompt",
-            "extract_reasoning": True,
-        }
+        return ResourceDescriptor(clazz=OllamaChatModelSetup, 
connection="ollama_server",
+                                  prompt="generate_suggestion_prompt", 
extract_reasoning=True)
 
     @action(InputEvent)
     @staticmethod
diff --git 
a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py 
b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
index 2b067c5..0cfa0f4 100644
--- a/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
+++ b/python/flink_agents/examples/quickstart/agents/review_analysis_agent.py
@@ -17,15 +17,11 @@
 
#################################################################################
 import json
 import logging
-from typing import Any, Dict, Tuple, Type
 
 from pydantic import BaseModel
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
-from flink_agents.api.chat_models.chat_model import (
-    BaseChatModelSetup,
-)
 from flink_agents.api.decorators import (
     action,
     chat_model_setup,
@@ -34,6 +30,7 @@ from flink_agents.api.decorators import (
 from flink_agents.api.events.chat_event import ChatRequestEvent, 
ChatResponseEvent
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.prompts.prompt import Prompt
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.integrations.chat_models.ollama_chat_model import (
     OllamaChatModelSetup,
@@ -111,13 +108,10 @@ class ReviewAnalysisAgent(Agent):
 
     @chat_model_setup
     @staticmethod
-    def review_analysis_model() -> Tuple[Type[BaseChatModelSetup], Dict[str, 
Any]]:
+    def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
-        return OllamaChatModelSetup, {
-            "connection": "ollama_server",
-            "prompt": "review_analysis_prompt",
-            "extract_reasoning": True,
-        }
+        return ResourceDescriptor(clazz=OllamaChatModelSetup, 
connection="ollama_server",
+                                  prompt="review_analysis_prompt", 
extract_reasoning=True)
 
     @action(InputEvent)
     @staticmethod
diff --git 
a/python/flink_agents/examples/quickstart/product_improve_suggestion.py 
b/python/flink_agents/examples/quickstart/product_improve_suggestion.py
index 129027e..542a3e9 100644
--- a/python/flink_agents/examples/quickstart/product_improve_suggestion.py
+++ b/python/flink_agents/examples/quickstart/product_improve_suggestion.py
@@ -27,6 +27,7 @@ from pyflink.datastream.connectors.file_system import 
FileSource, StreamFormat
 from pyflink.datastream.window import TumblingProcessingTimeWindows
 
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.examples.quickstart.agents.product_suggestion_agent import (
     ProductReviewSummary,
     ProductSuggestionAgent,
@@ -90,14 +91,13 @@ def main() -> None:
 
     # Add Ollama chat model connection to be used by the ReviewAnalysisAgent
     # and ProductSuggestionAgent.
-    agents_env.add_chat_model_connection(
+    agents_env.add_resource(
         "ollama_server",
-        OllamaChatModelConnection,
-        model="qwen3:8b",
-        request_timeout=120,
+        ResourceDescriptor(
+            clazz=OllamaChatModelConnection, model="qwen3:8b", 
request_timeout=120
+        ),
     )
 
-
     # Read product reviews from a text file as a streaming source.
     # Each line in the file should be a JSON string representing a 
ProductReview.
     product_review_stream = env.from_source(
diff --git a/python/flink_agents/examples/quickstart/product_review_analysis.py 
b/python/flink_agents/examples/quickstart/product_review_analysis.py
index 0ef06de..99a52b9 100644
--- a/python/flink_agents/examples/quickstart/product_review_analysis.py
+++ b/python/flink_agents/examples/quickstart/product_review_analysis.py
@@ -22,6 +22,7 @@ from pyflink.datastream import StreamExecutionEnvironment
 from pyflink.datastream.connectors.file_system import FileSource, StreamFormat
 
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.examples.quickstart.agents.review_analysis_agent import (
     ProductReview,
     ReviewAnalysisAgent,
@@ -47,11 +48,11 @@ def main() -> None:
     agents_env = AgentsExecutionEnvironment.get_execution_environment(env)
 
     # Add Ollama chat model connection to be used by the ReviewAnalysisAgent.
-    agents_env.add_chat_model_connection(
+    agents_env.add_resource(
         "ollama_server",
-        OllamaChatModelConnection,
-        model="qwen3:8b",
-        request_timeout=120,
+        ResourceDescriptor(
+            clazz=OllamaChatModelConnection, model="qwen3:8b", 
request_timeout=120
+        ),
     )
 
     # Read product reviews from a text file as a streaming source.
diff --git a/python/flink_agents/examples/react_agent_example.py 
b/python/flink_agents/examples/react_agent_example.py
index 862ca20..0723480 100644
--- a/python/flink_agents/examples/react_agent_example.py
+++ b/python/flink_agents/examples/react_agent_example.py
@@ -23,6 +23,8 @@ from flink_agents.api.agents.react_agent import ReActAgent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
+from flink_agents.api.resource import ResourceDescriptor
+from flink_agents.api.tools.tool import Tool
 from flink_agents.examples.common_tools import add, multiply
 from flink_agents.integrations.chat_models.ollama_chat_model import (
     OllamaChatModelConnection,
@@ -47,11 +49,11 @@ if __name__ == "__main__":
 
     # register resource to execution environment
     (
-        env.add_chat_model_connection(
-            name="ollama", connection=OllamaChatModelConnection, model=model
+        env.add_resource(
+            "ollama", ResourceDescriptor(clazz=OllamaChatModelConnection, 
model=model)
         )
-        .add_tool("add", add)
-        .add_tool("multiply", multiply)
+        .add_resource("add", Tool.from_callable(add))
+        .add_resource("multiply", Tool.from_callable(multiply))
     )
 
     # prepare prompt
@@ -67,10 +69,10 @@ if __name__ == "__main__":
 
     # create ReAct agent.
     agent = ReActAgent(
-        chat_model_setup=OllamaChatModelSetup,
-        connection="ollama",
+        chat_model=ResourceDescriptor(
+            clazz=OllamaChatModelSetup, connection="ollama", tools=["add", 
"multiply"]
+        ),
         prompt=prompt,
-        tools=["add", "multiply"],
         output_schema=OutputData,
     )
 
diff --git 
a/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
 
b/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
index 99786ad..bef1043 100644
--- 
a/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
+++ 
b/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
@@ -28,7 +28,7 @@ from flink_agents.api.chat_models.chat_model import (
     BaseChatModelConnection,
     BaseChatModelSetup,
 )
-from flink_agents.api.tools.tool import BaseTool, ToolMetadata
+from flink_agents.api.tools.tool import Tool, ToolMetadata
 
 
 def to_anthropic_tool(*, metadata: ToolMetadata, skip_length_check: bool = 
False) -> ToolParam:
@@ -150,7 +150,7 @@ class AnthropicChatModelConnection(BaseChatModelConnection):
             self._client = Anthropic(api_key=self.api_key, 
max_retries=self.max_retries, timeout=self.timeout)
         return self._client
 
-    def chat(self, messages: Sequence[ChatMessage], tools: List[BaseTool] | 
None = None,
+    def chat(self, messages: Sequence[ChatMessage], tools: List[Tool] | None = 
None,
              **kwargs: Any) -> ChatMessage:
         """Direct communication with Anthropic model service for chat 
conversation."""
         anthropic_tools = None
diff --git 
a/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
 
b/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
index 45116ac..95a77e5 100644
--- 
a/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
+++ 
b/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
@@ -79,7 +79,7 @@ def test_anthropic_chat_with_tools() -> None:  # noqa : D103
         if type == ResourceType.CHAT_MODEL_CONNECTION:
             return connection
         else:
-            return from_callable(name=name, func=add)
+            return from_callable(func=add)
 
     chat_model = AnthropicChatModelSetup(
         name="anthropic",
diff --git a/python/flink_agents/integrations/chat_models/ollama_chat_model.py 
b/python/flink_agents/integrations/chat_models/ollama_chat_model.py
index 38d3cd7..0931467 100644
--- a/python/flink_agents/integrations/chat_models/ollama_chat_model.py
+++ b/python/flink_agents/integrations/chat_models/ollama_chat_model.py
@@ -26,7 +26,7 @@ from flink_agents.api.chat_models.chat_model import (
     BaseChatModelConnection,
     BaseChatModelSetup,
 )
-from flink_agents.api.tools.tool import BaseTool
+from flink_agents.api.tools.tool import Tool
 from flink_agents.integrations.chat_models.chat_model_utils import 
to_openai_tool
 
 DEFAULT_CONTEXT_WINDOW = 2048
@@ -89,7 +89,7 @@ class OllamaChatModelConnection(BaseChatModelConnection):
     def chat(
         self,
         messages: Sequence[ChatMessage],
-        tools: List[BaseTool] | None = None,
+        tools: List[Tool] | None = None,
         **kwargs: Any,
     ) -> ChatMessage:
         """Process a sequence of messages, and return a response."""
diff --git 
a/python/flink_agents/integrations/chat_models/openai/openai_chat_model.py 
b/python/flink_agents/integrations/chat_models/openai/openai_chat_model.py
index 9a2aa04..08dbaf8 100644
--- a/python/flink_agents/integrations/chat_models/openai/openai_chat_model.py
+++ b/python/flink_agents/integrations/chat_models/openai/openai_chat_model.py
@@ -26,7 +26,7 @@ from flink_agents.api.chat_models.chat_model import (
     BaseChatModelConnection,
     BaseChatModelSetup,
 )
-from flink_agents.api.tools.tool import BaseTool
+from flink_agents.api.tools.tool import Tool
 from flink_agents.integrations.chat_models.chat_model_utils import 
to_openai_tool
 from flink_agents.integrations.chat_models.openai.openai_utils import (
     convert_from_openai_message,
@@ -136,7 +136,7 @@ class OpenAIChatModelConnection(BaseChatModelConnection):
     def chat(
         self,
         messages: Sequence[ChatMessage],
-        tools: List[BaseTool] | None = None,
+        tools: List[Tool] | None = None,
         **kwargs: Any,
     ) -> ChatMessage:
         """Direct communication with model service for chat conversation.
diff --git 
a/python/flink_agents/integrations/chat_models/openai/tests/test_openai_chat_model.py
 
b/python/flink_agents/integrations/chat_models/openai/tests/test_openai_chat_model.py
index e69700a..dfd67ea 100644
--- 
a/python/flink_agents/integrations/chat_models/openai/tests/test_openai_chat_model.py
+++ 
b/python/flink_agents/integrations/chat_models/openai/tests/test_openai_chat_model.py
@@ -80,7 +80,7 @@ def test_openai_chat_with_tools() -> None:  # noqa : D103
         if type == ResourceType.CHAT_MODEL_CONNECTION:
             return connection
         else:
-            return from_callable(name=name, func=add)
+            return from_callable(func=add)
 
     chat_model = OpenAIChatModelSetup(
         name="openai",
diff --git 
a/python/flink_agents/integrations/chat_models/tests/test_ollama_chat_model.py 
b/python/flink_agents/integrations/chat_models/tests/test_ollama_chat_model.py
index 3cc0532..763c504 100644
--- 
a/python/flink_agents/integrations/chat_models/tests/test_ollama_chat_model.py
+++ 
b/python/flink_agents/integrations/chat_models/tests/test_ollama_chat_model.py
@@ -85,7 +85,7 @@ def add(a: int, b: int) -> int:
 
 
 def get_tool(name: str, type: ResourceType) -> FunctionTool:  # noqa :D103
-    return from_callable(name=name, func=add)
+    return from_callable(func=add)
 
 
 @pytest.mark.skipif(
diff --git 
a/python/flink_agents/integrations/chat_models/tests/test_tongyi_chat_model.py 
b/python/flink_agents/integrations/chat_models/tests/test_tongyi_chat_model.py
index b3b6fd1..7871b5d 100644
--- 
a/python/flink_agents/integrations/chat_models/tests/test_tongyi_chat_model.py
+++ 
b/python/flink_agents/integrations/chat_models/tests/test_tongyi_chat_model.py
@@ -64,7 +64,7 @@ def add(a: int, b: int) -> int:
 
 def get_tool(name: str, type: ResourceType) -> FunctionTool:
     """Helper function to create a tool for testing."""
-    return from_callable(name=name, func=add)
+    return from_callable(func=add)
 
 
 @pytest.mark.skipif(not api_key_available, reason="DashScope API key is not 
set")
diff --git a/python/flink_agents/integrations/chat_models/tongyi_chat_model.py 
b/python/flink_agents/integrations/chat_models/tongyi_chat_model.py
index 491de3c..3eed5a6 100644
--- a/python/flink_agents/integrations/chat_models/tongyi_chat_model.py
+++ b/python/flink_agents/integrations/chat_models/tongyi_chat_model.py
@@ -29,7 +29,7 @@ from flink_agents.api.chat_models.chat_model import (
     BaseChatModelConnection,
     BaseChatModelSetup,
 )
-from flink_agents.api.tools.tool import BaseTool, ToolMetadata
+from flink_agents.api.tools.tool import Tool, ToolMetadata
 
 DEFAULT_REQUEST_TIMEOUT = 60.0
 DEFAULT_MODEL = "qwen-plus"
@@ -103,7 +103,7 @@ class TongyiChatModelConnection(BaseChatModelConnection):
     def chat(
         self,
         messages: Sequence[ChatMessage],
-        tools: List[BaseTool] | None = None,
+        tools: List[Tool] | None = None,
         **kwargs: Any,
     ) -> ChatMessage:
         """Process a sequence of messages, and return a response."""
diff --git a/python/flink_agents/plan/agent_plan.py 
b/python/flink_agents/plan/agent_plan.py
index 671c308..54f71c5 100644
--- a/python/flink_agents/plan/agent_plan.py
+++ b/python/flink_agents/plan/agent_plan.py
@@ -270,98 +270,31 @@ def _get_actions(agent: Agent) -> List[Action]:
 
 def _get_resource_providers(agent: Agent) -> List[ResourceProvider]:
     resource_providers = []
+    # retrieve resource declared by decorator
     for name, value in agent.__class__.__dict__.items():
-        if hasattr(value, "_is_chat_model_setup"):
+        if (
+            hasattr(value, "_is_chat_model_setup")
+            or hasattr(value, "_is_chat_model_connection")
+            or hasattr(value, "_is_embedding_model_setup")
+            or hasattr(value, "_is_embedding_model_connection")
+            or hasattr(value, "_is_vector_store_setup")
+            or hasattr(value, "_is_vector_store_connection")
+        ):
             if isinstance(value, staticmethod):
                 value = value.__func__
 
             if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
-                )
-                resource_providers.append(provider)
-        elif hasattr(value, "_is_chat_model_connection"):
-            if isinstance(value, staticmethod):
-                value = value.__func__
-
-            if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
-                )
-                resource_providers.append(provider)
-        elif hasattr(value, "_is_embedding_model_setup"):
-            if isinstance(value, staticmethod):
-                value = value.__func__
-
-            if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
-                )
-                resource_providers.append(provider)
-        elif hasattr(value, "_is_embedding_model_connection"):
-            if isinstance(value, staticmethod):
-                value = value.__func__
-
-            if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
-                )
-                resource_providers.append(provider)
-        elif hasattr(value, "_is_vector_store_setup"):
-            if isinstance(value, staticmethod):
-                value = value.__func__
-
-            if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
+                resource_providers.append(
+                    PythonResourceProvider.get(name=name, descriptor=value())
                 )
-                resource_providers.append(provider)
-        elif hasattr(value, "_is_vector_store_connection"):
-            if isinstance(value, staticmethod):
-                value = value.__func__
 
-            if callable(value):
-                clazz, kwargs = value()
-                provider = PythonResourceProvider(
-                    name=name,
-                    type=clazz.resource_type(),
-                    module=clazz.__module__,
-                    clazz=clazz.__name__,
-                    kwargs=kwargs,
-                )
-                resource_providers.append(provider)
         elif hasattr(value, "_is_tool"):
             if isinstance(value, staticmethod):
                 value = value.__func__
 
             if callable(value):
                 # TODO: support other tool type.
-                tool = from_callable(name=name, func=value)
+                tool = from_callable(func=value)
                 resource_providers.append(
                     PythonSerializableResourceProvider.from_resource(
                         name=name, resource=tool
@@ -383,86 +316,35 @@ def _get_resource_providers(agent: Agent) -> 
List[ResourceProvider]:
             mcp_server = value()
             _add_mcp_server(name, resource_providers, mcp_server)
 
+    # retrieve resources declared by the add interface
     for name, prompt in agent.resources[ResourceType.PROMPT].items():
         resource_providers.append(
             PythonSerializableResourceProvider.from_resource(name=name, 
resource=prompt)
         )
 
-    for name, func in agent.resources[ResourceType.TOOL].items():
-        tool = from_callable(name=name, func=func)
+    for name, tool in agent.resources[ResourceType.TOOL].items():
         resource_providers.append(
-            PythonSerializableResourceProvider.from_resource(name=name, 
resource=tool)
+            PythonSerializableResourceProvider.from_resource(
+                name=name, resource=from_callable(tool.func)
+            )
         )
 
     for name, mcp_server in agent.resources[ResourceType.MCP_SERVER].items():
         mcp_server = cast("MCPServer", mcp_server)
         _add_mcp_server(name, resource_providers, mcp_server)
 
-    for name, chat_model in agent.resources[ResourceType.CHAT_MODEL].items():
-        clazz, kwargs = chat_model
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
-
-    for name, connection in 
agent.resources[ResourceType.CHAT_MODEL_CONNECTION].items():
-        clazz, kwargs = connection
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
-
-    for name, embedding_model in 
agent.resources[ResourceType.EMBEDDING_MODEL].items():
-        clazz, kwargs = embedding_model
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
-
-    for name, connection in 
agent.resources[ResourceType.EMBEDDING_MODEL_CONNECTION].items():
-        clazz, kwargs = connection
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
-
-    for name, vector_store in 
agent.resources[ResourceType.VECTOR_STORE].items():
-        clazz, kwargs = vector_store
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
-
-    for name, connection in 
agent.resources[ResourceType.VECTOR_STORE_CONNECTION].items():
-        clazz, kwargs = connection
-        provider = PythonResourceProvider(
-            name=name,
-            type=clazz.resource_type(),
-            module=clazz.__module__,
-            clazz=clazz.__name__,
-            kwargs=kwargs,
-        )
-        resource_providers.append(provider)
+    for resource_type in [
+        ResourceType.CHAT_MODEL,
+        ResourceType.CHAT_MODEL_CONNECTION,
+        ResourceType.EMBEDDING_MODEL,
+        ResourceType.EMBEDDING_MODEL_CONNECTION,
+        ResourceType.VECTOR_STORE,
+        ResourceType.VECTOR_STORE_CONNECTION,
+    ]:
+        for name, descriptor in agent.resources[resource_type].items():
+            resource_providers.append(
+                PythonResourceProvider.get(name=name, descriptor=descriptor)
+            )
 
     return resource_providers
 
diff --git a/python/flink_agents/plan/resource_provider.py 
b/python/flink_agents/plan/resource_provider.py
index aa177f7..417fc0b 100644
--- a/python/flink_agents/plan/resource_provider.py
+++ b/python/flink_agents/plan/resource_provider.py
@@ -24,6 +24,7 @@ from pydantic import BaseModel, Field
 
 from flink_agents.api.resource import (
     Resource,
+    ResourceDescriptor,
     ResourceType,
     SerializableResource,
 )
@@ -92,6 +93,19 @@ class PythonResourceProvider(ResourceProvider):
     clazz: str
     kwargs: Dict[str, Any]
 
+    @staticmethod
+    def get(name: str, descriptor: ResourceDescriptor) -> 
"PythonResourceProvider":
+        """Create PythonResourceProvider instance."""
+        clazz = descriptor.clazz
+        return PythonResourceProvider(
+                    name=name,
+                    type=clazz.resource_type(),
+                    module=clazz.__module__,
+                    clazz=clazz.__name__,
+                    kwargs=descriptor.arguments,
+                )
+
+
     def provide(self, get_resource: Callable, config: AgentConfiguration) -> 
Resource:
         """Create resource in runtime."""
         module = importlib.import_module(self.module)
diff --git 
a/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
 
b/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
index 59843f3..e8d388f 100644
--- 
a/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
+++ 
b/python/flink_agents/plan/tests/compatibility/python_agent_plan_compatibility_test_agent.py
@@ -15,13 +15,14 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 
#################################################################################
-from typing import Any, Dict, Sequence, Tuple, Type
+from typing import Any, Dict, Sequence
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage
 from flink_agents.api.chat_models.chat_model import BaseChatModelSetup
 from flink_agents.api.decorators import action, chat_model_setup, tool
 from flink_agents.api.events.event import Event, InputEvent
+from flink_agents.api.resource import ResourceDescriptor
 from flink_agents.api.runner_context import RunnerContext
 
 
@@ -56,13 +57,11 @@ class PythonAgentPlanCompatibilityTestAgent(Agent):
 
     @chat_model_setup
     @staticmethod
-    def chat_model() -> Tuple[Type[BaseChatModelSetup], Dict[str, Any]]:
+    def chat_model() -> ResourceDescriptor:
         """ChatModel can be used in action."""
-        return MockChatModel, {
-            "name": "chat_model",
-            "prompt": "prompt",
-            "tools": ["add"],
-        }
+        return ResourceDescriptor(
+            clazz=MockChatModel, name="chat_model", prompt="prompt", 
tools=["add"]
+        )
 
     @tool
     @staticmethod
diff --git a/python/flink_agents/plan/tests/resources/agent_plan.json 
b/python/flink_agents/plan/tests/resources/agent_plan.json
index d010799..d451056 100644
--- a/python/flink_agents/plan/tests/resources/agent_plan.json
+++ b/python/flink_agents/plan/tests/resources/agent_plan.json
@@ -77,7 +77,6 @@
                 "module": "flink_agents.plan.tests.test_agent_plan",
                 "clazz": "MockChatModelImpl",
                 "kwargs": {
-                    "name": "mock",
                     "host": "8.8.8.8",
                     "desc": "mock resource just for testing.",
                     "connection": "mock"
@@ -92,7 +91,6 @@
                 "module": "flink_agents.plan.tests.test_agent_plan",
                 "clazz": "MockEmbeddingModelSetup",
                 "kwargs": {
-                    "name": "mock_embedding",
                     "model": "test-model",
                     "connection": "mock_embedding_conn"
                 },
@@ -106,7 +104,6 @@
                 "module": "flink_agents.plan.tests.test_agent_plan",
                 "clazz": "MockEmbeddingModelConnection",
                 "kwargs": {
-                    "name": "mock_embedding_conn",
                     "api_key": "mock-api-key"
                 },
                 "__resource_provider_type__": "PythonResourceProvider"
@@ -119,7 +116,6 @@
                 "module": "flink_agents.plan.tests.test_agent_plan",
                 "clazz": "MockVectorStoreSetup",
                 "kwargs": {
-                    "name": "mock_vector_store",
                     "connection": "mock_vector_conn",
                     "embedding_model": "mock_embedding",
                     "collection_name": "test_collection"
@@ -134,7 +130,6 @@
                 "module": "flink_agents.plan.tests.test_agent_plan",
                 "clazz": "MockVectorStoreConnection",
                 "kwargs": {
-                    "name": "mock_vector_conn",
                     "host": "localhost",
                     "port": 8000
                 },
diff --git a/python/flink_agents/plan/tests/test_agent_plan.py 
b/python/flink_agents/plan/tests/test_agent_plan.py
index b85db4a..600457a 100644
--- a/python/flink_agents/plan/tests/test_agent_plan.py
+++ b/python/flink_agents/plan/tests/test_agent_plan.py
@@ -17,7 +17,7 @@
 
#################################################################################
 import json
 from pathlib import Path
-from typing import Any, Dict, Sequence, Tuple, Type
+from typing import Any, Dict, Sequence
 
 import pytest
 
@@ -37,7 +37,7 @@ from flink_agents.api.embedding_models.embedding_model import 
(
     BaseEmbeddingModelSetup,
 )
 from flink_agents.api.events.event import Event, InputEvent, OutputEvent
-from flink_agents.api.resource import Resource, ResourceType
+from flink_agents.api.resource import ResourceDescriptor, ResourceType
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.api.vector_stores.vector_store import (
     BaseVectorStoreConnection,
@@ -109,7 +109,7 @@ class MockChatModelImpl(BaseChatModelSetup):  # noqa: D101
         )
 
 
-class MockEmbeddingModelConnection(BaseEmbeddingModelConnection): # noqa: D101
+class MockEmbeddingModelConnection(BaseEmbeddingModelConnection):  # noqa: D101
     api_key: str
 
     def embed(self, text: str, **kwargs: Any) -> list[float]:
@@ -127,19 +127,21 @@ class 
MockVectorStoreConnection(BaseVectorStoreConnection):  # noqa: D101
     host: str
     port: int
 
-    def query(self, embedding: list[float], limit: int = 10, **kwargs: Any) -> 
list[Document]:
+    def query(
+        self, embedding: list[float], limit: int = 10, **kwargs: Any
+    ) -> list[Document]:
         """Testing Implementation."""
         return [
             Document(
                 content="Mock document content",
                 metadata={"source": "test", "id": "doc1"},
-                id="doc1"
+                id="doc1",
             ),
             Document(
                 content="Another mock document",
                 metadata={"source": "test", "id": "doc2"},
-                id="doc2"
-            )
+                id="doc2",
+            ),
         ][:limit]
 
 
@@ -154,49 +156,46 @@ class MockVectorStoreSetup(BaseVectorStoreSetup):  # 
noqa: D101
 class MyAgent(Agent):  # noqa: D101
     @chat_model_setup
     @staticmethod
-    def mock() -> Tuple[Type[Resource], Dict[str, Any]]:  # noqa: D102
-        return MockChatModelImpl, {
-            "name": "mock",
-            "host": "8.8.8.8",
-            "desc": "mock resource just for testing.",
-            "connection": "mock",
-        }
+    def mock() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(
+            clazz=MockChatModelImpl,
+            host="8.8.8.8",
+            desc="mock resource just for testing.",
+            connection="mock",
+        )
 
     @embedding_model_connection
     @staticmethod
-    def mock_embedding_conn() -> Tuple[Type[Resource], Dict[str, Any]]:  # 
noqa: D102
-        return MockEmbeddingModelConnection, {
-            "name": "mock_embedding_conn",
-            "api_key": "mock-api-key",
-        }
+    def mock_embedding_conn() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(
+            clazz=MockEmbeddingModelConnection, api_key="mock-api-key"
+        )
 
     @embedding_model_setup
     @staticmethod
-    def mock_embedding() -> Tuple[Type[Resource], Dict[str, Any]]:  # noqa: 
D102
-        return MockEmbeddingModelSetup, {
-            "name": "mock_embedding",
-            "model": "test-model",
-            "connection": "mock_embedding_conn",
-        }
+    def mock_embedding() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(
+            clazz=MockEmbeddingModelSetup,
+            model="test-model",
+            connection="mock_embedding_conn",
+        )
 
     @vector_store_connection
     @staticmethod
-    def mock_vector_conn() -> Tuple[Type[Resource], Dict[str, Any]]:  # noqa: 
D102
-        return MockVectorStoreConnection, {
-            "name": "mock_vector_conn",
-            "host": "localhost",
-            "port": 8000,
-        }
+    def mock_vector_conn() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(
+            clazz=MockVectorStoreConnection, host="localhost", port=8000
+        )
 
     @vector_store_setup
     @staticmethod
-    def mock_vector_store() -> Tuple[Type[Resource], Dict[str, Any]]:  # noqa: 
D102
-        return MockVectorStoreSetup, {
-            "name": "mock_vector_store",
-            "connection": "mock_vector_conn",
-            "embedding_model": "mock_embedding",
-            "collection_name": "test_collection",
-        }
+    def mock_vector_store() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(
+            clazz=MockVectorStoreSetup,
+            connection="mock_vector_conn",
+            embedding_model="mock_embedding",
+            collection_name="test_collection",
+        )
 
     @action(InputEvent)
     @staticmethod
@@ -211,7 +210,9 @@ class MyAgent(Agent):  # noqa: D101
 
 @pytest.fixture(scope="module")
 def agent_plan() -> AgentPlan:  # noqa: D103
-    return AgentPlan.from_agent(MyAgent(), AgentConfiguration({"mock.key": 
"mock.value"}))
+    return AgentPlan.from_agent(
+        MyAgent(), AgentConfiguration({"mock.key": "mock.value"})
+    )
 
 
 current_dir = Path(__file__).parent
@@ -250,38 +251,48 @@ def test_add_action_and_resource_to_agent() -> None:  # 
noqa: D103
     my_agent.add_action(
         name="second_action", events=[InputEvent, MyEvent], 
func=MyAgent.second_action
     )
-    my_agent.add_chat_model_setup(
+    my_agent.add_resource(
         name="mock",
-        chat_model=MockChatModelImpl,
-        host="8.8.8.8",
-        desc="mock resource just for testing.",
-        connection="mock",
+        instance=ResourceDescriptor(
+            clazz=MockChatModelImpl,
+            host="8.8.8.8",
+            desc="mock resource just for testing.",
+            connection="mock",
+        ),
     )
-    my_agent.add_embedding_model_connection(
+
+    my_agent.add_resource(
         name="mock_embedding_conn",
-        connection=MockEmbeddingModelConnection,
-        api_key="mock-api-key",
+        instance=ResourceDescriptor(
+            clazz=MockEmbeddingModelConnection, api_key="mock-api-key"
+        ),
     )
-    my_agent.add_embedding_model_setup(
+    my_agent.add_resource(
         name="mock_embedding",
-        embedding_model=MockEmbeddingModelSetup,
-        model="test-model",
-        connection="mock_embedding_conn",
+        instance=ResourceDescriptor(
+            clazz=MockEmbeddingModelSetup,
+            model="test-model",
+            connection="mock_embedding_conn",
+        ),
     )
-    my_agent.add_vector_store_connection(
+    my_agent.add_resource(
         name="mock_vector_conn",
-        connection=MockVectorStoreConnection,
-        host="localhost",
-        port=8000,
+        instance=ResourceDescriptor(
+            clazz=MockVectorStoreConnection, host="localhost", port=8000
+        ),
     )
-    my_agent.add_vector_store_setup(
+    my_agent.add_resource(
         name="mock_vector_store",
-        vector_store=MockVectorStoreSetup,
-        connection="mock_vector_conn",
-        embedding_model="mock_embedding",
-        collection_name="test_collection",
+        instance=ResourceDescriptor(
+            clazz=MockVectorStoreSetup,
+            connection="mock_vector_conn",
+            embedding_model="mock_embedding",
+            collection_name="test_collection",
+        ),
+    )
+    agent_plan = AgentPlan.from_agent(
+        my_agent, AgentConfiguration({"mock.key": "mock.value"})
     )
-    agent_plan = AgentPlan.from_agent(my_agent, 
AgentConfiguration({"mock.key": "mock.value"}))
     json_value = agent_plan.model_dump_json(serialize_as_any=True, indent=4)
     with Path.open(Path(f"{current_dir}/resources/agent_plan.json")) as f:
         expected_json = f.read()
diff --git a/python/flink_agents/plan/tests/tools/test_function_tool.py 
b/python/flink_agents/plan/tests/tools/test_function_tool.py
index 19d7147..60127fb 100644
--- a/python/flink_agents/plan/tests/tools/test_function_tool.py
+++ b/python/flink_agents/plan/tests/tools/test_function_tool.py
@@ -45,7 +45,7 @@ def foo(bar: int, baz: str) -> str:
 
 @pytest.fixture(scope="module")
 def func_tool() -> FunctionTool:  # noqa: D103
-    return from_callable("foo", foo)
+    return from_callable(foo)
 
 
 def test_serialize_function_tool(func_tool: FunctionTool) -> None:  # noqa: 
D103
diff --git a/python/flink_agents/plan/tools/function_tool.py 
b/python/flink_agents/plan/tools/function_tool.py
index e2eb11a..f686bf2 100644
--- a/python/flink_agents/plan/tools/function_tool.py
+++ b/python/flink_agents/plan/tools/function_tool.py
@@ -20,12 +20,12 @@ from typing import Any, Callable
 from docstring_parser import parse
 from typing_extensions import override
 
-from flink_agents.api.tools.tool import BaseTool, ToolMetadata, ToolType
+from flink_agents.api.tools.tool import Tool, ToolMetadata, ToolType
 from flink_agents.api.tools.utils import create_schema_from_function
 from flink_agents.plan.function import JavaFunction, PythonFunction
 
 
-class FunctionTool(BaseTool):
+class FunctionTool(Tool):
     """Tool that takes in a function.
 
     Attributes:
@@ -47,22 +47,18 @@ class FunctionTool(BaseTool):
         return self.func(*args, **kwargs)
 
 
-def from_callable(name: str, func: Callable) -> FunctionTool:
+def from_callable(func: Callable) -> FunctionTool:
     """Create FunctionTool from a user defined function.
 
     Parameters
     ----------
-    name : str
-        Name of the tool function.
     func : Callable
         The function to analyze.
     """
     description = parse(func.__doc__).description
     metadata = ToolMetadata(
-        name=name,
+        name=func.__name__,
         description=description,
-        args_schema=create_schema_from_function(name=name, func=func),
-    )
-    return FunctionTool(
-        name=name, func=PythonFunction.from_callable(func), metadata=metadata
+        args_schema=create_schema_from_function(func.__name__, func=func),
     )
+    return FunctionTool(func=PythonFunction.from_callable(func), 
metadata=metadata)
diff --git a/python/flink_agents/runtime/tests/test_built_in_actions.py 
b/python/flink_agents/runtime/tests/test_built_in_actions.py
index b0d0095..74a72ac 100644
--- a/python/flink_agents/runtime/tests/test_built_in_actions.py
+++ b/python/flink_agents/runtime/tests/test_built_in_actions.py
@@ -16,7 +16,7 @@
 # limitations under the License.
 
#################################################################################
 import uuid
-from typing import Any, Dict, List, Sequence, Tuple, Type
+from typing import Any, Dict, List, Sequence
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
@@ -35,7 +35,7 @@ from flink_agents.api.events.chat_event import 
ChatRequestEvent, ChatResponseEve
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
 from flink_agents.api.prompts.prompt import Prompt
-from flink_agents.api.resource import ResourceType
+from flink_agents.api.resource import ResourceDescriptor, ResourceType
 from flink_agents.api.runner_context import RunnerContext
 from flink_agents.api.tools.tool import ToolType
 
@@ -122,22 +122,20 @@ class MyAgent(Agent):
 
     @chat_model_connection
     @staticmethod
-    def mock_connection() -> Tuple[Type[BaseChatModelConnection], Dict[str, 
Any]]:
+    def mock_connection() -> ResourceDescriptor:
         """Chat model server can be used by ChatModel."""
-        return MockChatModelConnection, {
-            "name": "mock_connection",
-        }
+        return ResourceDescriptor(clazz=MockChatModelConnection)
 
     @chat_model_setup
     @staticmethod
-    def mock_chat_model() -> Tuple[Type[BaseChatModelSetup], Dict[str, Any]]:
+    def mock_chat_model() -> ResourceDescriptor:
         """Chat model can be used in action."""
-        return MockChatModel, {
-            "name": "mock_chat_model",
-            "connection": "mock_connection",
-            "prompt": "prompt",
-            "tools": ["add"],
-        }
+        return ResourceDescriptor(
+            clazz=MockChatModel,
+            connection="mock_connection",
+            prompt="prompt",
+            tools=["add"],
+        )
 
     @tool
     @staticmethod
diff --git a/python/flink_agents/runtime/tests/test_get_resource_in_action.py 
b/python/flink_agents/runtime/tests/test_get_resource_in_action.py
index 4bec1ec..8adf473 100644
--- a/python/flink_agents/runtime/tests/test_get_resource_in_action.py
+++ b/python/flink_agents/runtime/tests/test_get_resource_in_action.py
@@ -15,7 +15,7 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 
#################################################################################
-from typing import Any, Dict, Sequence, Tuple, Type
+from typing import Any, Dict, Sequence
 
 from flink_agents.api.agent import Agent
 from flink_agents.api.chat_message import ChatMessage, MessageRole
@@ -23,7 +23,7 @@ from flink_agents.api.chat_models.chat_model import 
BaseChatModelSetup
 from flink_agents.api.decorators import action, chat_model_setup, tool
 from flink_agents.api.events.event import InputEvent, OutputEvent
 from flink_agents.api.execution_environment import AgentsExecutionEnvironment
-from flink_agents.api.resource import Resource, ResourceType
+from flink_agents.api.resource import ResourceDescriptor, ResourceType
 from flink_agents.api.runner_context import RunnerContext
 
 
@@ -45,13 +45,9 @@ class MockChatModelImpl(BaseChatModelSetup):  # noqa: D101
 class MyAgent(Agent):  # noqa: D101
     @chat_model_setup
     @staticmethod
-    def mock_chat_model() -> Tuple[Type[Resource], Dict[str, Any]]:  # noqa: 
D102
-        return MockChatModelImpl, {
-            "name": "mock_chat_model",
-            "host": "8.8.8.8",
-            "desc": "mock chat model just for testing.",
-            "connection": "mock",
-        }
+    def mock_chat_model() -> ResourceDescriptor:  # noqa: D102
+        return ResourceDescriptor(clazz=MockChatModelImpl, host="8.8.8.8",
+                                  desc="mock chat model just for testing.", 
connection="mock")
 
     @tool
     @staticmethod


Reply via email to