wenjin272 commented on code in PR #121:
URL: https://github.com/apache/flink-agents/pull/121#discussion_r2297673392


##########
python/flink_agents/integrations/chat_models/tongyi_chat_model.py:
##########
@@ -0,0 +1,249 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+#################################################################################
+import contextlib
+import json
+import os
+import uuid
+from typing import Any, Dict, List, Optional, Sequence, cast
+
+import dashscope
+from dashscope import Generation
+from pydantic import Field
+
+from flink_agents.api.chat_message import ChatMessage, MessageRole
+from flink_agents.api.chat_models.chat_model import (
+    BaseChatModelConnection,
+    BaseChatModelSetup,
+)
+from flink_agents.api.tools.tool import BaseTool
+
+DEFAULT_REQUEST_TIMEOUT = 60.0
+DEFAULT_MODEL = "qwen-plus"
+
+
+class TongyiChatModelConnection(BaseChatModelConnection):
+    """Tongyi ChatModelConnection which manages the connection to the Tongyi 
API server.
+
+    Attributes:
+    ----------
+    api_key : str
+        Your DashScope API key.
+    model : str
+        Model name to use.
+    request_timeout : float
+        The timeout for making http request to Tongyi API server.
+    """
+
+    api_key: str = Field(
+        default_factory=lambda: os.environ.get("DASHSCOPE_API_KEY"),
+        description="Your DashScope API key.",
+    )
+    model: str = Field(default=DEFAULT_MODEL, description="Model name to use.")
+    request_timeout: float = Field(
+        default=DEFAULT_REQUEST_TIMEOUT,
+        description="The timeout for making http request to Tongyi API 
server.",
+    )
+
+    def __init__(
+        self,
+        model: str = DEFAULT_MODEL,
+        api_key: Optional[str] = None,
+        request_timeout: Optional[float] = DEFAULT_REQUEST_TIMEOUT,
+        **kwargs: Any,
+    ) -> None:
+        """Init method."""
+        resolved_api_key = api_key or os.environ.get("DASHSCOPE_API_KEY")
+        if not resolved_api_key:
+            msg = (
+                "DashScope API key is not provided. "
+                "Please pass it as an argument or set the 'DASHSCOPE_API_KEY' 
environment variable."
+            )
+            raise ValueError(msg)
+
+        dashscope.api_key = resolved_api_key

Review Comment:
   What's the scope of dashscope.api_key? Can two TongyiChatModelConnection instances use two different api_key values?
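   A minimal sketch of the concern (the keys and the per-call api_key usage are illustrative assumptions, not part of the PR):

   ```python
   import dashscope
   from dashscope import Generation

   from flink_agents.integrations.chat_models.tongyi_chat_model import (
       TongyiChatModelConnection,
   )

   conn_a = TongyiChatModelConnection(api_key="key-for-team-a")
   conn_b = TongyiChatModelConnection(api_key="key-for-team-b")

   # Both __init__ calls assign the same module-level attribute, so after the
   # second one dashscope.api_key == "key-for-team-b"; requests issued through
   # conn_a would silently use key B as well.
   assert dashscope.api_key == "key-for-team-b"

   # One way to avoid the collision, assuming the SDK accepts a per-request
   # api_key keyword: pass each instance's own key on the call itself.
   response = Generation.call(
       model=conn_a.model,
       messages=[{"role": "user", "content": "hello"}],
       api_key=conn_a.api_key,
   )
   ```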



##########
python/flink_agents/integrations/chat_models/tongyi_chat_model.py:
##########
@@ -0,0 +1,249 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+#################################################################################
+import contextlib
+import json
+import os
+import uuid
+from typing import Any, Dict, List, Optional, Sequence, cast
+
+import dashscope
+from dashscope import Generation
+from pydantic import Field
+
+from flink_agents.api.chat_message import ChatMessage, MessageRole
+from flink_agents.api.chat_models.chat_model import (
+    BaseChatModelConnection,
+    BaseChatModelSetup,
+)
+from flink_agents.api.tools.tool import BaseTool
+
+DEFAULT_REQUEST_TIMEOUT = 60.0
+DEFAULT_MODEL = "qwen-plus"
+
+
+class TongyiChatModelConnection(BaseChatModelConnection):
+    """Tongyi ChatModelConnection which manages the connection to the Tongyi 
API server.
+
+    Attributes:
+    ----------
+    api_key : str
+        Your DashScope API key.
+    model : str
+        Model name to use.
+    request_timeout : float
+        The timeout for making http request to Tongyi API server.
+    """
+
+    api_key: str = Field(
+        default_factory=lambda: os.environ.get("DASHSCOPE_API_KEY"),
+        description="Your DashScope API key.",
+    )
+    model: str = Field(default=DEFAULT_MODEL, description="Model name to use.")
+    request_timeout: float = Field(
+        default=DEFAULT_REQUEST_TIMEOUT,
+        description="The timeout for making http request to Tongyi API 
server.",
+    )
+
+    def __init__(
+        self,
+        model: str = DEFAULT_MODEL,
+        api_key: Optional[str] = None,
+        request_timeout: Optional[float] = DEFAULT_REQUEST_TIMEOUT,
+        **kwargs: Any,
+    ) -> None:
+        """Init method."""
+        resolved_api_key = api_key or os.environ.get("DASHSCOPE_API_KEY")
+        if not resolved_api_key:
+            msg = (
+                "DashScope API key is not provided. "
+                "Please pass it as an argument or set the 'DASHSCOPE_API_KEY' 
environment variable."
+            )
+            raise ValueError(msg)
+
+        dashscope.api_key = resolved_api_key
+
+        super().__init__(
+            model=model,
+            api_key=resolved_api_key,
+            request_timeout=request_timeout,
+            **kwargs,
+        )
+
+    def chat(
+        self,
+        messages: Sequence[ChatMessage],
+        tools: Optional[List[BaseTool]] = None,
+        **kwargs: Any,
+    ) -> ChatMessage:
+        """Process a sequence of messages, and return a response."""
+        tongyi_messages = self.__convert_to_tongyi_messages(messages)
+
+        tongyi_tools: Optional[List[Dict[str, Any]]] = (
+            [tool.metadata.to_dashscope_tool() for tool in tools] if tools else None
+        )
+
+        extract_reasoning = bool(kwargs.pop("extract_reasoning", False))
+
+        response = Generation.call(

Review Comment:
   Maybe we could pass api_key here instead of relying on the module-level dashscope.api_key?
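   A hedged sketch of that change inside chat(), assuming Generation.call accepts a per-request api_key keyword:

   ```python
   response = Generation.call(
       model=self.model,
       messages=tongyi_messages,
       tools=tongyi_tools,
       api_key=self.api_key,  # use this connection's key, not the module global
       **kwargs,
   )
   ```

   That keeps each TongyiChatModelConnection instance tied to its own key even when several instances coexist in one process.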



##########
python/flink_agents/api/tools/tool.py:
##########
@@ -115,6 +115,23 @@ def to_openai_tool(self, skip_length_check: bool = False) -> typing.Dict[str, An
             },
         }
 
+    def to_dashscope_tool(self, skip_length_check: bool = False) -> typing.Dict[str, Any]:  # noqa:FBT001

Review Comment:
   This function is better declared in tongyi_chat_model.py, since it is a chat-model-specific function.
   The to_openai_tool function is declared here because almost all chat models support the OpenAI tool format.
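   A rough sketch of the relocation, assuming DashScope accepts the OpenAI-style {"type": "function", "function": {...}} tool schema (the helper name _to_dashscope_tool is illustrative):

   ```python
   # In tongyi_chat_model.py
   from typing import Any, Dict

   from flink_agents.api.tools.tool import BaseTool


   def _to_dashscope_tool(tool: BaseTool) -> Dict[str, Any]:
       """Convert a BaseTool into the dict format expected by dashscope."""
       # DashScope's function-calling payload mirrors the OpenAI format,
       # so the generic converter on the tool metadata can be reused here.
       return tool.metadata.to_openai_tool()


   # and in TongyiChatModelConnection.chat():
   # tongyi_tools = [_to_dashscope_tool(tool) for tool in tools] if tools else None
   ```

   That way tool.py keeps only the format shared by most providers.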



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
