This is an automated email from the ASF dual-hosted git repository.

jin pushed a commit to branch text2gql
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-ai.git

commit 999fea15923d38538a445df4963ae87f8b235030
Author: Lriver <[email protected]>
AuthorDate: Sun Oct 5 23:00:28 2025 +0800

    Compatible with OpenAI format
---
 text2gremlin/Vertical_Text2Gremlin/graph2gremlin.py |  2 +-
 text2gremlin/Vertical_Text2Gremlin/llm_handler.py   | 17 +++++++----------
 2 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/text2gremlin/Vertical_Text2Gremlin/graph2gremlin.py b/text2gremlin/Vertical_Text2Gremlin/graph2gremlin.py
index 418a5e7d..148bd47f 100644
--- a/text2gremlin/Vertical_Text2Gremlin/graph2gremlin.py
+++ b/text2gremlin/Vertical_Text2Gremlin/graph2gremlin.py
@@ -11,7 +11,7 @@ load_dotenv()
 try:
     client = OpenAI(
         api_key=os.getenv("ds_api_key"),
-        base_url="https://api.deepseek.com/v1",
+        base_url=os.getenv("base_url"),
     )
 except Exception as e:
     print(f"初始化OpenAI客户端失败,请检查环境变量: {e}")
diff --git a/text2gremlin/Vertical_Text2Gremlin/llm_handler.py b/text2gremlin/Vertical_Text2Gremlin/llm_handler.py
index 3e7e5528..dce999c2 100644
--- a/text2gremlin/Vertical_Text2Gremlin/llm_handler.py
+++ b/text2gremlin/Vertical_Text2Gremlin/llm_handler.py
@@ -5,17 +5,14 @@ from dotenv import load_dotenv
 from openai import OpenAI
 from typing import List, Dict
 
-
-
 load_dotenv()
-api_key = os.environ.get("DEEPSEEK_API_KEY")
-client = OpenAI(
-    api_key=os.getenv("ds_api_key"),
-    base_url="https://api.deepseek.com/v1",
-)
+
+API_KEY = os.getenv("API_KEY")
+MODEL_NAME = os.getenv("MODEL_NAME")
+BASE_URL = os.getenv("BASE_URL")
 client = OpenAI(
-    api_key=os.getenv("ds_api_key"),
-    base_url="https://api.deepseek.com/v1",
+    api_key=API_KEY,
+    base_url=BASE_URL,
 )
 
 def generate_gremlin_variations(seed_query: str, seed_questions: List[str]) -> List[str]:
@@ -99,7 +96,7 @@ def generate_texts_for_gremlin(gremlin_query: str) -> List[str]:
     # print(f"\n 问题生成的user prompt: \n{user_prompt}")
     try:
         response = client.chat.completions.create(
-            model="deepseek-chat",
+            model=MODEL_NAME,
             messages=[
                 {"role": "system", "content": system_prompt},
                 {"role": "user", "content": user_prompt}

Reply via email to