This is an automated email from the ASF dual-hosted git repository.
jin pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-ai.git
The following commit(s) were added to refs/heads/main by this push:
new 6a672cd refactor(llm): enhance the multi configs for LLM (#212)
6a672cd is described below
commit 6a672cdd3f2dc1a2d624ead0a92d32d452a9656b
Author: HLOVI <[email protected]>
AuthorDate: Fri May 9 17:50:41 2025 +0800
refactor(llm): enhance the multi configs for LLM (#212)
This PR aims to enhance configuration handling by integrating
environment variable management and improved logging. Key changes
include:
- Introducing os and dotenv imports to load environment variables in the
demo configuration block.
- Adding logic to read API keys from a .env file and conditionally
applying configuration updates based on their presence.
- Updating the base configuration class to verify and update the .env
file with added error handling and logging.
---------
Co-authored-by: imbajin <[email protected]>
---
.gitignore | 1 +
.../src/hugegraph_llm/config/models/base_config.py | 78 +++++++++++++++++-----
.../hugegraph_llm/demo/rag_demo/configs_block.py | 17 ++++-
3 files changed, 79 insertions(+), 17 deletions(-)
diff --git a/.gitignore b/.gitignore
index afe736e..1eb9be9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -130,6 +130,7 @@ config_prompt.yaml
# Environments
.env
+.env.bak
.venv
.vscode
.cursor
diff --git a/hugegraph-llm/src/hugegraph_llm/config/models/base_config.py
b/hugegraph-llm/src/hugegraph_llm/config/models/base_config.py
index 7ad0fda..69af55c 100644
--- a/hugegraph-llm/src/hugegraph_llm/config/models/base_config.py
+++ b/hugegraph-llm/src/hugegraph_llm/config/models/base_config.py
@@ -31,7 +31,7 @@ class BaseConfig(BaseSettings):
class Config:
env_file = env_path
case_sensitive = False
- extra = 'ignore' # ignore extra fields to avoid ValidationError
+ extra = 'ignore' # ignore extra fields to avoid ValidationError
env_ignore_empty = True
def generate_env(self):
@@ -69,19 +69,67 @@ class BaseConfig(BaseSettings):
set_key(env_path, k, v if v else "", quote_mode="never")
def check_env(self):
- config_dict = self.model_dump()
- config_dict = {k.upper(): v for k, v in config_dict.items()}
- env_config = dotenv_values(f"{env_path}")
- for k, v in config_dict.items():
- if k in env_config:
- continue
- log.info("Update %s: %s=%s", env_path, k, v)
- set_key(env_path, k, v if v else "", quote_mode="never")
+ """Synchronize configs between .env file and object.
+
+ This method performs two steps:
+ 1. Updates object attributes from .env file values when they differ
+ 2. Adds missing configuration items to the .env file
+ """
+ try:
+            # Read the .env file and prepare object config
+ env_config = dotenv_values(env_path)
+ config_dict = {k.upper(): v for k, v in self.model_dump().items()}
+
+ # Step 1: Update the object from .env when values differ
+ self._sync_env_to_object(env_config, config_dict)
+ # Step 2: Add missing config items to .env
+ self._sync_object_to_env(env_config, config_dict)
+ except Exception as e:
+ log.error("An error occurred when checking the .env variable file:
%s", str(e))
+ raise
+
+ def _sync_env_to_object(self, env_config, config_dict):
+ """Update object attributes from .env file values when they differ."""
+ for env_key, env_value in env_config.items():
+ if env_key in config_dict:
+ obj_value = config_dict[env_key]
+ obj_value_str = str(obj_value) if obj_value is not None else ""
+
+ if env_value != obj_value_str:
+ log.info("Update configuration from the file: %s=%s
(Original value: %s)",
+ env_key, env_value, obj_value_str)
+ # Update the object attribute (using lowercase key)
+ setattr(self, env_key.lower(), env_value)
+
+ def _sync_object_to_env(self, env_config, config_dict):
+ """Add missing configuration items to the .env file."""
+ for obj_key, obj_value in config_dict.items():
+ if obj_key not in env_config:
+ obj_value_str = str(obj_value) if obj_value is not None else ""
+ log.info("Add configuration items to the environment variable
file: %s=%s",
+ obj_key, obj_value)
+ # Add to .env
+ set_key(env_path, obj_key, obj_value_str, quote_mode="never")
def __init__(self, **data):
- super().__init__(**data)
- if not os.path.exists(env_path):
- self.generate_env()
- else:
- self.check_env()
- log.info("Loading %s successfully for %s!", env_path,
self.__class__.__name__)
+ try:
+ file_exists = os.path.exists(env_path)
+ # Step 1: Load environment variables if file exists
+ if file_exists:
+ env_config = dotenv_values(env_path)
+ for k, v in env_config.items():
+ os.environ[k] = v
+
+ # Step 2: Init the parent class with loaded environment variables
+ super().__init__(**data)
+ # Step 3: Handle environment file operations after initialization
+ if not file_exists:
+ self.generate_env()
+ else:
+ # Synchronize configurations between the object and .env file
+ self.check_env()
+
+ log.info("The %s file was loaded. Class: %s", env_path,
self.__class__.__name__)
+ except Exception as e:
+ log.error("An error occurred when initializing the configuration
object: %s", str(e))
+ raise
diff --git a/hugegraph-llm/src/hugegraph_llm/demo/rag_demo/configs_block.py
b/hugegraph-llm/src/hugegraph_llm/demo/rag_demo/configs_block.py
index 73d0600..7903474 100644
--- a/hugegraph-llm/src/hugegraph_llm/demo/rag_demo/configs_block.py
+++ b/hugegraph-llm/src/hugegraph_llm/demo/rag_demo/configs_block.py
@@ -16,11 +16,13 @@
# under the License.
import json
+import os
from functools import partial
from typing import Optional
import gradio as gr
import requests
+from dotenv import dotenv_values
from requests.auth import HTTPBasicAuth
from hugegraph_llm.config import huge_settings, llm_settings
@@ -261,7 +263,8 @@ def create_configs_block() -> list:
# TODO : use OOP to refactor the following code
with gr.Accordion("2. Set up the LLM.", open=False):
- gr.Markdown("> Tips: the openai option also support openai style api
from other providers.")
+ gr.Markdown("> Tips: The OpenAI option also support openai style api
from other providers. "
+ "**Refresh the page** to load the **latest configs** in
__UI__.")
with gr.Tab(label='chat'):
chat_llm_dropdown = gr.Dropdown(choices=["openai", "litellm",
"qianfan_wenxin", "ollama/local"],
value=getattr(llm_settings,
"chat_llm_type"), label="type")
@@ -308,7 +311,17 @@ def create_configs_block() -> list:
llm_config_input = [gr.Textbox(value="", visible=False)
for _ in range(4)]
llm_config_button = gr.Button("Apply configuration")
llm_config_button.click(apply_llm_config_with_chat_op,
inputs=llm_config_input)
-
+            # Determine whether there are settings in the .env file
+ dir_name = os.path.dirname
+ package_path =
dir_name(dir_name(dir_name(dir_name(dir_name(os.path.abspath(__file__))))))
+ env_path = os.path.join(package_path, ".env")
+ env_vars = dotenv_values(env_path)
    api_extract_key = env_vars.get("OPENAI_EXTRACT_API_KEY")
    api_text2gql_key = env_vars.get("OPENAI_TEXT2GQL_API_KEY")
    if not api_extract_key:
        llm_config_button.click(apply_llm_config_with_extract_op,
inputs=llm_config_input)
    if not api_text2gql_key:
        llm_config_button.click(apply_llm_config_with_text2gql_op,
inputs=llm_config_input)
with gr.Tab(label='mini_tasks'):
extract_llm_dropdown = gr.Dropdown(choices=["openai", "litellm",
"qianfan_wenxin", "ollama/local"],
value=getattr(llm_settings,
"extract_llm_type"), label="type")