This is an automated email from the ASF dual-hosted git repository.
jin pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph-ai.git
The following commit(s) were added to refs/heads/main by this push:
new 6a82a859 fix(llm): Ollama embedding API usage and config param (#306)
6a82a859 is described below
commit 6a82a8595b530f8d44bd193f80f8cc65af11be14
Author: imbajin <[email protected]>
AuthorDate: Wed Nov 5 19:03:46 2025 +0800
fix(llm): Ollama embedding API usage and config param (#306)
Corrects the OllamaEmbedding instantiation to use 'model' instead of
'model_name' and updates the embed API call to pass input as a list,
matching the expected Ollama API format. Also removes the Linux-only
marker from the pycgraph dependency in pyproject.toml.
WE DO NEED TEST:)
---
hugegraph-llm/pyproject.toml | 2 +-
hugegraph-llm/src/hugegraph_llm/models/embeddings/init_embedding.py | 2 +-
hugegraph-llm/src/hugegraph_llm/models/embeddings/ollama.py | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/hugegraph-llm/pyproject.toml b/hugegraph-llm/pyproject.toml
index d702b301..095ac44f 100644
--- a/hugegraph-llm/pyproject.toml
+++ b/hugegraph-llm/pyproject.toml
@@ -97,7 +97,7 @@ allow-direct-references = true
[tool.uv.sources]
hugegraph-python-client = { workspace = true }
-pycgraph = { git = "https://github.com/ChunelFeng/CGraph.git", subdirectory = "python", tag = "v3.2.0", marker = "sys_platform == 'linux'" }
+pycgraph = { git = "https://github.com/ChunelFeng/CGraph.git", subdirectory = "python", tag = "v3.2.0" }
[tool.mypy]
disable_error_code = ["import-untyped"]
diff --git a/hugegraph-llm/src/hugegraph_llm/models/embeddings/init_embedding.py b/hugegraph-llm/src/hugegraph_llm/models/embeddings/init_embedding.py
index 26e579e4..b9b8527a 100644
--- a/hugegraph-llm/src/hugegraph_llm/models/embeddings/init_embedding.py
+++ b/hugegraph-llm/src/hugegraph_llm/models/embeddings/init_embedding.py
@@ -38,7 +38,7 @@ def get_embedding(llm_configs: LLMConfig):
)
if llm_configs.embedding_type == "ollama/local":
return OllamaEmbedding(
- model_name=llm_configs.ollama_embedding_model,
+ model=llm_configs.ollama_embedding_model,
host=llm_configs.ollama_embedding_host,
port=llm_configs.ollama_embedding_port,
)
diff --git a/hugegraph-llm/src/hugegraph_llm/models/embeddings/ollama.py b/hugegraph-llm/src/hugegraph_llm/models/embeddings/ollama.py
index 28826099..755ba504 100644
--- a/hugegraph-llm/src/hugegraph_llm/models/embeddings/ollama.py
+++ b/hugegraph-llm/src/hugegraph_llm/models/embeddings/ollama.py
@@ -43,7 +43,7 @@ class OllamaEmbedding(BaseEmbedding):
def get_text_embedding(self, text: str) -> List[float]:
"""Comment"""
-        return list(self.client.embed(model=self.model, input=text)["embeddings"][0])
+        return list(self.client.embed(model=self.model, input=[text])["embeddings"][0])
    def get_texts_embeddings(self, texts: List[str], batch_size: int = 32) -> List[List[float]]:
"""Get embeddings for multiple texts with automatic batch splitting.