cherry-pick 56e0365c
This commit is contained in:
parent 49b0953ac1
commit 086191ae5a
2 changed files with 38 additions and 58 deletions
@@ -713,7 +713,6 @@ def create_app(args):
     )

     # Step 3: Create optimized embedding function (calls underlying function directly)
-    # Note: When model is None, each binding will use its own default model
     async def optimized_embedding_function(texts, embedding_dim=None):
         try:
             if binding == "lollms":
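All of the hunks in this first file edit the body of optimized_embedding_function, the closure inside create_app() that picks the configured embedding binding and forwards the texts to that binding's embed coroutine. As a reading aid, here is a condensed reconstruction of that dispatcher built from the context lines of this diff: the closure variables (binding, model, host, api_key) are turned into explicit parameters, only the lollms branch is spelled out, and the import paths for EmbeddingFunc and lollms_embed are assumptions made by analogy with the imports visible in later hunks.

```python
# Condensed, illustrative reconstruction -- not the actual create_app() code.
from lightrag.utils import EmbeddingFunc  # assumed location of EmbeddingFunc


def make_embedding_dispatcher(binding: str, model: str | None, host: str, api_key: str):
    async def optimized_embedding_function(texts, embedding_dim=None):
        try:
            if binding == "lollms":
                from lightrag.llm.lollms import lollms_embed  # assumed import path

                # Unwrap: the binding may export either a plain coroutine or an
                # EmbeddingFunc wrapper around one.
                actual_func = (
                    lollms_embed.func
                    if isinstance(lollms_embed, EmbeddingFunc)
                    else lollms_embed
                )
                return await actual_func(
                    texts, embed_model=model, host=host, api_key=api_key
                )
            # elif binding == "ollama": ... the remaining branches repeat the same
            # unwrap-and-call pattern with binding-specific keyword arguments.
        except ImportError as e:
            raise Exception(f"Failed to import {binding} embedding: {e}")

    return optimized_embedding_function
```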
@@ -725,9 +724,9 @@ def create_app(args):
                     if isinstance(lollms_embed, EmbeddingFunc)
                     else lollms_embed
                 )
-                # lollms embed_model is not used (server uses configured vectorizer)
-                # Only pass base_url and api_key
-                return await actual_func(texts, base_url=host, api_key=api_key)
+                return await actual_func(
+                    texts, embed_model=model, host=host, api_key=api_key
+                )
             elif binding == "ollama":
                 from lightrag.llm.ollama import ollama_embed

@@ -746,16 +745,13 @@ def create_app(args):

                 ollama_options = OllamaEmbeddingOptions.options_dict(args)

-                # Pass embed_model only if provided, let function use its default (bge-m3:latest)
-                kwargs = {
-                    "texts": texts,
-                    "host": host,
-                    "api_key": api_key,
-                    "options": ollama_options,
-                }
-                if model:
-                    kwargs["embed_model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(
+                    texts,
+                    embed_model=model,
+                    host=host,
+                    api_key=api_key,
+                    options=ollama_options,
+                )
             elif binding == "azure_openai":
                 from lightrag.llm.azure_openai import azure_openai_embed

@@ -764,11 +760,7 @@ def create_app(args):
                     if isinstance(azure_openai_embed, EmbeddingFunc)
                     else azure_openai_embed
                 )
-                # Pass model only if provided, let function use its default otherwise
-                kwargs = {"texts": texts, "api_key": api_key}
-                if model:
-                    kwargs["model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(texts, model=model, api_key=api_key)
             elif binding == "aws_bedrock":
                 from lightrag.llm.bedrock import bedrock_embed

@@ -777,11 +769,7 @@ def create_app(args):
                     if isinstance(bedrock_embed, EmbeddingFunc)
                     else bedrock_embed
                 )
-                # Pass model only if provided, let function use its default otherwise
-                kwargs = {"texts": texts}
-                if model:
-                    kwargs["model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(texts, model=model)
             elif binding == "jina":
                 from lightrag.llm.jina import jina_embed

@@ -790,16 +778,13 @@ def create_app(args):
                     if isinstance(jina_embed, EmbeddingFunc)
                     else jina_embed
                 )
-                # Pass model only if provided, let function use its default (jina-embeddings-v4)
-                kwargs = {
-                    "texts": texts,
-                    "embedding_dim": embedding_dim,
-                    "base_url": host,
-                    "api_key": api_key,
-                }
-                if model:
-                    kwargs["model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(
+                    texts,
+                    model=model,
+                    embedding_dim=embedding_dim,
+                    base_url=host,
+                    api_key=api_key,
+                )
             elif binding == "gemini":
                 from lightrag.llm.gemini import gemini_embed

@@ -817,19 +802,14 @@ def create_app(args):

                 gemini_options = GeminiEmbeddingOptions.options_dict(args)

-                # Pass model only if provided, let function use its default (gemini-embedding-001)
-                kwargs = {
-                    "texts": texts,
-                    "base_url": host,
-                    "api_key": api_key,
-                    "embedding_dim": embedding_dim,
-                    "task_type": gemini_options.get(
-                        "task_type", "RETRIEVAL_DOCUMENT"
-                    ),
-                }
-                if model:
-                    kwargs["model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(
+                    texts,
+                    model=model,
+                    base_url=host,
+                    api_key=api_key,
+                    embedding_dim=embedding_dim,
+                    task_type=gemini_options.get("task_type", "RETRIEVAL_DOCUMENT"),
+                )
             else:  # openai and compatible
                 from lightrag.llm.openai import openai_embed

@@ -838,16 +818,13 @@ def create_app(args):
                     if isinstance(openai_embed, EmbeddingFunc)
                     else openai_embed
                 )
-                # Pass model only if provided, let function use its default (text-embedding-3-small)
-                kwargs = {
-                    "texts": texts,
-                    "base_url": host,
-                    "api_key": api_key,
-                    "embedding_dim": embedding_dim,
-                }
-                if model:
-                    kwargs["model"] = model
-                return await actual_func(**kwargs)
+                return await actual_func(
+                    texts,
+                    model=model,
+                    base_url=host,
+                    api_key=api_key,
+                    embedding_dim=embedding_dim,
+                )
         except ImportError as e:
             raise Exception(f"Failed to import {binding} embedding: {e}")

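Every binding branch above receives the same shape of edit, so the behavioural difference is worth spelling out once: the deleted code forwarded the model only when one was explicitly configured, letting the binding's own default apply otherwise (the lollms branch went further and never forwarded a model at all), while the restored code always forwards whatever model holds. A self-contained sketch of the two call styles; fake_embed is a stand-in for a binding's embed coroutine and is not part of LightRAG:

```python
import asyncio


async def fake_embed(texts, model="binding-default-model", api_key=None):
    # Stand-in for a binding's embed coroutine; just echoes which model it would use.
    return [f"{t} -> {model}" for t in texts]


async def deleted_style(texts, model, api_key):
    # Deleted pattern: only include the model if one was configured,
    # so fake_embed falls back to its own default when model is None.
    kwargs = {"texts": texts, "api_key": api_key}
    if model:
        kwargs["model"] = model
    return await fake_embed(**kwargs)


async def restored_style(texts, model, api_key):
    # Restored pattern: always pass model through, even when it is None.
    return await fake_embed(texts, model=model, api_key=api_key)


print(asyncio.run(deleted_style(["hi"], None, "key")))   # ['hi -> binding-default-model']
print(asyncio.run(restored_style(["hi"], None, "key")))  # ['hi -> None']
```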
@@ -69,6 +69,7 @@ async def fetch_data(url, headers, data):
 )
 async def jina_embed(
     texts: list[str],
+    model: str = "jina-embeddings-v4",
     embedding_dim: int = 2048,
     late_chunking: bool = False,
     base_url: str = None,
@@ -78,6 +79,8 @@ async def jina_embed(

     Args:
         texts: List of texts to embed.
+        model: The Jina embedding model to use (default: jina-embeddings-v4).
+            Supported models: jina-embeddings-v3, jina-embeddings-v4, etc.
         embedding_dim: The embedding dimensions (default: 2048 for jina-embeddings-v4).
             **IMPORTANT**: This parameter is automatically injected by the EmbeddingFunc wrapper.
             Do NOT manually pass this parameter when calling the function directly.
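The **IMPORTANT** note above says embedding_dim is supplied by LightRAG's EmbeddingFunc wrapper rather than by the caller. A toy illustration of that idea only: this is not LightRAG's actual EmbeddingFunc implementation, and DimInjectingWrapper / dummy_embed are made-up names.

```python
import asyncio
from dataclasses import dataclass
from typing import Any, Awaitable, Callable


@dataclass
class DimInjectingWrapper:
    # Hypothetical wrapper: holds the configured dimension and injects it on every call.
    embedding_dim: int
    func: Callable[..., Awaitable[Any]]

    async def __call__(self, texts: list[str], **kwargs) -> Any:
        return await self.func(texts, embedding_dim=self.embedding_dim, **kwargs)


async def dummy_embed(texts: list[str], embedding_dim: int = 2048) -> list[list[float]]:
    # Stand-in embed function; returns zero vectors of the requested dimension.
    return [[0.0] * embedding_dim for _ in texts]


wrapped = DimInjectingWrapper(embedding_dim=1024, func=dummy_embed)
vectors = asyncio.run(wrapped(["hello"]))
print(len(vectors[0]))  # 1024 -- the dimension came from the wrapper, not the call site
```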
@@ -107,7 +110,7 @@ async def jina_embed(
         "Authorization": f"Bearer {os.environ['JINA_API_KEY']}",
     }
     data = {
-        "model": "jina-embeddings-v4",
+        "model": model,
         "task": "text-matching",
         "dimensions": embedding_dim,
         "embedding_type": "base64",
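With the parameter change above, the request body now honors whichever model the caller selects instead of always sending jina-embeddings-v4. A minimal usage sketch: the api_key keyword follows the call made by the server code in the first file of this commit, and the key can alternatively come from the JINA_API_KEY environment variable that the Authorization header reads.

```python
import asyncio

from lightrag.llm.jina import jina_embed


async def main():
    embeddings = await jina_embed(
        ["LightRAG turns documents into a knowledge graph."],
        model="jina-embeddings-v4",  # now configurable; this is also the default
        api_key="jina_...",          # or export JINA_API_KEY instead
    )
    # embedding_dim is deliberately not passed: per the docstring it is injected by
    # the EmbeddingFunc wrapper when the function is used through LightRAG.
    print(len(embeddings))


asyncio.run(main())
```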