This commit is contained in:
Raphaël MANSUY 2025-12-04 19:14:32 +08:00
parent fd486c287a
commit f16de69415

View file

@@ -272,15 +272,19 @@ async def openai_complete_if_cache(
if timeout is not None:
kwargs["timeout"] = timeout
# Determine the correct model identifier to use
# For Azure OpenAI, we must use the deployment name instead of the model name
api_model = azure_deployment if use_azure and azure_deployment else model
try:
# Don't use async with context manager, use client directly
if "response_format" in kwargs:
response = await openai_async_client.chat.completions.parse(
-                model=model, messages=messages, **kwargs
+                model=api_model, messages=messages, **kwargs
)
else:
response = await openai_async_client.chat.completions.create(
-                model=model, messages=messages, **kwargs
+                model=api_model, messages=messages, **kwargs
)
except APIConnectionError as e:
logger.error(f"OpenAI API Connection Error: {e}")
@@ -706,9 +710,13 @@ async def openai_embed(
)
async with openai_async_client:
# Determine the correct model identifier to use
# For Azure OpenAI, we must use the deployment name instead of the model name
api_model = azure_deployment if use_azure and azure_deployment else model
# Prepare API call parameters
api_params = {
"model": model,
"model": api_model,
"input": texts,
"encoding_format": "base64",
}
@@ -774,7 +782,7 @@ async def azure_openai_complete_if_cache(
# Call the unified implementation with Azure-specific parameters
return await openai_complete_if_cache(
-        model=model,
+        model=deployment,
prompt=prompt,
system_prompt=system_prompt,
history_messages=history_messages,
@@ -855,7 +863,7 @@ async def azure_openai_embed(
# Call the unified implementation with Azure-specific parameters
return await openai_embed(
texts=texts,
-        model=model or deployment,
+        model=deployment,
base_url=base_url,
api_key=api_key,
use_azure=True,