Merge pull request #771 from enriquecatala/fix-litellm-litellm-proxy-support
Enable LiteLLM proxy with embedding_binding_host
commit 4d58ff8bb4
1 changed file with 2 additions and 1 deletion
@@ -1027,7 +1027,8 @@ def create_app(args):
             if args.embedding_binding == "azure_openai"
             else openai_embed(
                 texts,
-                model=args.embedding_model,  # no host is used for openai,
+                model=args.embedding_model,
+                base_url=args.embedding_binding_host,
                 api_key=args.embedding_binding_api_key,
             ),
         )
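For context: before this change the OpenAI embedding path ignored the configured host (the removed comment noted "no host is used for openai"), so requests always went to the official OpenAI endpoint. With embedding_binding_host now forwarded to openai_embed as base_url, the server can target any OpenAI-compatible endpoint such as a LiteLLM proxy. Below is a minimal sketch of the resulting call, assuming LightRAG's async openai_embed helper (import path may vary by version) and a LiteLLM proxy listening at the hypothetical address http://localhost:4000; the model name and API key are placeholders.

    import asyncio
    from lightrag.llm import openai_embed  # import path may differ by LightRAG version

    async def main():
        # base_url selects the OpenAI-compatible endpoint; pointing it at a
        # LiteLLM proxy routes the embedding request through LiteLLM.
        vectors = await openai_embed(
            ["hello world"],
            model="text-embedding-3-small",    # model name exposed by the proxy (illustrative)
            base_url="http://localhost:4000",  # embedding_binding_host -> LiteLLM proxy (assumed URL)
            api_key="sk-anything",             # key accepted by the proxy (placeholder)
        )
        print(vectors.shape)

    asyncio.run(main())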