Update max_token_size according to the OpenAI embeddings documentation
This commit is contained in:
parent
38c8a6e97c
commit
9b92d425f6
1 changed file with 1 addition and 1 deletion
|
|
@ -556,7 +556,7 @@ async def openai_embedding(
|
|||
return np.array([dp.embedding for dp in response.data])
|
||||
|
||||
|
||||
@wrap_embedding_func_with_attrs(embedding_dim=1536, max_token_size=8192)
|
||||
@wrap_embedding_func_with_attrs(embedding_dim=1536, max_token_size=8191)
|
||||
@retry(
|
||||
stop=stop_after_attempt(3),
|
||||
wait=wait_exponential(multiplier=1, min=4, max=10),
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue