fix batch size error for qianwen embedding (#2431)
### What problem does this PR solve?

Fixes #2402: batch size error when calling the Qianwen (DashScope) embedding API.

### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
parent
d3262ca378
commit
3044cb85fd
1 changed file with 1 addition and 0 deletions
|
|
@@ -155,6 +155,7 @@ class QWenEmbed(Base):
|
||||||
|
|
||||||
def encode(self, texts: list, batch_size=10):
|
def encode(self, texts: list, batch_size=10):
|
||||||
import dashscope
|
import dashscope
|
||||||
|
batch_size = min(batch_size, 4)
|
||||||
try:
|
try:
|
||||||
res = []
|
res = []
|
||||||
token_count = 0
|
token_count = 0
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue