add locally deployed llm (#841)
### What problem does this PR solve?

Adds support for a locally deployed LLM.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
parent
2b36283712
commit
a7bd427116
1 changed files with 16 additions and 1 deletions
|
|
@ -298,4 +298,19 @@ class LocalLLM(Base):
|
||||||
)
|
)
|
||||||
return ans, num_tokens_from_string(ans)
|
return ans, num_tokens_from_string(ans)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
return "**ERROR**: " + str(e), 0
|
return "**ERROR**: " + str(e), 0
|
||||||
|
|
||||||
|
def chat_streamly(self, system, history, gen_conf):
    """Stream a chat completion from the locally deployed LLM.

    Yields the accumulated answer text after each received chunk, and
    finally yields the chunk count as an ``int`` sentinel.

    Args:
        system: Optional system-prompt string; prepended to the messages
            when truthy.
        history: List of ``{"role": ..., "content": ...}`` message dicts.
            Not mutated (the original implementation inserted the system
            message into the caller's list in place, duplicating it on
            repeated calls — fixed here by building a new list).
        gen_conf: Generation config passed through to the client.

    Yields:
        str: the answer accumulated so far, once per streamed chunk; on
            error, the partial answer with an ``**ERROR**`` suffix.
        int: finally, the number of streamed chunks received.
            NOTE(review): this counts chunks, not true tokens — unlike
            the non-streaming ``chat``, which uses
            ``num_tokens_from_string``; confirm whether chunk==token here.
    """
    if system:
        # Build a fresh list so the caller's history is left untouched.
        history = [{"role": "system", "content": system}] + history
    token_count = 0
    answer = ""
    try:
        for ans in self.client.chat_streamly(history, gen_conf):
            answer += ans
            token_count += 1  # one increment per streamed chunk
            yield answer
    except Exception as e:
        # Surface the failure inline with whatever partial answer we have.
        yield answer + "\n**ERROR**: " + str(e)

    yield token_count
|
||||||
Loading…
Add table
Reference in a new issue