Added Claude support. Still buggy due to instructor issues.

This commit is contained in:
Vasilije 2024-03-28 11:40:10 +01:00
parent 90c41512ed
commit d1435f6cd3
2 changed files with 57 additions and 0 deletions

View file

@ -0,0 +1,57 @@
import asyncio
import aiohttp
from typing import List, Type
from pydantic import BaseModel
import instructor
from tenacity import retry, stop_after_attempt
import anthropic
import openai
from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt
class AnthropicAdapter(LLMInterface):
    """Adapter for the Anthropic (Claude) messages API.

    Wraps an instructor-patched Anthropic client so responses are parsed
    into pydantic models via tool calling.
    """
    def __init__(self, ollama_endpoint, api_key: str, model: str):
        """Initialize the adapter.

        :param ollama_endpoint: Unused legacy parameter, kept only for
            call-site compatibility — TODO remove once callers are updated.
        :param api_key: Anthropic API key, passed explicitly to the client
            (previously ignored, silently relying on ANTHROPIC_API_KEY).
        :param model: Claude model name used for all completions.
        """
        # Use the async client: acreate_structured_output awaits the patched
        # create call, and the sync anthropic.Anthropic() client's
        # messages.create cannot be awaited.
        self.aclient = instructor.patch(
            create=anthropic.AsyncAnthropic(api_key=api_key).messages.create,
            mode=instructor.Mode.ANTHROPIC_TOOLS,
        )
        self.model = model
    @retry(stop=stop_after_attempt(5))
    async def acreate_structured_output(self, text_input: str, system_prompt: str,
                                        response_model: Type[BaseModel]) -> BaseModel:
        """Generate a structured response from a user query.

        :param text_input: Raw user input to extract information from.
        :param system_prompt: System prompt text appended to the user message.
        :param response_model: Pydantic model the response is parsed into.
        :returns: An instance of ``response_model``.
        """
        # max_retries=0 disables instructor's own retry loop; retries are
        # handled by the tenacity decorator above instead.
        return await self.aclient(
            model=self.model,
            max_tokens=4096,
            max_retries=0,
            messages=[
                {
                    "role": "user",
                    "content": f"""Use the given format to
                extract information from the following input: {text_input}. {system_prompt}""",
                }
            ],
            response_model=response_model,
        )
    def show_prompt(self, text_input: str, system_prompt: str) -> str:
        """Format and display the prompt for a user query.

        :param text_input: User input; a placeholder is substituted if empty.
        :param system_prompt: Path to the system prompt file to load.
        :returns: The formatted prompt, or None if the loaded prompt is empty.
        :raises ValueError: If no system prompt path is provided.
        """
        if not text_input:
            text_input = "No user input provided."
        if not system_prompt:
            raise ValueError("No system prompt path provided.")
        system_prompt = read_query_prompt(system_prompt)
        formatted_prompt = f"""System Prompt:\n{system_prompt}\n\nUser Input:\n{text_input}\n""" if system_prompt else None
        return formatted_prompt