Merge branch 'dev' into langfuse_generation

commit 0813f9e7ac

1 changed file with 14 additions and 4 deletions
@@ -1,12 +1,17 @@
-from typing import Type
+import logging
 import os
+from typing import Type
 
+from instructor.exceptions import InstructorRetryException
 from pydantic import BaseModel
+from tenacity import RetryError
 
 from cognee.infrastructure.llm.get_llm_client import get_llm_client
 from cognee.infrastructure.llm.prompts import read_query_prompt
-from cognee.shared.data_models import SummarizedCode, SummarizedClass, SummarizedFunction
+from cognee.shared.data_models import SummarizedCode
 from cognee.tasks.summarization.mock_summary import get_mock_summarized_code
 
+logger = logging.getLogger("extract_summary")
+
 async def extract_summary(content: str, response_model: Type[BaseModel]):
     llm_client = get_llm_client()
@@ -14,7 +19,7 @@ async def extract_summary(content: str, response_model: Type[BaseModel]):
     system_prompt = read_query_prompt("summarize_content.txt")
 
     llm_output = await llm_client.acreate_structured_output(content, system_prompt, response_model)
 
 
     return llm_output
 
 async def extract_code_summary(content: str):
@@ -27,5 +32,10 @@ async def extract_code_summary(content: str):
         result = get_mock_summarized_code()
         return result
     else:
-        result = await extract_summary(content, response_model=SummarizedCode)
+        try:
+            result = await extract_summary(content, response_model=SummarizedCode)
+        except (RetryError, InstructorRetryException) as e:
+            logger.error("Failed to extract code summary, falling back to mock summary", exc_info=e)
+            result = get_mock_summarized_code()
+
         return result
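The effect of the new except clause: if the LLM call exhausts its retries, signalled either by tenacity's RetryError or instructor's InstructorRetryException, the task logs the failure and falls back to the mock summary instead of letting the exception propagate. A condensed sketch of the resulting function, assuming the mocking toggle (which sits outside this hunk) is an environment check:

# Condensed sketch only; the exact mocking condition is outside this diff, so the
# MOCK_CODE_SUMMARY environment check below is an assumption.
async def extract_code_summary(content: str):
    if os.getenv("MOCK_CODE_SUMMARY", "false").lower() in ("true", "1"):
        return get_mock_summarized_code()

    try:
        # Normal path: structured summarization via the LLM client.
        result = await extract_summary(content, response_model=SummarizedCode)
    except (RetryError, InstructorRetryException) as error:
        # Fallback path: log the failure and degrade to the deterministic mock summary.
        logger.error("Failed to extract code summary, falling back to mock summary", exc_info=error)
        result = get_mock_summarized_code()

    return result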