feat: adds new error classes to llm and databases + introduces loglevel and logging from child error

hajdul88 2025-08-13 13:40:50 +02:00
parent 6dcd59c73c
commit 32996aa0d0
10 changed files with 48 additions and 32 deletions
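Context for the diffs below: every child error class gains `log` and `log_level` parameters and forwards them to `CogneeApiError`, so the logging decision travels with the exception definition instead of being repeated at call sites. The base class itself is not part of this diff; the following is only a rough sketch, under the assumption that `CogneeApiError` stores the usual message/name/status_code attributes and emits a log record when `log` is true (the logger name and level mapping are made up for illustration).

# Illustrative sketch of a base class that consumes the forwarded arguments.
# Not the actual cognee implementation -- logger name and level handling are assumptions.
import logging
from fastapi import status

logger = logging.getLogger("cognee")


class CogneeApiError(Exception):
    def __init__(
        self,
        message: str = "An API error occurred.",
        name: str = "CogneeApiError",
        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        log=True,
        log_level="ERROR",
    ):
        self.message = message
        self.name = name
        self.status_code = status_code
        super().__init__(message)
        if log:
            # Translate the string level ("ERROR", "DEBUG", ...) into the stdlib constant.
            logger.log(getattr(logging, log_level, logging.ERROR), "%s: %s", name, message)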

View file

@@ -43,8 +43,10 @@ class CogneeSystemError(CogneeApiError):
message: str = "A system error occurred.",
name: str = "CogneeSystemError",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)
class CogneeValidationError(CogneeApiError):
@@ -55,8 +57,10 @@ class CogneeValidationError(CogneeApiError):
message: str = "A validation error occurred.",
name: str = "CogneeValidationError",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)
class CogneeConfigurationError(CogneeApiError):
@@ -67,8 +71,10 @@ class CogneeConfigurationError(CogneeApiError):
message: str = "A system configuration error occurred.",
name: str = "CogneeConfigurationError",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)
class CogneeTransientError(CogneeApiError):
@@ -79,6 +85,8 @@ class CogneeTransientError(CogneeApiError):
message: str = "A transient error occurred.",
name: str = "CogneeTransientError",
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)
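With these defaults, raising any of the four classes above logs the error at ERROR level automatically; a call site only passes `log` or `log_level` to deviate from that. A hypothetical example (the message and override are made up, not taken from the commit):

# Hypothetical call site -- illustration only.
raise CogneeConfigurationError(
    message="Vector database URL is not configured.",
    log_level="WARNING",  # override the class default of "ERROR"
)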

View file

@@ -1,20 +0,0 @@
from cognee.exceptions import CogneeApiError
from fastapi import status
class EmbeddingException(CogneeApiError):
"""
Custom exception for handling embedding-related errors.
This exception class is designed to indicate issues specifically related to embeddings
within the application. It extends the base exception class CogneeApiError and allows
for customization of the error message, name, and status code.
"""
def __init__(
self,
message: str = "Embedding Exception.",
name: str = "EmbeddingException",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
):
super().__init__(message, name, status_code)

View file

@@ -8,4 +8,5 @@ from .exceptions import (
EntityNotFoundError,
EntityAlreadyExistsError,
DatabaseNotCreatedError,
EmbeddingException,
)

View file

@@ -84,3 +84,21 @@ class NodesetFilterNotSupportedError(CogneeConfigurationError):
self.message = message
self.name = name
self.status_code = status_code
class EmbeddingException(CogneeConfigurationError):
"""
Custom exception for handling embedding-related errors.
This exception class is designed to indicate issues specifically related to embeddings
within the application. It extends the base exception class CogneeConfigurationError and allows
for customization of the error message, name, and status code.
"""
def __init__(
self,
message: str = "Embedding Exception.",
name: str = "EmbeddingException",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
):
super().__init__(message, name, status_code)
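EmbeddingException therefore moves from its own module into the shared databases exceptions module and switches its base class from CogneeApiError to CogneeConfigurationError, while keeping the same default message and 422 status code. A rough usage sketch under those assumptions (the artificial raise is only there to show the new import path and inheritance; it assumes CogneeConfigurationError is importable from cognee.exceptions, as CogneeApiError was in the deleted module above, and that the base class exposes message/name/status_code attributes as the other classes in this commit do):

# Sketch only -- the raise is artificial, to show the new import path and base class.
from cognee.infrastructure.databases.exceptions import EmbeddingException
from cognee.exceptions import CogneeConfigurationError  # assumed export, mirroring CogneeApiError

try:
    raise EmbeddingException("Embedding provider returned an empty vector.")
except CogneeConfigurationError as error:
    # Caught here because EmbeddingException now subclasses CogneeConfigurationError.
    print(f"{error.name} ({error.status_code}): {error.message}")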

View file

@@ -6,7 +6,7 @@ import math
import litellm
import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.databases.exceptions import EmbeddingException
from cognee.infrastructure.llm.tokenizer.Gemini import (
GeminiTokenizer,
)

View file

@@ -16,5 +16,7 @@ class CollectionNotFoundError(CogneeValidationError):
message,
name: str = "CollectionNotFoundError",
status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
log=True,
log_level="DEBUG",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)

View file

@@ -9,7 +9,7 @@ from sqlalchemy.exc import ProgrammingError
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential
from asyncpg import DeadlockDetectedError, DuplicateTableError, UniqueViolationError
from cognee.exceptions import InvalidValueError
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id
@@ -275,7 +275,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface):
return metadata.tables[collection_name]
else:
raise CollectionNotFoundError(
f"Collection '{collection_name}' not found!", log_level="DEBUG"
f"Collection '{collection_name}' not found!",
)
async def retrieve(self, collection_name: str, data_point_ids: List[str]):

View file

@@ -20,3 +20,11 @@ class UnsupportedLLMProviderError(CogneeValidationError):
def __init__(self, provider: str):
message = f"Unsupported LLM provider: {provider}"
super().__init__(message=message, name="UnsupportedLLMProviderError")
class MissingSystemPromptPathError(CogneeValidationError):
def __init__(
self,
name: str = "MissingSystemPromptPathError",
):
message = "No system prompt path provided."
super().__init__(message, name)

View file

@@ -7,12 +7,11 @@ from openai import ContentFilterFinishReasonError
from litellm.exceptions import ContentPolicyViolationError
from instructor.exceptions import InstructorRetryException
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.LLMGateway import LLMGateway
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError
from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError, MissingSystemPromptPathError
from cognee.infrastructure.files.utils.open_data_file import open_data_file
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.rate_limiter import (
rate_limit_async,
@@ -325,7 +324,7 @@ class OpenAIAdapter(LLMInterface):
if not text_input:
text_input = "No user input provided."
if not system_prompt:
raise InvalidValueError(message="No system prompt path provided.")
raise MissingSystemPromptPathError()
system_prompt = LLMGateway.read_query_prompt(system_prompt)
formatted_prompt = (
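Swapping the generic InvalidValueError for the dedicated MissingSystemPromptPathError lets callers target this specific failure; since the new class extends CogneeValidationError without overriding the status code, it keeps the 422 validation default. A small hypothetical sketch, assuming the inherited attributes seen on the other classes in this commit:

# Hypothetical caller-side handling -- not part of the commit.
from cognee.infrastructure.llm.exceptions import MissingSystemPromptPathError

try:
    raise MissingSystemPromptPathError()  # what the OpenAI adapter now raises
except MissingSystemPromptPathError as error:
    # Inherits the CogneeValidationError defaults, so status_code should be 422 here.
    print(error.status_code, error.message)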

View file

@ -1,6 +1,6 @@
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.databases.exceptions import EmbeddingException
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.engine import DataPoint