feat: adds new error classes to litellm instructor
parent 1b3898dd8b
commit b40dbf3c68
2 changed files with 23 additions and 3 deletions
@@ -1,5 +1,22 @@
-from cognee.exceptions.exceptions import CriticalError
+from cognee.exceptions.exceptions import CogneeValidationError
 
 
-class ContentPolicyFilterError(CriticalError):
+class ContentPolicyFilterError(CogneeValidationError):
     pass
+
+
+class LLMAPIKeyNotSetError(CogneeValidationError):
+    """
+    Raised when the LLM API key is not set in the configuration.
+    """
+    def __init__(self, message: str = "LLM API key is not set."):
+        super().__init__(message=message, name="LLMAPIKeyNotSetError")
+
+
+class UnsupportedLLMProviderError(CogneeValidationError):
+    """
+    Raised when an unsupported LLM provider is specified in the configuration.
+    """
+    def __init__(self, provider: str):
+        message = f"Unsupported LLM provider: {provider}"
+        super().__init__(message=message, name="UnsupportedLLMProviderError")
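For reference, a minimal usage sketch (not part of this commit) of how calling code could catch the two new exception types. Only the import path comes from the diff above; the client_factory callable and the print-based reporting in build_client_or_report are illustrative assumptions.

# Illustrative only: client_factory is a hypothetical callable standing in
# for whatever builds the LLM client on the caller's side.
from typing import Any, Callable, Optional

from cognee.infrastructure.llm.exceptions import (
    LLMAPIKeyNotSetError,
    UnsupportedLLMProviderError,
)


def build_client_or_report(client_factory: Callable[[], Any]) -> Optional[Any]:
    """Run a client factory and turn the new errors into readable reports."""
    try:
        return client_factory()
    except LLMAPIKeyNotSetError as error:
        # Raised when no LLM API key is set in the configuration.
        print(f"LLM configuration error: {error}")
    except UnsupportedLLMProviderError as error:
        # Raised when the configured provider name is not supported.
        print(f"Unsupported LLM provider: {error}")
    return None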
@@ -2,11 +2,14 @@
 from enum import Enum
 
 from cognee.exceptions import InvalidValueError
 from cognee.infrastructure.llm import get_llm_config
 from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.ollama.adapter import (
     OllamaAPIAdapter,
 )
+from cognee.infrastructure.llm.exceptions import (
+    LLMAPIKeyNotSetError,
+    UnsupportedLLMProviderError,
+)
 
 
 # Define an Enum for LLM Providers
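The body of the provider-selection code is not shown in this diff, so the sketch below is only a hypothetical illustration of how a factory in this module could raise the two new exceptions. The LLMProvider members and the select_provider function are assumptions, not code from the commit; the exception classes and their constructors match the first hunk above.

from enum import Enum
from typing import Optional

from cognee.infrastructure.llm.exceptions import (
    LLMAPIKeyNotSetError,
    UnsupportedLLMProviderError,
)


# Hypothetical provider enum; the commit only shows the comment
# "# Define an Enum for LLM Providers", not its members.
class LLMProvider(Enum):
    OPENAI = "openai"
    OLLAMA = "ollama"


def select_provider(provider: str, api_key: Optional[str]) -> LLMProvider:
    """Validate the configured provider and its credential before building a client."""
    try:
        selected = LLMProvider(provider)
    except ValueError:
        # Unknown provider string in the configuration.
        raise UnsupportedLLMProviderError(provider)
    if selected is not LLMProvider.OLLAMA and not api_key:
        # Hosted providers require an API key; a local Ollama instance typically does not.
        raise LLMAPIKeyNotSetError()
    return selected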