feature: Introduces new error handling (4 base errors + specified hierarchical exception handling) (#1242)
## Description

Introduces new error handling (4 base errors + specified hierarchical exception handling)

## DCO Affirmation

I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.

Commit 271e7e9cf4: 67 changed files with 603 additions and 231 deletions
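The four base classes introduced in this PR (CogneeSystemError, CogneeValidationError, CogneeConfigurationError, CogneeTransientError) all extend the existing CogneeApiError. Below is a minimal sketch of how a concrete error plugs into that hierarchy and how a caller can handle whole categories at once; `ExampleMissingDatasetError` and `handle()` are hypothetical illustrations, not code from this PR.

```python
from fastapi import status

from cognee.exceptions import (
    CogneeSystemError,         # unexpected internal failures (HTTP 500)
    CogneeValidationError,     # bad or missing input (HTTP 422 by default)
    CogneeConfigurationError,  # misconfiguration (HTTP 500)
    CogneeTransientError,      # retryable failures (HTTP 503)
)


class ExampleMissingDatasetError(CogneeValidationError):
    """Hypothetical concrete error: pick one of the four bases and narrow the message."""

    def __init__(self, dataset: str):
        super().__init__(
            message=f"Dataset '{dataset}' was not found.",
            name="ExampleMissingDatasetError",
            status_code=status.HTTP_404_NOT_FOUND,
        )


def handle(operation):
    """Hierarchical handling: catch a base class instead of every concrete subclass."""
    try:
        return operation()
    except CogneeValidationError as error:
        # Covers ExampleMissingDatasetError and every other validation-type error.
        return {"status": error.status_code, "detail": error.message}
    except (CogneeTransientError, CogneeConfigurationError, CogneeSystemError):
        raise  # surface infrastructure problems to the caller
```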
@@ -221,14 +221,6 @@ async def cognify(data: str, graph_model_file: str = None, graph_model_name: str
- The actual cognify process may take significant time depending on text length
- Use the cognify_status tool to check the progress of the operation

Raises
------
InvalidValueError
If LLM_API_KEY is not set
ValueError
If chunks exceed max token limits (reduce chunk_size)
DatabaseNotCreatedError
If databases are not properly initialized
"""

async def cognify_task(

@@ -512,14 +504,6 @@ async def search(search_query: str, search_type: str) -> list:
- Different search types produce different output formats
- The function handles the conversion between Cognee's internal result format and MCP's output format

Raises
------
InvalidValueError
If LLM_API_KEY is not set (for LLM-based search types)
ValueError
If query_text is empty or search parameters are invalid
NoDataError
If no relevant data found for the search query
"""

async def search_task(search_query: str, search_type: str) -> str:

@@ -134,11 +134,6 @@ async def add(
- VECTOR_DB_PROVIDER: "lancedb" (default), "chromadb", "pgvector"
- GRAPH_DATABASE_PROVIDER: "kuzu" (default), "neo4j", "networkx"

Raises:
FileNotFoundError: If specified file paths don't exist
PermissionError: If user lacks access to files or dataset
UnsupportedFileTypeError: If file format cannot be processed
InvalidValueError: If LLM_API_KEY is not set or invalid
"""
tasks = [
Task(resolve_data_directories, include_subdirectories=True),

@@ -177,14 +177,6 @@ async def cognify(
- LLM_PROVIDER, LLM_MODEL, VECTOR_DB_PROVIDER, GRAPH_DATABASE_PROVIDER
- LLM_RATE_LIMIT_ENABLED: Enable rate limiting (default: False)
- LLM_RATE_LIMIT_REQUESTS: Max requests per interval (default: 60)

Raises:
DatasetNotFoundError: If specified datasets don't exist
PermissionError: If user lacks processing rights
InvalidValueError: If LLM_API_KEY is not set
OntologyParsingError: If ontology file is malformed
ValueError: If chunks exceed max token limits (reduce chunk_size)
DatabaseNotCreatedError: If databases are not properly initialized
"""
tasks = await get_default_tasks(user, graph_model, chunker, chunk_size, ontology_file_path)
@@ -2,7 +2,6 @@

import os
from cognee.base_config import get_base_config
from cognee.exceptions import InvalidValueError, InvalidAttributeError
from cognee.modules.cognify.config import get_cognify_config
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.databases.vector import get_vectordb_config

@@ -11,6 +10,7 @@ from cognee.infrastructure.llm.config import (
get_llm_config,
)
from cognee.infrastructure.databases.relational import get_relational_config, get_migration_config
from cognee.api.v1.exceptions.exceptions import InvalidConfigAttributeError

class config:

@@ -92,9 +92,7 @@ class config:
if hasattr(llm_config, key):
object.__setattr__(llm_config, key, value)
else:
raise InvalidAttributeError(
message=f"'{key}' is not a valid attribute of the config."
)
raise InvalidConfigAttributeError(attribute=key)

@staticmethod
def set_chunk_strategy(chunk_strategy: object):

@@ -131,9 +129,7 @@ class config:
if hasattr(relational_db_config, key):
object.__setattr__(relational_db_config, key, value)
else:
raise InvalidAttributeError(
message=f"'{key}' is not a valid attribute of the config."
)
raise InvalidConfigAttributeError(attribute=key)

@staticmethod
def set_migration_db_config(config_dict: dict):

@@ -145,9 +141,7 @@ class config:
if hasattr(migration_db_config, key):
object.__setattr__(migration_db_config, key, value)
else:
raise InvalidAttributeError(
message=f"'{key}' is not a valid attribute of the config."
)
raise InvalidConfigAttributeError(attribute=key)

@staticmethod
def set_graph_db_config(config_dict: dict) -> None:

@@ -171,9 +165,7 @@ class config:
if hasattr(vector_db_config, key):
object.__setattr__(vector_db_config, key, value)
else:
raise InvalidAttributeError(
message=f"'{key}' is not a valid attribute of the config."
)
raise InvalidConfigAttributeError(attribute=key)
@staticmethod
def set_vector_db_key(db_key: str):
@@ -13,7 +13,7 @@ from cognee.infrastructure.databases.relational import get_relational_engine
from cognee.modules.data.methods import get_authorized_existing_datasets
from cognee.modules.data.methods import create_dataset, get_datasets_by_name
from cognee.shared.logging_utils import get_logger
from cognee.api.v1.delete.exceptions import DataNotFoundError, DatasetNotFoundError
from cognee.api.v1.exceptions import DataNotFoundError, DatasetNotFoundError
from cognee.modules.users.models import User
from cognee.modules.users.methods import get_authenticated_user
from cognee.modules.users.permissions.methods import (

@@ -16,7 +16,7 @@ from cognee.modules.users.methods import get_default_user
from cognee.modules.data.methods import get_authorized_existing_datasets
from cognee.context_global_variables import set_database_global_context_variables

from cognee.api.v1.delete.exceptions import (
from cognee.api.v1.exceptions import (
DocumentNotFoundError,
DatasetNotFoundError,
DocumentSubgraphNotFoundError,
cognee/api/v1/exceptions/__init__.py (new file, 13 lines)

@@ -0,0 +1,13 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import (
InvalidConfigAttributeError,
DocumentNotFoundError,
DatasetNotFoundError,
DataNotFoundError,
DocumentSubgraphNotFoundError,
)
@@ -1,10 +1,19 @@
from cognee.exceptions import CogneeApiError
from cognee.exceptions import CogneeConfigurationError, CogneeValidationError
from fastapi import status

class DocumentNotFoundError(CogneeApiError):
"""Raised when a document cannot be found in the database."""
class InvalidConfigAttributeError(CogneeConfigurationError):
def __init__(
self,
attribute: str,
name: str = "InvalidConfigAttributeError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
message = f"'{attribute}' is not a valid attribute of the configuration."
super().__init__(message, name, status_code)

class DocumentNotFoundError(CogneeValidationError):
def __init__(
self,
message: str = "Document not found in database.",

@@ -14,9 +23,7 @@ class DocumentNotFoundError(CogneeApiError):
super().__init__(message, name, status_code)

class DatasetNotFoundError(CogneeApiError):
"""Raised when a dataset cannot be found."""

class DatasetNotFoundError(CogneeValidationError):
def __init__(
self,
message: str = "Dataset not found.",

@@ -26,9 +33,7 @@ class DatasetNotFoundError(CogneeApiError):
super().__init__(message, name, status_code)

class DataNotFoundError(CogneeApiError):
"""Raised when a dataset cannot be found."""

class DataNotFoundError(CogneeValidationError):
def __init__(
self,
message: str = "Data not found.",

@@ -38,9 +43,7 @@ class DataNotFoundError(CogneeApiError):
super().__init__(message, name, status_code)

class DocumentSubgraphNotFoundError(CogneeApiError):
"""Raised when a document's subgraph cannot be found in the graph database."""

class DocumentSubgraphNotFoundError(CogneeValidationError):
def __init__(
self,
message: str = "Document subgraph not found in graph database.",
@@ -158,13 +158,6 @@ async def search(
- VECTOR_DB_PROVIDER: Must match what was used during cognify
- GRAPH_DATABASE_PROVIDER: Must match what was used during cognify

Raises:
DatasetNotFoundError: If specified datasets don't exist or aren't accessible
PermissionDeniedError: If user lacks read access to requested datasets
NoDataError: If no relevant data found for the search query
InvalidValueError: If LLM_API_KEY is not set (for LLM-based search types)
ValueError: If query_text is empty or search parameters are invalid
CollectionNotFoundError: If vector collection not found (data not processed)
"""
# We use lists from now on for datasets
if isinstance(datasets, UUID) or isinstance(datasets, str):
@@ -2,13 +2,13 @@
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various application errors,
such as service failures, resource conflicts, and invalid operations.
such as system, validation, configuration, or transient errors.
"""

from .exceptions import (
CogneeApiError,
ServiceError,
InvalidValueError,
InvalidAttributeError,
CriticalError,
CogneeSystemError,
CogneeValidationError,
CogneeConfigurationError,
CogneeTransientError,
)
@@ -35,37 +35,57 @@ class CogneeApiError(Exception):
return f"{self.name}: {self.message} (Status code: {self.status_code})"

class ServiceError(CogneeApiError):
"""Failures in external services or APIs, like a database or a third-party service"""
class CogneeSystemError(CogneeApiError):
"""System error"""

def __init__(
self,
message: str = "Service is unavailable.",
name: str = "ServiceError",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
message: str = "A system error occurred.",
name: str = "CogneeSystemError",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)

class InvalidValueError(CogneeApiError):
class CogneeValidationError(CogneeApiError):
"""Validation error"""

def __init__(
self,
message: str = "Invalid Value.",
name: str = "InvalidValueError",
message: str = "A validation error occurred.",
name: str = "CogneeValidationError",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)

class InvalidAttributeError(CogneeApiError):
class CogneeConfigurationError(CogneeApiError):
"""SystemConfigError"""

def __init__(
self,
message: str = "Invalid attribute.",
name: str = "InvalidAttributeError",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
message: str = "A system configuration error occurred.",
name: str = "CogneeConfigurationError",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code)
super().__init__(message, name, status_code, log, log_level)

class CriticalError(CogneeApiError):
pass
class CogneeTransientError(CogneeApiError):
"""TransientError"""

def __init__(
self,
message: str = "A transient error occurred.",
name: str = "CogneeTransientError",
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
log=True,
log_level="ERROR",
):
super().__init__(message, name, status_code, log, log_level)
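Because every class in the hierarchy ultimately derives from CogneeApiError and carries a name, a message, and a status_code, an application can translate all of them into HTTP responses in one place. A hypothetical handler sketch, not part of this PR:

```python
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

from cognee.exceptions import CogneeApiError

app = FastAPI()


@app.exception_handler(CogneeApiError)
async def cognee_api_error_handler(request: Request, error: CogneeApiError) -> JSONResponse:
    # One handler covers system, validation, configuration and transient errors,
    # because each base class sets an appropriate default status_code.
    return JSONResponse(
        status_code=error.status_code,
        content={"error": error.name, "detail": error.message},
    )
```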
cognee/infrastructure/data/exceptions/__init__.py (new file, 7 lines)

@@ -0,0 +1,7 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import KeywordExtractionError

cognee/infrastructure/data/exceptions/exceptions.py (new file, 22 lines)

@@ -0,0 +1,22 @@
from cognee.exceptions import (
CogneeValidationError,
)
from fastapi import status

class KeywordExtractionError(CogneeValidationError):
"""
Raised when a provided value is syntactically valid but semantically unacceptable
for the given operation.

Example:
- Passing an empty string to a keyword extraction function.
"""

def __init__(
self,
message: str = "Extract_keywords cannot extract keywords from empty text.",
name: str = "KeywordExtractionError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
super().__init__(message, name, status_code)

@@ -1,6 +1,6 @@
from sklearn.feature_extraction.text import TfidfVectorizer

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.data.exceptions.exceptions import KeywordExtractionError
from cognee.shared.utils import extract_pos_tags

@@ -8,7 +8,7 @@ def extract_keywords(text: str) -> list[str]:
"""
Extract keywords from the provided text string.

This function raises an InvalidValueError if the input text is empty. It processes the
This function raises a KeywordExtractionError if the input text is empty. It processes the
text to extract parts of speech, focusing on nouns, and uses TF-IDF to identify the most
relevant keywords based on their frequency. The function returns a list of up to 15
keywords, each having more than 3 characters.

@@ -25,7 +25,7 @@ def extract_keywords(text: str) -> list[str]:
with more than 3 characters.
"""
if len(text) == 0:
raise InvalidValueError(message="extract_keywords cannot extract keywords from empty text.")
raise KeywordExtractionError()

tags = extract_pos_tags(text)
nouns = [word for (word, tag) in tags if tag == "NN"]
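A short usage sketch of the new behavior: callers that previously caught InvalidValueError now catch KeywordExtractionError (or its base CogneeValidationError). The exact import path of extract_keywords is not shown in this diff, so it is assumed to be in scope.

```python
from cognee.infrastructure.data.exceptions import KeywordExtractionError

# `extract_keywords` is the function from the hunk above; its import path is
# not shown in this diff, so assume it is available in the current scope.
try:
    keywords = extract_keywords("")          # empty input is semantically invalid
except KeywordExtractionError as error:      # subclass of CogneeValidationError
    print(f"{error.name}: {error.message}")  # reported as a 400-style validation error
```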
@@ -1,20 +0,0 @@
from cognee.exceptions import CogneeApiError
from fastapi import status

class EmbeddingException(CogneeApiError):
"""
Custom exception for handling embedding-related errors.

This exception class is designed to indicate issues specifically related to embeddings
within the application. It extends the base exception class CogneeApiError and allows
for customization of the error message, name, and status code.
"""

def __init__(
self,
message: str = "Embedding Exception.",
name: str = "EmbeddingException",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
):
super().__init__(message, name, status_code)

@@ -8,4 +8,7 @@ from .exceptions import (
EntityNotFoundError,
EntityAlreadyExistsError,
DatabaseNotCreatedError,
EmbeddingException,
MissingQueryParameterError,
MutuallyExclusiveQueryParametersError,
)

@@ -1,13 +1,13 @@
from fastapi import status
from cognee.exceptions import CogneeApiError, CriticalError
from cognee.exceptions import CogneeSystemError, CogneeValidationError, CogneeConfigurationError

class DatabaseNotCreatedError(CriticalError):
class DatabaseNotCreatedError(CogneeSystemError):
"""
Represents an error indicating that the database has not been created. This error should
be raised when an attempt is made to access the database before it has been initialized.

Inherits from CriticalError. Overrides the constructor to include a default message and
Inherits from CogneeSystemError. Overrides the constructor to include a default message and
status code.
"""

@@ -20,10 +20,10 @@ class DatabaseNotCreatedError(CriticalError):
super().__init__(message, name, status_code)

class EntityNotFoundError(CogneeApiError):
class EntityNotFoundError(CogneeValidationError):
"""
Represents an error when a requested entity is not found in the database. This class
inherits from CogneeApiError.
inherits from CogneeValidationError.

Public methods:

@@ -49,11 +49,11 @@ class EntityNotFoundError(CogneeApiError):
# super().__init__(message, name, status_code) :TODO: This is not an error anymore with the dynamic exception handling therefore we shouldn't log error

class EntityAlreadyExistsError(CogneeApiError):
class EntityAlreadyExistsError(CogneeValidationError):
"""
Represents an error when an entity creation is attempted but the entity already exists.

This class is derived from CogneeApiError and is used to signal a conflict in operations
This class is derived from CogneeValidationError and is used to signal a conflict in operations
involving resource creation.
"""

@@ -66,11 +66,11 @@ class EntityAlreadyExistsError(CogneeApiError):
super().__init__(message, name, status_code)

class NodesetFilterNotSupportedError(CogneeApiError):
class NodesetFilterNotSupportedError(CogneeConfigurationError):
"""
Raise an exception when a nodeset filter is not supported by the current database.

This exception inherits from `CogneeApiError` and is designed to provide information
This exception inherits from `CogneeConfigurationError` and is designed to provide information
about the specific issue of unsupported nodeset filters in the context of graph
databases.
"""

@@ -84,3 +84,51 @@ class NodesetFilterNotSupportedError(CogneeApiError):
self.message = message
self.name = name
self.status_code = status_code

class EmbeddingException(CogneeConfigurationError):
"""
Custom exception for handling embedding-related errors.

This exception class is designed to indicate issues specifically related to embeddings
within the application. It extends the base exception class CogneeConfigurationError and allows
for customization of the error message, name, and status code.
"""

def __init__(
self,
message: str = "Embedding Exception.",
name: str = "EmbeddingException",
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
):
super().__init__(message, name, status_code)

class MissingQueryParameterError(CogneeValidationError):
"""
Raised when neither 'query_text' nor 'query_vector' is provided,
and at least one is required to perform the operation.
"""

def __init__(
self,
name: str = "MissingQueryParameterError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
message = "One of query_text or query_vector must be provided!"
super().__init__(message, name, status_code)

class MutuallyExclusiveQueryParametersError(CogneeValidationError):
"""
Raised when both 'text' and 'embedding' are provided to the search function,
but only one type of input is allowed at a time.
"""

def __init__(
self,
name: str = "MutuallyExclusiveQueryParametersError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
message = "The search function accepts either text or embedding as input, but not both."
super().__init__(message, name, status_code)
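Both new query errors subclass CogneeValidationError, so adapter callers can treat bad query input uniformly. A hypothetical caller-side sketch (the `adapter` object stands in for any of the vector/graph adapters changed below, and the search signature is simplified):

```python
from cognee.infrastructure.databases.exceptions import (
    MissingQueryParameterError,
    MutuallyExclusiveQueryParametersError,
)


async def safe_search(adapter, collection_name: str, query_text=None, query_vector=None):
    try:
        return await adapter.search(
            collection_name=collection_name,
            query_text=query_text,
            query_vector=query_vector,
        )
    except (MissingQueryParameterError, MutuallyExclusiveQueryParametersError) as error:
        # Or simply `except CogneeValidationError`, since both share that base class.
        return {"status": error.status_code, "detail": error.message}
```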
@@ -3,11 +3,16 @@
This module defines custom exceptions for Neptune Analytics operations.
"""

from cognee.exceptions import CogneeApiError
from cognee.exceptions import (
CogneeSystemError,
CogneeTransientError,
CogneeValidationError,
CogneeConfigurationError,
)
from fastapi import status

class NeptuneAnalyticsError(CogneeApiError):
class NeptuneAnalyticsError(CogneeSystemError):
"""Base exception for Neptune Analytics operations."""

def __init__(

@@ -19,7 +24,7 @@ class NeptuneAnalyticsError(CogneeApiError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsConnectionError(NeptuneAnalyticsError):
class NeptuneAnalyticsConnectionError(CogneeTransientError):
"""Exception raised when connection to Neptune Analytics fails."""

def __init__(

@@ -31,7 +36,7 @@ class NeptuneAnalyticsConnectionError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsQueryError(NeptuneAnalyticsError):
class NeptuneAnalyticsQueryError(CogneeValidationError):
"""Exception raised when a query execution fails."""

def __init__(

@@ -43,7 +48,7 @@ class NeptuneAnalyticsQueryError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsAuthenticationError(NeptuneAnalyticsError):
class NeptuneAnalyticsAuthenticationError(CogneeConfigurationError):
"""Exception raised when authentication with Neptune Analytics fails."""

def __init__(

@@ -55,7 +60,7 @@ class NeptuneAnalyticsAuthenticationError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsConfigurationError(NeptuneAnalyticsError):
class NeptuneAnalyticsConfigurationError(CogneeConfigurationError):
"""Exception raised when Neptune Analytics configuration is invalid."""

def __init__(

@@ -67,7 +72,7 @@ class NeptuneAnalyticsConfigurationError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsTimeoutError(NeptuneAnalyticsError):
class NeptuneAnalyticsTimeoutError(CogneeTransientError):
"""Exception raised when a Neptune Analytics operation times out."""

def __init__(

@@ -79,7 +84,7 @@ class NeptuneAnalyticsTimeoutError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsThrottlingError(NeptuneAnalyticsError):
class NeptuneAnalyticsThrottlingError(CogneeTransientError):
"""Exception raised when requests are throttled by Neptune Analytics."""

def __init__(

@@ -91,7 +96,7 @@ class NeptuneAnalyticsThrottlingError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsResourceNotFoundError(NeptuneAnalyticsError):
class NeptuneAnalyticsResourceNotFoundError(CogneeValidationError):
"""Exception raised when a Neptune Analytics resource is not found."""

def __init__(

@@ -103,7 +108,7 @@ class NeptuneAnalyticsResourceNotFoundError(NeptuneAnalyticsError):
super().__init__(message, name, status_code)

class NeptuneAnalyticsInvalidParameterError(NeptuneAnalyticsError):
class NeptuneAnalyticsInvalidParameterError(CogneeValidationError):
"""Exception raised when invalid parameters are provided to Neptune Analytics."""

def __init__(
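Connection, timeout, and throttling errors are now CogneeTransientError subclasses, which makes retry policies straightforward. A hypothetical sketch using tenacity (already used elsewhere in this diff); `run_neptune_query` is an illustrative stand-in, not a function from this PR:

```python
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

from cognee.exceptions import CogneeTransientError


@retry(
    retry=retry_if_exception_type(CogneeTransientError),  # connection/timeout/throttling
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=1, max=10),
)
async def run_neptune_query(adapter, query: str):
    # Validation and configuration errors are not retried: they propagate immediately.
    return await adapter.query(query)
```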
@@ -9,7 +9,7 @@ from typing import List, Dict, Any, Optional, Tuple, Type, Union

from falkordb import FalkorDB

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
from cognee.infrastructure.databases.graph.graph_db_interface import (
GraphDBInterface,
record_graph_changes,

@@ -721,7 +721,7 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface):
Returns the search results as a result set from the graph database.
"""
if query_text is None and query_vector is None:
raise InvalidValueError(message="One of query_text or query_vector must be provided!")
raise MissingQueryParameterError()

if query_text and not query_vector:
query_vector = (await self.embed_data([query_text]))[0]

@@ -5,7 +5,8 @@ import json
from typing import List, Optional, Any, Dict, Type, Tuple
from uuid import UUID

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
from cognee.infrastructure.databases.exceptions import MutuallyExclusiveQueryParametersError
from cognee.infrastructure.databases.graph.neptune_driver.adapter import NeptuneGraphDB
from cognee.infrastructure.databases.vector.vector_db_interface import VectorDBInterface
from cognee.infrastructure.engine import DataPoint

@@ -274,11 +275,9 @@ class NeptuneAnalyticsAdapter(NeptuneGraphDB, VectorDBInterface):
limit = self._TOPK_UPPER_BOUND

if query_vector and query_text:
raise InvalidValueError(
message="The search function accepts either text or embedding as input, but not both."
)
raise MutuallyExclusiveQueryParametersError()
elif query_text is None and query_vector is None:
raise InvalidValueError(message="One of query_text or query_vector must be provided!")
raise MissingQueryParameterError()
elif query_vector:
embedding = query_vector
else:

@@ -4,13 +4,13 @@ from uuid import UUID
from typing import List, Optional
from chromadb import AsyncHttpClient, Settings

from cognee.exceptions import InvalidValueError
from cognee.shared.logging_utils import get_logger
from cognee.modules.storage.utils import get_own_properties
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id
from cognee.infrastructure.databases.vector.exceptions import CollectionNotFoundError
from cognee.infrastructure.databases.vector.models.ScoredResult import ScoredResult
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError

from ..embeddings.EmbeddingEngine import EmbeddingEngine
from ..vector_db_interface import VectorDBInterface

@@ -378,7 +378,7 @@ class ChromaDBAdapter(VectorDBInterface):
Returns a list of ScoredResult instances representing the search results.
"""
if query_text is None and query_vector is None:
raise InvalidValueError(message="One of query_text or query_vector must be provided!")
raise MissingQueryParameterError()

if query_text and not query_vector:
query_vector = (await self.embedding_engine.embed_text([query_text]))[0]

@@ -6,7 +6,7 @@ import math
import litellm
import os
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
from cognee.infrastructure.databases.exceptions import EmbeddingException
from cognee.infrastructure.llm.tokenizer.Gemini import (
GeminiTokenizer,
)

@@ -1,12 +1,12 @@
from fastapi import status
from cognee.exceptions import CriticalError
from cognee.exceptions import CogneeValidationError

class CollectionNotFoundError(CriticalError):
class CollectionNotFoundError(CogneeValidationError):
"""
Represents an error that occurs when a requested collection cannot be found.

This class extends the CriticalError to handle specific cases where a requested
This class extends the CogneeValidationError to handle specific cases where a requested
collection is unavailable. It can be initialized with a custom message and allows for
logging options including log level and whether to log the error.
"""

@@ -5,7 +5,7 @@ from pydantic import BaseModel
from lancedb.pydantic import LanceModel, Vector
from typing import Generic, List, Optional, TypeVar, Union, get_args, get_origin, get_type_hints

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id
from cognee.infrastructure.files.storage import get_file_storage

@@ -228,7 +228,7 @@ class LanceDBAdapter(VectorDBInterface):
normalized: bool = True,
):
if query_text is None and query_vector is None:
raise InvalidValueError(message="One of query_text or query_vector must be provided!")
raise MissingQueryParameterError()

if query_text and not query_vector:
query_vector = (await self.embedding_engine.embed_text([query_text]))[0]

@@ -9,7 +9,7 @@ from sqlalchemy.exc import ProgrammingError
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential
from asyncpg import DeadlockDetectedError, DuplicateTableError, UniqueViolationError

from cognee.exceptions import InvalidValueError

from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id

@@ -17,6 +17,7 @@ from cognee.infrastructure.databases.relational import get_relational_engine

from distributed.utils import override_distributed
from distributed.tasks.queued_add_data_points import queued_add_data_points
from cognee.infrastructure.databases.exceptions import MissingQueryParameterError

from ...relational.ModelBase import Base
from ...relational.sqlalchemy.SqlAlchemyAdapter import SQLAlchemyAdapter

@@ -275,7 +276,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface):
return metadata.tables[collection_name]
else:
raise CollectionNotFoundError(
f"Collection '{collection_name}' not found!", log_level="DEBUG"
f"Collection '{collection_name}' not found!",
)

async def retrieve(self, collection_name: str, data_point_ids: List[str]):

@@ -302,7 +303,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface):
with_vector: bool = False,
) -> List[ScoredResult]:
if query_text is None and query_vector is None:
raise InvalidValueError(message="One of query_text or query_vector must be provided!")
raise MissingQueryParameterError()

if query_text and not query_vector:
query_vector = (await self.embedding_engine.embed_text([query_text]))[0]
@@ -1,5 +1,33 @@
from cognee.exceptions.exceptions import CriticalError
from cognee.exceptions.exceptions import CogneeValidationError

class ContentPolicyFilterError(CriticalError):
class ContentPolicyFilterError(CogneeValidationError):
pass

class LLMAPIKeyNotSetError(CogneeValidationError):
"""
Raised when the LLM API key is not set in the configuration.
"""

def __init__(self, message: str = "LLM API key is not set."):
super().__init__(message=message, name="LLMAPIKeyNotSetError")

class UnsupportedLLMProviderError(CogneeValidationError):
"""
Raised when an unsupported LLM provider is specified in the configuration.
"""

def __init__(self, provider: str):
message = f"Unsupported LLM provider: {provider}"
super().__init__(message=message, name="UnsupportedLLMProviderError")

class MissingSystemPromptPathError(CogneeValidationError):
def __init__(
self,
name: str = "MissingSystemPromptPathError",
):
message = "No system prompt path provided."
super().__init__(message, name)

@@ -2,7 +2,7 @@ from typing import Type
from pydantic import BaseModel
import instructor

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.exceptions import MissingSystemPromptPathError
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.llm_interface import (
LLMInterface,
)

@@ -89,7 +89,7 @@ class AnthropicAdapter(LLMInterface):
if not text_input:
text_input = "No user input provided."
if not system_prompt:
raise InvalidValueError(message="No system prompt path provided.")
raise MissingSystemPromptPathError()

system_prompt = LLMGateway.read_query_prompt(system_prompt)

@@ -5,7 +5,7 @@ from litellm import acompletion, JSONSchemaValidationError

from cognee.shared.logging_utils import get_logger
from cognee.modules.observability.get_observe import get_observe
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.exceptions import MissingSystemPromptPathError
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.llm_interface import (
LLMInterface,
)
@@ -118,7 +118,7 @@ class GeminiAdapter(LLMInterface):
"""
Format and display the prompt for a user query.

Raises an InvalidValueError if no system prompt is provided.
Raises a MissingSystemPromptPathError if no system prompt is provided.

Parameters:
-----------
@@ -135,7 +135,7 @@ class GeminiAdapter(LLMInterface):
if not text_input:
text_input = "No user input provided."
if not system_prompt:
raise InvalidValueError(message="No system prompt path provided.")
raise MissingSystemPromptPathError()
system_prompt = LLMGateway.read_query_prompt(system_prompt)

formatted_prompt = (

@@ -2,11 +2,14 @@

from enum import Enum

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm import get_llm_config
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.ollama.adapter import (
OllamaAPIAdapter,
)
from cognee.infrastructure.llm.exceptions import (
LLMAPIKeyNotSetError,
UnsupportedLLMProviderError,
)

# Define an Enum for LLM Providers

@@ -35,7 +38,7 @@ def get_llm_client():

This function retrieves the configuration for the LLM provider and model, and
initializes the appropriate LLM client adapter accordingly. It raises an
InvalidValueError if the LLM API key is not set for certain providers or if the provider
LLMAPIKeyNotSetError if the LLM API key is not set for certain providers or if the provider
is unsupported.

Returns:

@@ -59,7 +62,7 @@ def get_llm_client():

if provider == LLMProvider.OPENAI:
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
raise LLMAPIKeyNotSetError()

from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.openai.adapter import (
OpenAIAdapter,

@@ -80,7 +83,7 @@ def get_llm_client():

elif provider == LLMProvider.OLLAMA:
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
raise LLMAPIKeyNotSetError()

from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.generic_llm_api.adapter import (
GenericAPIAdapter,

@@ -103,7 +106,7 @@ def get_llm_client():

elif provider == LLMProvider.CUSTOM:
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
raise LLMAPIKeyNotSetError()

from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.generic_llm_api.adapter import (
GenericAPIAdapter,

@@ -122,7 +125,7 @@ def get_llm_client():

elif provider == LLMProvider.GEMINI:
if llm_config.llm_api_key is None:
raise InvalidValueError(message="LLM API key is not set.")
raise LLMAPIKeyNotSetError()

from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.gemini.adapter import (
GeminiAdapter,

@@ -138,4 +141,4 @@ def get_llm_client():
)

else:
raise InvalidValueError(message=f"Unsupported LLM provider: {provider}")
raise UnsupportedLLMProviderError(provider)
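With the configuration checks above raising dedicated errors, a startup path can report missing keys or unknown providers precisely. A small hedged sketch; the import path of get_llm_client is not shown in this diff, so it is assumed to be in scope:

```python
from cognee.infrastructure.llm.exceptions import (
    LLMAPIKeyNotSetError,
    UnsupportedLLMProviderError,
)

# `get_llm_client` is the factory from the hunk above.
try:
    llm_client = get_llm_client()
except LLMAPIKeyNotSetError:
    print("Set LLM_API_KEY before starting the pipeline.")
except UnsupportedLLMProviderError as error:
    print(error.message)  # e.g. "Unsupported LLM provider: my_provider"
```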
@@ -7,12 +7,14 @@ from openai import ContentFilterFinishReasonError
from litellm.exceptions import ContentPolicyViolationError
from instructor.exceptions import InstructorRetryException

from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.LLMGateway import LLMGateway
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError
from cognee.infrastructure.llm.exceptions import (
ContentPolicyFilterError,
MissingSystemPromptPathError,
)
from cognee.infrastructure.files.utils.open_data_file import open_data_file
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.rate_limiter import (
rate_limit_async,

@@ -308,7 +310,7 @@ class OpenAIAdapter(LLMInterface):
Format and display the prompt for a user query.

This method formats the prompt using the provided user input and system prompt,
returning a string representation. Raises InvalidValueError if the system prompt is not
returning a string representation. Raises MissingSystemPromptPathError if the system prompt is not
provided.

Parameters:

@@ -325,7 +327,7 @@ class OpenAIAdapter(LLMInterface):
if not text_input:
text_input = "No user input provided."
if not system_prompt:
raise InvalidValueError(message="No system prompt path provided.")
raise MissingSystemPromptPathError()
system_prompt = LLMGateway.read_query_prompt(system_prompt)

formatted_prompt = (

@@ -1,8 +1,11 @@
from cognee.exceptions import CogneeApiError
from cognee.exceptions import (
CogneeValidationError,
CogneeConfigurationError,
)
from fastapi import status

class UnstructuredLibraryImportError(CogneeApiError):
class UnstructuredLibraryImportError(CogneeConfigurationError):
def __init__(
self,
message: str = "Import error. Unstructured library is not installed.",

@@ -12,7 +15,7 @@ class UnstructuredLibraryImportError(CogneeApiError):
super().__init__(message, name, status_code)

class UnauthorizedDataAccessError(CogneeApiError):
class UnauthorizedDataAccessError(CogneeValidationError):
def __init__(
self,
message: str = "User does not have permission to access this data.",

@@ -22,7 +25,7 @@ class UnauthorizedDataAccessError(CogneeApiError):
super().__init__(message, name, status_code)

class DatasetNotFoundError(CogneeApiError):
class DatasetNotFoundError(CogneeValidationError):
def __init__(
self,
message: str = "Dataset not found.",

@@ -32,7 +35,7 @@ class DatasetNotFoundError(CogneeApiError):
super().__init__(message, name, status_code)

class DatasetTypeError(CogneeApiError):
class DatasetTypeError(CogneeValidationError):
def __init__(
self,
message: str = "Dataset type not supported.",

@@ -40,3 +43,13 @@ class DatasetTypeError(CogneeApiError):
status_code=status.HTTP_400_BAD_REQUEST,
):
super().__init__(message, name, status_code)

class InvalidTableAttributeError(CogneeValidationError):
def __init__(
self,
message: str = "The provided data object is missing the required '__tablename__' attribute.",
name: str = "InvalidTableAttributeError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
super().__init__(message, name, status_code)

@@ -1,4 +1,4 @@
from cognee.exceptions import InvalidAttributeError
from cognee.modules.data.exceptions.exceptions import InvalidTableAttributeError
from cognee.modules.data.models import Data
from cognee.infrastructure.databases.relational import get_relational_engine

@@ -13,9 +13,7 @@ async def delete_data(data: Data):
ValueError: If the data object is invalid.
"""
if not hasattr(data, "__tablename__"):
raise InvalidAttributeError(
message="The provided data object is missing the required '__tablename__' attribute."
)
raise InvalidTableAttributeError()

db_engine = get_relational_engine()

@@ -1,8 +1,8 @@
from cognee.exceptions import CogneeApiError
from cognee.exceptions import CogneeSystemError
from fastapi import status

class PyPdfInternalError(CogneeApiError):
class PyPdfInternalError(CogneeSystemError):
"""Internal pypdf error"""

def __init__(

@@ -2,8 +2,11 @@ import time
from cognee.shared.logging_utils import get_logger
from typing import List, Dict, Union, Optional, Type

from cognee.exceptions import InvalidValueError
from cognee.modules.graph.exceptions import EntityNotFoundError, EntityAlreadyExistsError
from cognee.modules.graph.exceptions import (
EntityNotFoundError,
EntityAlreadyExistsError,
InvalidDimensionsError,
)
from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface
from cognee.modules.graph.cognee_graph.CogneeGraphElements import Node, Edge
from cognee.modules.graph.cognee_graph.CogneeAbstractGraph import CogneeAbstractGraph

@@ -66,8 +69,7 @@ class CogneeGraph(CogneeAbstractGraph):
node_name: Optional[List[str]] = None,
) -> None:
if node_dimension < 1 or edge_dimension < 1:
raise InvalidValueError(message="Dimensions must be positive integers")

raise InvalidDimensionsError()
try:
import time

@@ -1,7 +1,6 @@
import numpy as np
from typing import List, Dict, Optional, Any, Union

from cognee.exceptions import InvalidValueError
from cognee.modules.graph.exceptions import InvalidDimensionsError, DimensionOutOfRangeError

class Node:

@@ -24,7 +23,7 @@ class Node:
self, node_id: str, attributes: Optional[Dict[str, Any]] = None, dimension: int = 1
):
if dimension <= 0:
raise InvalidValueError(message="Dimension must be a positive integer")
raise InvalidDimensionsError()
self.id = node_id
self.attributes = attributes if attributes is not None else {}
self.attributes["vector_distance"] = float("inf")

@@ -58,9 +57,7 @@ class Node:

def is_node_alive_in_dimension(self, dimension: int) -> bool:
if dimension < 0 or dimension >= len(self.status):
raise InvalidValueError(
message=f"Dimension {dimension} is out of range. Valid range is 0 to {len(self.status) - 1}."
)
raise DimensionOutOfRangeError(dimension=dimension, max_index=len(self.status) - 1)
return self.status[dimension] == 1

def add_attribute(self, key: str, value: Any) -> None:

@@ -110,7 +107,7 @@ class Edge:
dimension: int = 1,
):
if dimension <= 0:
raise InvalidValueError(message="Dimensions must be a positive integer.")
raise InvalidDimensionsError()
self.node1 = node1
self.node2 = node2
self.attributes = attributes if attributes is not None else {}

@@ -120,9 +117,7 @@ class Edge:

def is_edge_alive_in_dimension(self, dimension: int) -> bool:
if dimension < 0 or dimension >= len(self.status):
raise InvalidValueError(
message=f"Dimension {dimension} is out of range. Valid range is 0 to {len(self.status) - 1}."
)
raise DimensionOutOfRangeError(dimension=dimension, max_index=len(self.status) - 1)
return self.status[dimension] == 1

def add_attribute(self, key: str, value: Any) -> None:

@@ -7,4 +7,6 @@ This module defines a set of exceptions for handling various graph errors
from .exceptions import (
EntityNotFoundError,
EntityAlreadyExistsError,
InvalidDimensionsError,
DimensionOutOfRangeError,
)

@@ -1,8 +1,8 @@
from cognee.exceptions import CogneeApiError
from cognee.exceptions import CogneeValidationError
from fastapi import status

class EntityNotFoundError(CogneeApiError):
class EntityNotFoundError(CogneeValidationError):
"""Database returns nothing"""

def __init__(

@@ -14,7 +14,7 @@ class EntityNotFoundError(CogneeApiError):
super().__init__(message, name, status_code)

class EntityAlreadyExistsError(CogneeApiError):
class EntityAlreadyExistsError(CogneeValidationError):
"""Conflict detected, like trying to create a resource that already exists"""

def __init__(

@@ -24,3 +24,25 @@ class EntityAlreadyExistsError(CogneeApiError):
status_code=status.HTTP_409_CONFLICT,
):
super().__init__(message, name, status_code)

class InvalidDimensionsError(CogneeValidationError):
def __init__(
self,
name: str = "InvalidDimensionsError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
message = "Dimensions must be positive integers."
super().__init__(message, name, status_code)

class DimensionOutOfRangeError(CogneeValidationError):
def __init__(
self,
dimension: int,
max_index: int,
name: str = "DimensionOutOfRangeError",
status_code: int = status.HTTP_400_BAD_REQUEST,
):
message = f"Dimension {dimension} is out of range. Valid range is 0 to {max_index}."
super().__init__(message, name, status_code)
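Both new graph errors are validation errors with HTTP 400 defaults. A brief usage sketch against the Node class changed above; the printed range assumes the node's status vector has the same length as its dimension:

```python
from cognee.modules.graph.cognee_graph.CogneeGraphElements import Node
from cognee.modules.graph.exceptions import (
    DimensionOutOfRangeError,
    InvalidDimensionsError,
)

node = Node("node-1", dimension=2)

try:
    Node("node-2", dimension=0)          # non-positive dimension
except InvalidDimensionsError as error:
    print(error.message)                 # "Dimensions must be positive integers."

try:
    node.is_node_alive_in_dimension(5)   # only indices 0..1 are valid here
except DimensionOutOfRangeError as error:
    print(error.message)                 # "Dimension 5 is out of range. Valid range is 0 to 1."
```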
@ -1,8 +1,8 @@
|
|||
from cognee.exceptions import CogneeApiError
|
||||
from cognee.exceptions import CogneeValidationError
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class IngestionError(CogneeApiError):
|
||||
class IngestionError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Type of data sent to classify not supported.",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from cognee.exceptions import CogneeApiError
|
||||
from cognee.exceptions import CogneeSystemError
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class OntologyInitializationError(CogneeApiError):
|
||||
class OntologyInitializationError(CogneeSystemError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Ontology initialization failed",
|
||||
|
|
@ -12,7 +12,7 @@ class OntologyInitializationError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class FindClosestMatchError(CogneeApiError):
|
||||
class FindClosestMatchError(CogneeSystemError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Error in find_closest_match",
|
||||
|
|
@ -22,7 +22,7 @@ class FindClosestMatchError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class GetSubgraphError(CogneeApiError):
|
||||
class GetSubgraphError(CogneeSystemError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Failed to retrieve subgraph",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from cognee.exceptions import CogneeApiError
|
||||
from cognee.exceptions import CogneeSystemError
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class PipelineRunFailedError(CogneeApiError):
|
||||
class PipelineRunFailedError(CogneeSystemError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Pipeline run failed.",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from fastapi import status
|
||||
from cognee.exceptions import CogneeApiError, CriticalError
|
||||
from cognee.exceptions import CogneeValidationError, CogneeSystemError
|
||||
|
||||
|
||||
class SearchTypeNotSupported(CogneeApiError):
|
||||
class SearchTypeNotSupported(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "CYPHER search type not supported by the adapter.",
|
||||
|
|
@ -12,7 +12,7 @@ class SearchTypeNotSupported(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class CypherSearchError(CogneeApiError):
|
||||
class CypherSearchError(CogneeSystemError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "An error occurred during the execution of the Cypher query.",
|
||||
|
|
@ -22,11 +22,17 @@ class CypherSearchError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class NoDataError(CriticalError):
|
||||
message: str = "No data found in the system, please add data first."
|
||||
class NoDataError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "No data found in the system, please add data first.",
|
||||
name: str = "NoDataError",
|
||||
status_code: int = status.HTTP_404_NOT_FOUND,
|
||||
):
|
||||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class CollectionDistancesNotFoundError(CogneeApiError):
|
||||
class CollectionDistancesNotFoundError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "No collection distances found for the given query.",
|
||||
|
|
|
|||
7
cognee/modules/search/exceptions/__init__.py
Normal file
7
cognee/modules/search/exceptions/__init__.py
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
"""
|
||||
Custom exceptions for the Cognee API.
|
||||
|
||||
This module defines a set of exceptions for handling various data errors
|
||||
"""
|
||||
|
||||
from .exceptions import UnsupportedSearchTypeError
|
||||
15
cognee/modules/search/exceptions/exceptions.py
Normal file
15
cognee/modules/search/exceptions/exceptions.py
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
from cognee.exceptions import (
|
||||
CogneeValidationError,
|
||||
)
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class UnsupportedSearchTypeError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
search_type: str,
|
||||
name: str = "UnsupportedSearchTypeError",
|
||||
status_code: int = status.HTTP_400_BAD_REQUEST,
|
||||
):
|
||||
message = f"Unsupported search type: {search_type}"
|
||||
super().__init__(message, name, status_code)
|
||||
|
|
@ -3,9 +3,8 @@ import json
|
|||
import asyncio
|
||||
from uuid import UUID
|
||||
from typing import Callable, List, Optional, Type, Union
|
||||
|
||||
from cognee.modules.search.exceptions import UnsupportedSearchTypeError
|
||||
from cognee.context_global_variables import set_database_global_context_variables
|
||||
from cognee.exceptions import InvalidValueError
|
||||
from cognee.modules.retrieval.chunks_retriever import ChunksRetriever
|
||||
from cognee.modules.retrieval.insights_retriever import InsightsRetriever
|
||||
from cognee.modules.retrieval.summaries_retriever import SummariesRetriever
|
||||
|
|
@ -136,7 +135,7 @@ async def specific_search(
|
|||
search_task = search_tasks.get(query_type)
|
||||
|
||||
if search_task is None:
|
||||
raise InvalidValueError(message=f"Unsupported search type: {query_type}")
|
||||
raise UnsupportedSearchTypeError(str(query_type))
|
||||
|
||||
send_telemetry("cognee.search EXECUTION STARTED", user.id)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from cognee.exceptions import CogneeApiError
|
||||
from cognee.exceptions import CogneeValidationError
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class RoleNotFoundError(CogneeApiError):
|
||||
class RoleNotFoundError(CogneeValidationError):
|
||||
"""User group not found"""
|
||||
|
||||
def __init__(
|
||||
|
|
@ -14,7 +14,7 @@ class RoleNotFoundError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class TenantNotFoundError(CogneeApiError):
|
||||
class TenantNotFoundError(CogneeValidationError):
|
||||
"""User group not found"""
|
||||
|
||||
def __init__(
|
||||
|
|
@ -26,7 +26,7 @@ class TenantNotFoundError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class UserNotFoundError(CogneeApiError):
|
||||
class UserNotFoundError(CogneeValidationError):
|
||||
"""User not found"""
|
||||
|
||||
def __init__(
|
||||
|
|
@ -38,7 +38,7 @@ class UserNotFoundError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class PermissionDeniedError(CogneeApiError):
|
||||
class PermissionDeniedError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "User does not have permission on documents.",
|
||||
|
|
@ -48,7 +48,7 @@ class PermissionDeniedError(CogneeApiError):
|
|||
super().__init__(message, name, status_code)
|
||||
|
||||
|
||||
class PermissionNotFoundError(CogneeApiError):
|
||||
class PermissionNotFoundError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Permission type does not exist.",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from cognee.exceptions import CogneeApiError
|
||||
from cognee.exceptions import CogneeValidationError
|
||||
from fastapi import status
|
||||
|
||||
|
||||
class IngestionError(CogneeApiError):
|
||||
class IngestionError(CogneeValidationError):
|
||||
def __init__(
|
||||
self,
|
||||
message: str = "Failed to load data.",
|
||||
|
|
|
|||
|
|
@ -1,11 +1,11 @@
-from cognee.exceptions import CogneeApiError
+from cognee.exceptions import CogneeValidationError
from fastapi import status


-class NoRelevantDataError(CogneeApiError):
+class NoRelevantDataError(CogneeValidationError):
    """
    Represents an error when no relevant data is found during a search. This class is a
-    subclass of CogneeApiError.
+    subclass of CogneeValidationError.

    Public methods:
@ -10,6 +10,7 @@ from cognee.modules.data.processing.document_types import (
)
from cognee.modules.engine.models.node_set import NodeSet
from cognee.modules.engine.utils.generate_node_id import generate_node_id
+from cognee.tasks.documents.exceptions import WrongDataDocumentInputError

EXTENSION_TO_DOCUMENT_CLASS = {
    "pdf": PdfDocument,  # Text documents

@ -111,6 +112,9 @@ async def classify_documents(data_documents: list[Data]) -> list[Document]:
        - list[Document]: A list of Document objects created based on the classified data
          documents.
    """
+    if not isinstance(data_documents, list):
+        raise WrongDataDocumentInputError("data_documents")
+
    documents = []
    for data_item in data_documents:
        document = EXTENSION_TO_DOCUMENT_CLASS[data_item.extension](

cognee/tasks/documents/exceptions/__init__.py (new file, 11 lines)
@ -0,0 +1,11 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import (
    WrongDataDocumentInputError,
    InvalidChunkSizeError,
    InvalidChunkerError,
)

cognee/tasks/documents/exceptions/exceptions.py (new file, 36 lines)
@ -0,0 +1,36 @@
from cognee.exceptions import (
    CogneeValidationError,
    CogneeConfigurationError,
)
from fastapi import status


class WrongDataDocumentInputError(CogneeValidationError):
    """Raised when a wrong data document is provided."""

    def __init__(
        self,
        field: str,
        name: str = "WrongDataDocumentInputError",
        status_code: int = status.HTTP_422_UNPROCESSABLE_ENTITY,
    ):
        message = f"Missing or invalid parameter: '{field}'."
        super().__init__(message, name, status_code)


class InvalidChunkSizeError(CogneeValidationError):
    def __init__(self, value):
        super().__init__(
            message=f"max_chunk_size must be a positive integer (got {value}).",
            name="InvalidChunkSizeError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )


class InvalidChunkerError(CogneeValidationError):
    def __init__(self):
        super().__init__(
            message="chunker must be a valid Chunker class.",
            name="InvalidChunkerError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
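These document-task errors all sit under `CogneeValidationError`, so callers can catch either the concrete class or the base. A small illustrative check of the hierarchy and the raise/catch pattern (not part of the PR):

```python
from cognee.exceptions import CogneeValidationError
from cognee.tasks.documents.exceptions import (
    InvalidChunkerError,
    InvalidChunkSizeError,
    WrongDataDocumentInputError,
)

# Every document-task error is a CogneeValidationError, so a single except
# clause is enough when the caller does not need to distinguish them.
assert issubclass(WrongDataDocumentInputError, CogneeValidationError)
assert issubclass(InvalidChunkSizeError, CogneeValidationError)
assert issubclass(InvalidChunkerError, CogneeValidationError)

try:
    raise InvalidChunkSizeError(0)
except CogneeValidationError as error:
    # Caught via the base class; the concrete type is still available for logging.
    print(type(error).__name__)
```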
|
@ -8,6 +8,7 @@ from cognee.modules.data.models import Data
|
|||
from cognee.infrastructure.databases.relational import get_relational_engine
|
||||
from cognee.modules.chunking.TextChunker import TextChunker
|
||||
from cognee.modules.chunking.Chunker import Chunker
|
||||
from cognee.tasks.documents.exceptions import InvalidChunkSizeError, InvalidChunkerError
|
||||
|
||||
|
||||
async def update_document_token_count(document_id: UUID, token_count: int) -> None:
|
||||
|
|
@ -37,6 +38,13 @@ async def extract_chunks_from_documents(
|
|||
- The `read` method of the `Document` class must be implemented to support the chunking operation.
|
||||
- The `chunker` parameter determines the chunking logic and should align with the document type.
|
||||
"""
|
||||
if not isinstance(max_chunk_size, int) or max_chunk_size <= 0:
|
||||
raise InvalidChunkSizeError(max_chunk_size)
|
||||
if not isinstance(chunker, type):
|
||||
raise InvalidChunkerError()
|
||||
if not hasattr(chunker, "read"):
|
||||
raise InvalidChunkerError()
|
||||
|
||||
for document in documents:
|
||||
document_token_count = 0
|
||||
|
||||
|
|
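The chunker guard only requires a class that defines `read`, which is easy to check in isolation. A standalone illustration of what the guard accepts; a real chunker should subclass cognee's `Chunker` and honour its `read` contract, and `WordCountChunker` below is purely hypothetical:

```python
class WordCountChunker:
    """Hypothetical stand-in: it is a class and it defines read(), so it passes
    the new guards in extract_chunks_from_documents. It is not a working chunker."""

    def read(self):
        # Real chunkers yield DocumentChunk objects built from the document text.
        yield from ()


# The two conditions the new guards check:
assert isinstance(WordCountChunker, type)
assert hasattr(WordCountChunker, "read")
```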
@ -48,5 +56,3 @@ async def extract_chunks_from_documents(
            yield document_chunk

        await update_document_token_count(document.id, document_token_count)
-
-# todo rita

cognee/tasks/graph/exceptions/__init__.py (new file, 12 lines)
@ -0,0 +1,12 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import (
    InvalidDataChunksError,
    InvalidGraphModelError,
    InvalidOntologyAdapterError,
    InvalidChunkGraphInputError,
)

cognee/tasks/graph/exceptions/exceptions.py (new file, 41 lines)
@ -0,0 +1,41 @@
from cognee.exceptions import (
    CogneeValidationError,
    CogneeConfigurationError,
)
from fastapi import status


class InvalidDataChunksError(CogneeValidationError):
    def __init__(self, detail: str):
        super().__init__(
            message=f"Invalid data_chunks: {detail}",
            name="InvalidDataChunksError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )


class InvalidGraphModelError(CogneeValidationError):
    def __init__(self, got):
        super().__init__(
            message=f"graph_model must be a subclass of BaseModel (got {got}).",
            name="InvalidGraphModelError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )


class InvalidOntologyAdapterError(CogneeConfigurationError):
    def __init__(self, got):
        super().__init__(
            message=f"ontology_adapter lacks required interface (got {got}).",
            name="InvalidOntologyAdapterError",
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )


class InvalidChunkGraphInputError(CogneeValidationError):
    def __init__(self, detail: str):
        super().__init__(
            message=f"Invalid chunk inputs or LLM Chunkgraphs: {detail}",
            name="InvalidChunkGraphInputError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
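This file shows the intended split between the base errors: bad caller input is a `CogneeValidationError` (HTTP 400), while a misconfigured ontology adapter is a `CogneeConfigurationError` (HTTP 500). A hedged routing sketch; the reaction strings are placeholders for whatever the caller actually does:

```python
from cognee.exceptions import CogneeConfigurationError, CogneeValidationError
from cognee.tasks.graph.exceptions import (
    InvalidDataChunksError,
    InvalidOntologyAdapterError,
)


def route_graph_error(error: Exception) -> str:
    """Decide how to react based on the base class, not the concrete type."""
    if isinstance(error, CogneeValidationError):
        return "reject request: caller sent bad chunks or graph model"
    if isinstance(error, CogneeConfigurationError):
        return "alert operators: ontology/deployment is misconfigured"
    raise error


print(route_graph_error(InvalidDataChunksError("must be a non-empty list of DocumentChunk.")))
print(route_graph_error(InvalidOntologyAdapterError("None")))
```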
@ -12,6 +12,12 @@ from cognee.modules.graph.utils import (
)
from cognee.shared.data_models import KnowledgeGraph
from cognee.infrastructure.llm.LLMGateway import LLMGateway
+from cognee.tasks.graph.exceptions import (
+    InvalidGraphModelError,
+    InvalidDataChunksError,
+    InvalidChunkGraphInputError,
+    InvalidOntologyAdapterError,
+)


async def integrate_chunk_graphs(

@ -21,6 +27,20 @@ async def integrate_chunk_graphs(
    ontology_adapter: OntologyResolver,
) -> List[DocumentChunk]:
    """Updates DocumentChunk objects, integrates data points and edges into databases."""
+
+    if not isinstance(data_chunks, list) or not isinstance(chunk_graphs, list):
+        raise InvalidChunkGraphInputError("data_chunks and chunk_graphs must be lists.")
+    if len(data_chunks) != len(chunk_graphs):
+        raise InvalidChunkGraphInputError(
+            f"length mismatch: {len(data_chunks)} chunks vs {len(chunk_graphs)} graphs."
+        )
+    if not isinstance(graph_model, type) or not issubclass(graph_model, BaseModel):
+        raise InvalidGraphModelError(graph_model)
+    if ontology_adapter is None or not hasattr(ontology_adapter, "get_subgraph"):
+        raise InvalidOntologyAdapterError(
+            type(ontology_adapter).__name__ if ontology_adapter else "None"
+        )
+
    graph_engine = await get_graph_engine()

    if graph_model is not KnowledgeGraph:
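A standalone mirror of the paired-list invariant enforced above: chunks and their extracted graphs are consumed pairwise downstream, so the lengths must match before any database work begins (the sample lists are placeholders, illustration only):

```python
from cognee.tasks.graph.exceptions import InvalidChunkGraphInputError


def check_paired(data_chunks: list, chunk_graphs: list) -> None:
    """Illustration only: same checks as the guard in integrate_chunk_graphs."""
    if not isinstance(data_chunks, list) or not isinstance(chunk_graphs, list):
        raise InvalidChunkGraphInputError("data_chunks and chunk_graphs must be lists.")
    if len(data_chunks) != len(chunk_graphs):
        raise InvalidChunkGraphInputError(
            f"length mismatch: {len(data_chunks)} chunks vs {len(chunk_graphs)} graphs."
        )


check_paired(["chunk-a", "chunk-b"], ["graph-a", "graph-b"])  # passes silently
try:
    check_paired(["chunk-a"], [])  # 1 chunk vs 0 graphs
except InvalidChunkGraphInputError as error:
    print(type(error).__name__)
```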
@ -55,6 +75,14 @@ async def extract_graph_from_data(
    """
    Extracts and integrates a knowledge graph from the text content of document chunks using a specified graph model.
    """
+
+    if not isinstance(data_chunks, list) or not data_chunks:
+        raise InvalidDataChunksError("must be a non-empty list of DocumentChunk.")
+    if not all(hasattr(c, "text") for c in data_chunks):
+        raise InvalidDataChunksError("each chunk must have a 'text' attribute")
+    if not isinstance(graph_model, type) or not issubclass(graph_model, BaseModel):
+        raise InvalidGraphModelError(graph_model)
+
    chunk_graphs = await asyncio.gather(
        *[LLMGateway.extract_content_graph(chunk.text, graph_model) for chunk in data_chunks]
    )

cognee/tasks/ingestion/exceptions/__init__.py (new file, 8 lines)
@ -0,0 +1,8 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various application errors,
such as System, Validation, Configuration or TransientErrors
"""

from .exceptions import S3FileSystemNotFoundError

cognee/tasks/ingestion/exceptions/exceptions.py (new file, 12 lines)
@ -0,0 +1,12 @@
from cognee.exceptions import CogneeSystemError
from fastapi import status


class S3FileSystemNotFoundError(CogneeSystemError):
    def __init__(
        self,
        name: str = "S3FileSystemNotFoundError",
        status_code: int = status.HTTP_500_INTERNAL_SERVER_ERROR,
    ):
        message = "Could not find S3FileSystem."
        super().__init__(message, name, status_code)
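`S3FileSystemNotFoundError` hangs off `CogneeSystemError` rather than the validation branch, signalling an infrastructure fault instead of bad input. One way a caller might exploit that split is to retry only the system branch; retry-on-system-error is a policy assumption, not something this PR prescribes:

```python
import asyncio

from cognee.exceptions import CogneeSystemError, CogneeValidationError


async def call_with_retry(operation, attempts: int = 3, delay: float = 1.0):
    """Retry system-level faults with backoff; never retry validation errors."""
    for attempt in range(1, attempts + 1):
        try:
            return await operation()
        except CogneeValidationError:
            raise  # bad input will not improve on retry
        except CogneeSystemError:
            if attempt == attempts:
                raise
            await asyncio.sleep(delay * attempt)
```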
@ -1,6 +1,9 @@
import os
from urllib.parse import urlparse
from typing import List, Union, BinaryIO

+from cognee.tasks.ingestion.exceptions import S3FileSystemNotFoundError
+from cognee.exceptions import CogneeSystemError
from cognee.infrastructure.files.storage.s3_config import get_s3_config

@ -54,6 +57,8 @@ async def resolve_data_directories(
                else:
                    s3_files.append(key)
                resolved_data.extend(s3_files)
+            else:
+                raise S3FileSystemNotFoundError()

        elif os.path.isdir(item):  # If it's a directory
            if include_subdirectories:
@ -5,9 +5,17 @@ from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.modules.graph.utils import deduplicate_nodes_and_edges, get_graph_from_model
from .index_data_points import index_data_points
from .index_graph_edges import index_graph_edges
+from cognee.tasks.storage.exceptions import (
+    InvalidDataPointsInAddDataPointsError,
+)


async def add_data_points(data_points: List[DataPoint]) -> List[DataPoint]:
+    if not isinstance(data_points, list):
+        raise InvalidDataPointsInAddDataPointsError("data_points must be a list.")
+    if not all(isinstance(dp, DataPoint) for dp in data_points):
+        raise InvalidDataPointsInAddDataPointsError("data_points: each item must be a DataPoint.")
+
    nodes = []
    edges = []
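Because `add_data_points` now rejects anything that is not a `DataPoint`, callers with mixed payloads can pre-filter before handing data over. A hedged sketch; whether silently dropping foreign items is acceptable (rather than failing fast) is an application-level decision:

```python
from typing import List

from cognee.infrastructure.engine import DataPoint


def keep_data_points(payload: list) -> List[DataPoint]:
    # Keep only items the new guard in add_data_points will accept.
    return [item for item in payload if isinstance(item, DataPoint)]
```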
cognee/tasks/storage/exceptions/__init__.py (new file, 9 lines)
@ -0,0 +1,9 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import (
    InvalidDataPointsInAddDataPointsError,
)

cognee/tasks/storage/exceptions/exceptions.py (new file, 13 lines)
@ -0,0 +1,13 @@
from cognee.exceptions import (
    CogneeValidationError,
)
from fastapi import status


class InvalidDataPointsInAddDataPointsError(CogneeValidationError):
    def __init__(self, detail: str):
        super().__init__(
            message=f"Invalid data_points: {detail}",
            name="InvalidDataPointsInAddDataPointsError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
@ -1,6 +1,6 @@
from cognee.shared.logging_utils import get_logger

-from cognee.infrastructure.databases.exceptions.EmbeddingException import EmbeddingException
+from cognee.infrastructure.databases.exceptions import EmbeddingException
from cognee.infrastructure.databases.vector import get_vector_engine
from cognee.infrastructure.engine import DataPoint

cognee/tasks/summarization/exceptions/__init__.py (new file, 9 lines)
@ -0,0 +1,9 @@
"""
Custom exceptions for the Cognee API.

This module defines a set of exceptions for handling various data errors
"""

from .exceptions import (
    InvalidSummaryInputsError,
)

cognee/tasks/summarization/exceptions/exceptions.py (new file, 14 lines)
@ -0,0 +1,14 @@
from cognee.exceptions import (
    CogneeValidationError,
    CogneeConfigurationError,
)
from fastapi import status


class InvalidSummaryInputsError(CogneeValidationError):
    def __init__(self, detail: str):
        super().__init__(
            message=f"Invalid summarize_text inputs: {detail}",
            name="InvalidSummaryInputsError",
            status_code=status.HTTP_400_BAD_REQUEST,
        )
@ -3,10 +3,11 @@ from typing import Type
from uuid import uuid5
from pydantic import BaseModel

+from cognee.tasks.summarization.exceptions import InvalidSummaryInputsError
from cognee.modules.chunking.models.DocumentChunk import DocumentChunk
from cognee.infrastructure.llm.LLMGateway import LLMGateway
from cognee.modules.cognify.config import get_cognify_config
-from .models import TextSummary
+from cognee.tasks.summarization.models import TextSummary


async def summarize_text(

@ -35,6 +36,12 @@ async def summarize_text(
        A list of TextSummary objects, each containing the summary of a corresponding
        DocumentChunk.
    """
+
+    if not isinstance(data_chunks, list):
+        raise InvalidSummaryInputsError("data_chunks must be a list.")
+    if not all(hasattr(c, "text") for c in data_chunks):
+        raise InvalidSummaryInputsError("each DocumentChunk must have a 'text' attribute.")
+
    if len(data_chunks) == 0:
        return data_chunks
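Note the ordering of the new guards: type validation runs before the empty-list shortcut, so an "empty-ish" tuple is still rejected rather than silently returned. A standalone mirror of that ordering (illustration only):

```python
from cognee.tasks.summarization.exceptions import InvalidSummaryInputsError


def validate_summary_inputs(data_chunks):
    """Same checks, same order, as the guards added to summarize_text."""
    if not isinstance(data_chunks, list):
        raise InvalidSummaryInputsError("data_chunks must be a list.")
    if not all(hasattr(c, "text") for c in data_chunks):
        raise InvalidSummaryInputsError("each DocumentChunk must have a 'text' attribute.")
    return data_chunks


validate_summary_inputs([])  # an empty list passes and would short-circuit later
try:
    validate_summary_inputs(())  # a tuple is rejected even though it is empty
except InvalidSummaryInputsError as error:
    print(type(error).__name__)
```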
@ -7,7 +7,7 @@ from cognee.shared.logging_utils import get_logger
from cognee.modules.users.methods import get_default_user, create_user
from cognee.modules.users.permissions.methods import authorized_give_permission_on_datasets
from cognee.modules.data.methods import get_dataset_data, get_datasets_by_name
-from cognee.api.v1.delete.exceptions import DocumentNotFoundError, DatasetNotFoundError
+from cognee.api.v1.exceptions import DocumentNotFoundError, DatasetNotFoundError

logger = get_logger()
@ -1,8 +1,8 @@
import numpy as np
import pytest

-from cognee.exceptions import InvalidValueError
from cognee.modules.graph.cognee_graph.CogneeGraphElements import Edge, Node
+from cognee.modules.graph.exceptions import InvalidDimensionsError, DimensionOutOfRangeError


def test_node_initialization():

@ -16,7 +16,7 @@ def test_node_initialization():

def test_node_invalid_dimension():
    """Test that initializing a Node with a non-positive dimension raises an error."""
-    with pytest.raises(InvalidValueError, match="Dimension must be a positive integer"):
+    with pytest.raises(InvalidDimensionsError):
        Node("node1", dimension=0)

@ -69,7 +69,7 @@ def test_is_node_alive_in_dimension():
def test_node_alive_invalid_dimension():
    """Test that checking alive status with an invalid dimension raises an error."""
    node = Node("node1", dimension=1)
-    with pytest.raises(InvalidValueError, match="Dimension 1 is out of range"):
+    with pytest.raises(DimensionOutOfRangeError):
        node.is_node_alive_in_dimension(1)

@ -106,7 +106,7 @@ def test_edge_invalid_dimension():
    """Test that initializing an Edge with a non-positive dimension raises an error."""
    node1 = Node("node1")
    node2 = Node("node2")
-    with pytest.raises(InvalidValueError, match="Dimensions must be a positive integer."):
+    with pytest.raises(InvalidDimensionsError):
        Edge(node1, node2, dimension=0)

@ -125,7 +125,7 @@ def test_edge_alive_invalid_dimension():
    node1 = Node("node1")
    node2 = Node("node2")
    edge = Edge(node1, node2, dimension=1)
-    with pytest.raises(InvalidValueError, match="Dimension 1 is out of range"):
+    with pytest.raises(DimensionOutOfRangeError):
        edge.is_edge_alive_in_dimension(1)
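The updated tests pin the exception type but only probe dimension 0; a possible follow-up, assuming negative dimensions are rejected the same way the old "positive integer" message implied, is to parametrize the invalid values (sketch, not part of this PR):

```python
import pytest

from cognee.modules.graph.cognee_graph.CogneeGraphElements import Node
from cognee.modules.graph.exceptions import InvalidDimensionsError


@pytest.mark.parametrize("dimension", [0, -1, -10])
def test_node_rejects_non_positive_dimensions(dimension):
    # Assumes every non-positive dimension maps to InvalidDimensionsError.
    with pytest.raises(InvalidDimensionsError):
        Node("node1", dimension=dimension)
```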
@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from pylint.checkers.utils import node_type

-from cognee.exceptions import InvalidValueError
+from cognee.modules.search.exceptions import UnsupportedSearchTypeError
from cognee.modules.search.methods.search import search, specific_search
from cognee.modules.search.types import SearchType
from cognee.modules.users.models import User

@ -217,7 +217,7 @@ async def test_specific_search_invalid_type(mock_user):
    query_type = "INVALID_TYPE"  # Not a valid SearchType

    # Execute and verify
-    with pytest.raises(InvalidValueError) as excinfo:
+    with pytest.raises(UnsupportedSearchTypeError) as excinfo:
        await specific_search(query_type, query, mock_user)

    assert "Unsupported search type" in str(excinfo.value)