Fixes to PostgreSQL issue

This commit is contained in:
vasilije 2025-08-27 17:57:39 +02:00
parent 2e07c6cbc1
commit 5bc4a6cc02
3 changed files with 12 additions and 14 deletions

View file

@ -42,6 +42,7 @@ def create_relational_engine(
try:
# Test if asyncpg is available
import asyncpg
connection_string = (
f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
)

View file

@ -7,14 +7,17 @@ from sqlalchemy import JSON, Column, Table, select, delete, MetaData
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
from sqlalchemy.exc import ProgrammingError
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential
# asyncpg is an optional dependency (installed only for PostgreSQL support).
# Import its exception types when available; otherwise define local stand-ins
# so the module still imports without the PostgreSQL extras.
# NOTE(review): these types are presumably referenced by the tenacity
# retry_if_exception_type decorators imported above — confirm against the
# rest of this module.
try:
    from asyncpg import DeadlockDetectedError, DuplicateTableError, UniqueViolationError
except ImportError:
    # PostgreSQL dependencies not installed, define dummy exceptions
    class DeadlockDetectedError(Exception):
        # Stand-in for asyncpg.DeadlockDetectedError; never raised locally.
        pass

    class DuplicateTableError(Exception):
        # Stand-in for asyncpg.DuplicateTableError; never raised locally.
        pass

    class UniqueViolationError(Exception):
        # Stand-in for asyncpg.UniqueViolationError; never raised locally.
        pass
@ -81,6 +84,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface):
# Functions reading tables from database need to know what a Vector column type is
try:
from pgvector.sqlalchemy import Vector
self.Vector = Vector
except ImportError:
raise ImportError(

View file

@ -4,7 +4,6 @@ from urllib.parse import urlparse
from contextlib import asynccontextmanager
from cognee.infrastructure.files.utils.get_data_file_path import get_data_file_path
from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
from cognee.infrastructure.files.storage.LocalFileStorage import LocalFileStorage
@ -23,23 +22,17 @@ async def open_data_file(file_path: str, mode: str = "rb", encoding: str = None,
yield file
elif file_path.startswith("s3://"):
try:
from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
except ImportError:
raise ImportError(
"S3 dependencies are not installed. Please install with 'pip install cognee[aws]' to use S3 functionality."
)
normalized_url = get_data_file_path(file_path)
s3_dir_path = os.path.dirname(normalized_url)
s3_filename = os.path.basename(normalized_url)
# if "/" in s3_path:
# s3_dir = "/".join(s3_path.split("/")[:-1])
# s3_filename = s3_path.split("/")[-1]
# else:
# s3_dir = ""
# s3_filename = s3_path
# Extract filesystem path from S3 URL structure
# file_dir_path = (
# f"s3://{parsed_url.netloc}/{s3_dir}" if s3_dir else f"s3://{parsed_url.netloc}"
# )
# file_name = s3_filename
file_storage = S3FileStorage(s3_dir_path)
async with file_storage.open(s3_filename, mode=mode, **kwargs) as file: