fixes to postgres issue

This commit is contained in:
  parent 2e07c6cbc1
  commit 5bc4a6cc02

3 changed files with 12 additions and 14 deletions
@@ -42,6 +42,7 @@ def create_relational_engine(
     try:
         # Test if asyncpg is available
         import asyncpg

         connection_string = (
             f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
         )
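The engine factory checks that the asyncpg driver can actually be imported before it assembles the postgresql+asyncpg connection string. A minimal sketch of that guarded-import pattern, assuming a simplified function signature and an explicit error on the fallback path (neither is taken from the diff):

from sqlalchemy.ext.asyncio import create_async_engine

def build_postgres_engine(db_username, db_password, db_host, db_port, db_name):
    # Sketch only: the real create_relational_engine handles more providers and options.
    try:
        # Test if asyncpg is available before selecting the asyncpg dialect
        import asyncpg  # noqa: F401
    except ImportError as error:
        # Assumed fallback: surface a clear message instead of failing later inside SQLAlchemy
        raise ImportError(
            "asyncpg is not installed; install the postgres extras to use PostgreSQL."
        ) from error

    connection_string = (
        f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
    )
    return create_async_engine(connection_string)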
@@ -7,14 +7,17 @@ from sqlalchemy import JSON, Column, Table, select, delete, MetaData
 from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker
 from sqlalchemy.exc import ProgrammingError
 from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

 try:
     from asyncpg import DeadlockDetectedError, DuplicateTableError, UniqueViolationError
 except ImportError:
     # PostgreSQL dependencies not installed, define dummy exceptions
     class DeadlockDetectedError(Exception):
         pass

     class DuplicateTableError(Exception):
         pass

     class UniqueViolationError(Exception):
         pass
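The dummy classes keep this module importable when asyncpg is missing, since the tenacity retry decorators reference these exception types at import time. A short sketch of how such a decorator typically consumes them; the retry limits and the wrapped function are assumptions, not part of the diff:

from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential

@retry(
    retry=retry_if_exception_type(DeadlockDetectedError),  # assumes the class defined above is in scope
    stop=stop_after_attempt(3),
    wait=wait_exponential(multiplier=0.5),
)
async def insert_rows(session, table, rows):
    # With asyncpg installed, a real DeadlockDetectedError triggers a retry;
    # with the dummy class, the decorator still resolves but simply never matches.
    await session.execute(table.insert().values(rows))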
@@ -81,6 +84,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface):
         # Functions reading tables from database need to know what a Vector column type is
         try:
             from pgvector.sqlalchemy import Vector

             self.Vector = Vector
         except ImportError:
             raise ImportError(
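The message passed to ImportError is cut off in the capture above. A sketch of what the guard plausibly looks like inside the adapter's initialiser; the class scaffolding and the exact error text are assumptions:

class VectorColumnSupport:
    # Sketch: the wrapper class and the error text are assumed; the import/assign pattern is from the diff.
    def __init__(self):
        try:
            from pgvector.sqlalchemy import Vector

            # Table-reflection helpers use this handle to resolve vector columns
            self.Vector = Vector
        except ImportError as error:
            raise ImportError(
                "pgvector is not installed; install the postgres extras to use PGVectorAdapter."
            ) from error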
@@ -4,7 +4,6 @@ from urllib.parse import urlparse
 from contextlib import asynccontextmanager

 from cognee.infrastructure.files.utils.get_data_file_path import get_data_file_path
-from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
 from cognee.infrastructure.files.storage.LocalFileStorage import LocalFileStorage
@@ -23,23 +22,17 @@ async def open_data_file(file_path: str, mode: str = "rb", encoding: str = None,
             yield file

     elif file_path.startswith("s3://"):
+        try:
+            from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
+        except ImportError:
+            raise ImportError(
+                "S3 dependencies are not installed. Please install with 'pip install cognee[aws]' to use S3 functionality."
+            )

         normalized_url = get_data_file_path(file_path)
         s3_dir_path = os.path.dirname(normalized_url)
         s3_filename = os.path.basename(normalized_url)

-        # if "/" in s3_path:
-        #     s3_dir = "/".join(s3_path.split("/")[:-1])
-        #     s3_filename = s3_path.split("/")[-1]
-        # else:
-        #     s3_dir = ""
-        #     s3_filename = s3_path

-        # Extract filesystem path from S3 URL structure
-        # file_dir_path = (
-        #     f"s3://{parsed_url.netloc}/{s3_dir}" if s3_dir else f"s3://{parsed_url.netloc}"
-        # )
-        # file_name = s3_filename

         file_storage = S3FileStorage(s3_dir_path)

         async with file_storage.open(s3_filename, mode=mode, **kwargs) as file:
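Paired with the top-level import deleted in the previous hunk, this makes S3 support a lazy, optional dependency: S3FileStorage is only imported once an s3:// path is actually opened. A condensed sketch of the resulting control flow; only the s3 branch mirrors the diff, while the local branch and the path handling are assumptions:

import os
from contextlib import asynccontextmanager

@asynccontextmanager
async def open_data_file(file_path: str, mode: str = "rb", **kwargs):
    if file_path.startswith("s3://"):
        try:
            # Imported lazily so installs without the aws extras still work for local files
            from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
        except ImportError:
            raise ImportError(
                "S3 dependencies are not installed. Please install with "
                "'pip install cognee[aws]' to use S3 functionality."
            )

        s3_dir_path = os.path.dirname(file_path)
        s3_filename = os.path.basename(file_path)
        file_storage = S3FileStorage(s3_dir_path)
        async with file_storage.open(s3_filename, mode=mode, **kwargs) as file:
            yield file
    else:
        # Assumed local fallback using plain open(); the real code goes through LocalFileStorage
        with open(file_path, mode, **kwargs) as file:
            yield file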