fix: add pipeline_name to PipelineRun and change logging default to ERROR (#675)

<!-- .github/pull_request_template.md -->

## Description
This PR adds a `pipeline_name` column to `PipelineRun` and threads the name through `log_pipeline_run_start`, `log_pipeline_run_complete`, and `log_pipeline_run_error`, so every recorded run can be attributed to the pipeline that produced it. It also rewrites `cognee.add` on top of the `run_tasks` pipeline (promoting the previous `add_v2` implementation and removing the dlt-based `add_files` path), resolves the logging level from the `LOG_LEVEL` environment variable instead of hard-coding `INFO`, truncates the SQLite database file on deletion instead of removing it, and bumps `dlt` to 1.9.0 and SQLAlchemy to 2.0.39.

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
Commit daed8d51f5 (parent 119fa1eb73), authored by Boris on 2025-03-29 14:55:34 +01:00 and committed via GitHub.
18 changed files with 168 additions and 298 deletions.


@@ -4,4 +4,4 @@
 <!-- Provide a clear description of the changes in this PR -->
 ## DCO Affirmation
-I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin
+I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.


@@ -1 +1 @@
-from .add_v2 import add
+from .add import add


@@ -1,176 +1,47 @@
-from typing import List, Union, BinaryIO
-from os import path
-import asyncio
-import dlt
-import cognee.modules.ingestion as ingestion
-from cognee.infrastructure.files.storage import LocalStorage
-from cognee.modules.ingestion import get_matched_datasets, save_data_to_file
-from cognee.shared.utils import send_telemetry
-from cognee.base_config import get_base_config
-from cognee.infrastructure.databases.relational import get_relational_engine
-from cognee.modules.users.methods import get_default_user
-from cognee.tasks.ingestion import get_dlt_destination
-from cognee.modules.users.permissions.methods import give_permission_on_document
-from cognee.modules.users.models import User
-from cognee.modules.data.methods import create_dataset
-from cognee.infrastructure.databases.relational import (
-    create_db_and_tables as create_relational_db_and_tables,
-)
-from cognee.infrastructure.databases.vector.pgvector import (
-    create_db_and_tables as create_pgvector_db_and_tables,
-)
-
-
-async def add(
-    data: Union[BinaryIO, List[BinaryIO], str, List[str]],
-    dataset_name: str = "main_dataset",
-    user: User = None,
-):
-    await create_relational_db_and_tables()
-    await create_pgvector_db_and_tables()
-
-    if isinstance(data, str):
-        if "data://" in data:
-            # data is a data directory path
-            datasets = get_matched_datasets(data.replace("data://", ""), dataset_name)
-
-            return await asyncio.gather(
-                *[add(file_paths, dataset_name) for [dataset_name, file_paths] in datasets]
-            )
-
-        if "file://" in data:
-            # data is a file path
-            return await add([data], dataset_name)
-
-        # data is text
-        else:
-            file_path = save_data_to_file(data)
-            return await add([file_path], dataset_name)
-
-    if hasattr(data, "file"):
-        file_path = save_data_to_file(data.file, filename=data.filename)
-        return await add([file_path], dataset_name)
-
-    # data is a list of file paths or texts
-    file_paths = []
-
-    for data_item in data:
-        if hasattr(data_item, "file"):
-            file_paths.append(save_data_to_file(data_item, filename=data_item.filename))
-        elif isinstance(data_item, str) and (
-            data_item.startswith("/") or data_item.startswith("file://")
-        ):
-            file_paths.append(data_item)
-        elif isinstance(data_item, str):
-            file_paths.append(save_data_to_file(data_item))
-
-    if len(file_paths) > 0:
-        return await add_files(file_paths, dataset_name, user)
-
-    return []
-
-
-async def add_files(file_paths: List[str], dataset_name: str, user: User = None):
-    if user is None:
-        user = await get_default_user()
-
-    base_config = get_base_config()
-    data_directory_path = base_config.data_root_directory
-
-    processed_file_paths = []
-
-    for file_path in file_paths:
-        file_path = file_path.replace("file://", "")
-
-        if data_directory_path not in file_path:
-            file_name = file_path.split("/")[-1]
-            file_directory_path = (
-                data_directory_path
-                + "/"
-                + (dataset_name.replace(".", "/") + "/" if dataset_name != "main_dataset" else "")
-            )
-            dataset_file_path = path.join(file_directory_path, file_name)
-
-            LocalStorage.ensure_directory_exists(file_directory_path)
-            LocalStorage.copy_file(file_path, dataset_file_path)
-            processed_file_paths.append(dataset_file_path)
-        else:
-            processed_file_paths.append(file_path)
-
-    destination = get_dlt_destination()
-
-    pipeline = dlt.pipeline(
-        pipeline_name="file_load_from_filesystem",
-        destination=destination,
-    )
-
-    dataset_name = (
-        dataset_name.replace(" ", "_").replace(".", "_")
-        if dataset_name is not None
-        else "main_dataset"
-    )
-
-    @dlt.resource(standalone=True, merge_key="id")
-    async def data_resources(file_paths: str, user: User):
-        for file_path in file_paths:
-            with open(file_path.replace("file://", ""), mode="rb") as file:
-                classified_data = ingestion.classify(file)
-                data_id = ingestion.identify(classified_data)
-                file_metadata = classified_data.get_metadata()
-
-                from sqlalchemy import select
-                from cognee.modules.data.models import Data
-
-                db_engine = get_relational_engine()
-
-                async with db_engine.get_async_session() as session:
-                    dataset = await create_dataset(dataset_name, user.id, session)
-
-                    data = (
-                        await session.execute(select(Data).filter(Data.id == data_id))
-                    ).scalar_one_or_none()
-
-                    if data is not None:
-                        data.name = file_metadata["name"]
-                        data.raw_data_location = file_metadata["file_path"]
-                        data.extension = file_metadata["extension"]
-                        data.mime_type = file_metadata["mime_type"]
-
-                        await session.merge(data)
-                        await session.commit()
-                    else:
-                        data = Data(
-                            id=data_id,
-                            name=file_metadata["name"],
-                            raw_data_location=file_metadata["file_path"],
-                            extension=file_metadata["extension"],
-                            mime_type=file_metadata["mime_type"],
-                        )
-
-                        dataset.data.append(data)
-                        await session.commit()
-
-                yield {
-                    "id": data_id,
-                    "name": file_metadata["name"],
-                    "file_path": file_metadata["file_path"],
-                    "extension": file_metadata["extension"],
-                    "mime_type": file_metadata["mime_type"],
-                }
-
-                await give_permission_on_document(user, data_id, "read")
-                await give_permission_on_document(user, data_id, "write")
-
-    send_telemetry("cognee.add EXECUTION STARTED", user_id=user.id)
-
-    run_info = pipeline.run(
-        data_resources(processed_file_paths, user),
-        table_name="file_metadata",
-        dataset_name=dataset_name,
-        write_disposition="merge",
-    )
-
-    send_telemetry("cognee.add EXECUTION COMPLETED", user_id=user.id)
-
-    return run_info
+from typing import Union, BinaryIO
+
+from cognee.modules.users.models import User
+from cognee.modules.users.methods import get_default_user
+from cognee.modules.pipelines import run_tasks, Task
+from cognee.tasks.ingestion import ingest_data, resolve_data_directories
+from cognee.infrastructure.databases.relational import (
+    create_db_and_tables as create_relational_db_and_tables,
+)
+from cognee.infrastructure.databases.vector.pgvector import (
+    create_db_and_tables as create_pgvector_db_and_tables,
+)
+from uuid import uuid5, NAMESPACE_OID
+
+
+async def add(
+    data: Union[BinaryIO, list[BinaryIO], str, list[str]],
+    dataset_name: str = "main_dataset",
+    user: User = None,
+):
+    # Create tables for databases
+    await create_relational_db_and_tables()
+    await create_pgvector_db_and_tables()
+
+    # Initialize first_run attribute if it doesn't exist
+    if not hasattr(add, "first_run"):
+        add.first_run = True
+
+    if add.first_run:
+        from cognee.infrastructure.llm.utils import test_llm_connection, test_embedding_connection
+
+        # Test LLM and Embedding configuration once before running Cognee
+        await test_llm_connection()
+        await test_embedding_connection()
+        add.first_run = False  # Update flag after first run
+
+    if user is None:
+        user = await get_default_user()
+
+    tasks = [Task(resolve_data_directories), Task(ingest_data, dataset_name, user)]
+
+    dataset_id = uuid5(NAMESPACE_OID, dataset_name)
+    pipeline = run_tasks(
+        tasks=tasks, dataset_id=dataset_id, data=data, pipeline_name="add_pipeline"
+    )
+
+    async for pipeline_status in pipeline:
+        print(f"Pipeline run status: {pipeline_status.pipeline_name} - {pipeline_status.status}")


@@ -1,47 +0,0 @@
-from typing import Union, BinaryIO
-
-from cognee.modules.users.models import User
-from cognee.modules.users.methods import get_default_user
-from cognee.modules.pipelines import run_tasks, Task
-from cognee.tasks.ingestion import ingest_data, resolve_data_directories
-from cognee.infrastructure.databases.relational import (
-    create_db_and_tables as create_relational_db_and_tables,
-)
-from cognee.infrastructure.databases.vector.pgvector import (
-    create_db_and_tables as create_pgvector_db_and_tables,
-)
-from uuid import uuid5, NAMESPACE_OID
-
-
-async def add(
-    data: Union[BinaryIO, list[BinaryIO], str, list[str]],
-    dataset_name: str = "main_dataset",
-    user: User = None,
-):
-    # Create tables for databases
-    await create_relational_db_and_tables()
-    await create_pgvector_db_and_tables()
-
-    # Initialize first_run attribute if it doesn't exist
-    if not hasattr(add, "first_run"):
-        add.first_run = True
-
-    if add.first_run:
-        from cognee.infrastructure.llm.utils import test_llm_connection, test_embedding_connection
-
-        # Test LLM and Embedding configuration once before running Cognee
-        await test_llm_connection()
-        await test_embedding_connection()
-        add.first_run = False  # Update flag after first run
-
-    if user is None:
-        user = await get_default_user()
-
-    tasks = [Task(resolve_data_directories), Task(ingest_data, dataset_name, user)]
-
-    dataset_id = uuid5(NAMESPACE_OID, dataset_name)
-    pipeline = run_tasks(
-        tasks=tasks, dataset_id=dataset_id, data=data, pipeline_name="add_pipeline"
-    )
-
-    async for result in pipeline:
-        print(result)


@@ -5,7 +5,6 @@ import os
 import json
 import asyncio
 from cognee.shared.logging_utils import get_logger
-from sqlalchemy import text
 from typing import Dict, Any, List, Union
 from uuid import UUID
 import aiofiles


@@ -1,15 +1,7 @@
-from cognee.infrastructure.files.storage import LocalStorage
-from .ModelBase import Base
-from .get_relational_engine import get_relational_engine, get_relational_config
+from .get_relational_engine import get_relational_engine


 async def create_db_and_tables():
-    relational_config = get_relational_config()
     relational_engine = get_relational_engine()

-    if relational_engine.engine.dialect.name == "sqlite":
-        LocalStorage.ensure_directory_exists(relational_config.db_path)
-
-    async with relational_engine.engine.begin() as connection:
-        if len(Base.metadata.tables.keys()) > 0:
-            await connection.run_sync(Base.metadata.create_all)
+    await relational_engine.create_database()


@@ -307,7 +307,7 @@ class SQLAlchemyAdapter:
             raise e

     async def create_database(self):
-        if self.engine.dialect.name == "sqlite":
+        if self.engine.dialect.name == "sqlite" and not os.path.exists(self.db_path):
             from cognee.infrastructure.files.storage import LocalStorage

             db_directory = path.dirname(self.db_path)
@@ -322,7 +322,9 @@ class SQLAlchemyAdapter:
         if self.engine.dialect.name == "sqlite":
             from cognee.infrastructure.files.storage import LocalStorage

-            LocalStorage.remove(self.db_path)
+            await self.engine.dispose(close=True)
+            with open(self.db_path, "w") as file:
+                file.write("")
         else:
             async with self.engine.begin() as connection:
                 schema_list = await self.get_schema_list()
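The behavioural difference on SQLite is that the database file is now truncated instead of unlinked, so its path keeps existing after a prune. A small sketch of what that implies for callers, assuming the surrounding method in the hunk above is the adapter's `delete_database`:

```python
import os


async def verify_sqlite_reset(adapter):
    # Illustrative check only: after this change, deleting the SQLite database
    # leaves an empty file behind rather than removing it from disk.
    await adapter.delete_database()
    assert os.path.exists(adapter.db_path)
    assert os.path.getsize(adapter.db_path) == 0
```

This is also why the deletion test further down now reads the file and asserts it is empty instead of asserting that the path is gone.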


@@ -20,6 +20,7 @@ class PipelineRun(Base):
     status = Column(Enum(PipelineRunStatus))

     pipeline_run_id = Column(UUID, index=True)
+    pipeline_name = Column(String)
     pipeline_id = Column(UUID, index=True)
     dataset_id = Column(UUID, index=True)
     run_info = Column(JSON)
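With `pipeline_name` persisted on every run, history can be sliced per pipeline. A hypothetical read-side helper (not part of this PR) could look like:

```python
from sqlalchemy import select

from cognee.infrastructure.databases.relational import get_relational_engine
from cognee.modules.pipelines.models import PipelineRun


async def get_runs_for_pipeline(pipeline_name: str):
    # Fetch all recorded runs for a named pipeline, e.g. "add_pipeline".
    db_engine = get_relational_engine()

    async with db_engine.get_async_session() as session:
        result = await session.execute(
            select(PipelineRun).where(PipelineRun.pipeline_name == pipeline_name)
        )
        return result.scalars().all()
```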


@@ -1,4 +1,4 @@
-from uuid import UUID, uuid4
+from uuid import UUID
 from cognee.infrastructure.databases.relational import get_relational_engine
 from cognee.modules.data.models import Data
 from cognee.modules.pipelines.models import PipelineRun, PipelineRunStatus
@@ -6,7 +6,7 @@ from typing import Any

 async def log_pipeline_run_complete(
-    pipeline_run_id: UUID, pipeline_id: str, dataset_id: UUID, data: Any
+    pipeline_run_id: UUID, pipeline_id: str, pipeline_name: str, dataset_id: UUID, data: Any
 ):
     if not data:
         data_info = "None"
@@ -17,6 +17,7 @@ async def log_pipeline_run_complete(
     pipeline_run = PipelineRun(
         pipeline_run_id=pipeline_run_id,
+        pipeline_name=pipeline_name,
         pipeline_id=pipeline_id,
         status=PipelineRunStatus.DATASET_PROCESSING_COMPLETED,
         dataset_id=dataset_id,


@@ -1,4 +1,4 @@
-from uuid import UUID, uuid4
+from uuid import UUID
 from cognee.infrastructure.databases.relational import get_relational_engine
 from cognee.modules.data.models import Data
 from cognee.modules.pipelines.models import PipelineRun, PipelineRunStatus
@@ -6,7 +6,12 @@ from typing import Any

 async def log_pipeline_run_error(
-    pipeline_run_id: UUID, pipeline_id: str, dataset_id: UUID, data: Any, e: Exception
+    pipeline_run_id: UUID,
+    pipeline_id: str,
+    pipeline_name: str,
+    dataset_id: UUID,
+    data: Any,
+    e: Exception,
 ):
     if not data:
         data_info = "None"
@@ -17,6 +22,7 @@ async def log_pipeline_run_error(
     pipeline_run = PipelineRun(
         pipeline_run_id=pipeline_run_id,
+        pipeline_name=pipeline_name,
         pipeline_id=pipeline_id,
         status=PipelineRunStatus.DATASET_PROCESSING_ERRORED,
         dataset_id=dataset_id,


@@ -5,7 +5,7 @@ from cognee.modules.pipelines.models import PipelineRun, PipelineRunStatus
 from typing import Any

-async def log_pipeline_run_start(pipeline_id: str, dataset_id: UUID, data: Any):
+async def log_pipeline_run_start(pipeline_id: str, pipeline_name: str, dataset_id: UUID, data: Any):
     if not data:
         data_info = "None"
     elif isinstance(data, list) and all(isinstance(item, Data) for item in data):
@@ -17,6 +17,7 @@ async def log_pipeline_run_start(pipeline_id: str, dataset_id: UUID, data: Any):
     pipeline_run = PipelineRun(
         pipeline_run_id=pipeline_run_id,
+        pipeline_name=pipeline_name,
         pipeline_id=pipeline_id,
         status=PipelineRunStatus.DATASET_PROCESSING_STARTED,
         dataset_id=dataset_id,


@@ -277,7 +277,7 @@ async def run_tasks(
 ):
     pipeline_id = uuid5(NAMESPACE_OID, pipeline_name)

-    pipeline_run = await log_pipeline_run_start(pipeline_id, dataset_id, data)
+    pipeline_run = await log_pipeline_run_start(pipeline_id, pipeline_name, dataset_id, data)
     yield pipeline_run

     pipeline_run_id = pipeline_run.pipeline_run_id
@@ -286,8 +286,12 @@ async def run_tasks(
         async for _ in run_tasks_with_telemetry(tasks, data, pipeline_id):
             pass

-        yield await log_pipeline_run_complete(pipeline_run_id, pipeline_id, dataset_id, data)
+        yield await log_pipeline_run_complete(
+            pipeline_run_id, pipeline_id, pipeline_name, dataset_id, data
+        )
     except Exception as e:
-        yield await log_pipeline_run_error(pipeline_run_id, pipeline_id, dataset_id, data, e)
+        yield await log_pipeline_run_error(
+            pipeline_run_id, pipeline_id, pipeline_name, dataset_id, data, e
+        )
         raise e
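To see how the name propagates end to end, here is a sketch of driving `run_tasks` directly; the task function and pipeline name are placeholders, not part of this change:

```python
from uuid import uuid5, NAMESPACE_OID

from cognee.modules.pipelines import run_tasks, Task


async def echo_task(data):
    # Placeholder task: a real pipeline would transform or ingest `data` here.
    return data


async def run_example(data):
    pipeline = run_tasks(
        tasks=[Task(echo_task)],
        dataset_id=uuid5(NAMESPACE_OID, "main_dataset"),
        data=data,
        pipeline_name="example_pipeline",
    )

    async for run in pipeline:
        # Every yielded PipelineRun (started, completed, or errored) now carries pipeline_name.
        print(run.pipeline_name, run.status)
```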


@@ -1,5 +1,5 @@
-import sys
 import os
+import sys
 import threading
 import logging
 import structlog
@@ -14,6 +14,15 @@ WARNING = logging.WARNING
 ERROR = logging.ERROR
 CRITICAL = logging.CRITICAL

+log_levels = {
+    "CRITICAL": logging.CRITICAL,
+    "ERROR": logging.ERROR,
+    "WARNING": logging.WARNING,
+    "INFO": logging.INFO,
+    "DEBUG": logging.DEBUG,
+    "NOTSET": logging.NOTSET,
+}
+
 # Track if logging has been configured
 _is_configured = False
@@ -108,12 +117,12 @@ class PlainFileHandler(logging.FileHandler):
         self.flush()


-def get_logger(name=None, level=INFO):
+def get_logger(name=None, level=None):
     """Get a configured structlog logger.

     Args:
         name: Logger name (default: None, uses __name__)
-        level: Logging level (default: INFO)
+        level: Logging level (default: None)

     Returns:
         A configured structlog logger instance
@@ -164,17 +173,19 @@ def cleanup_old_logs(logs_dir, max_files):
         return False


-def setup_logging(log_level=INFO, name=None):
+def setup_logging(log_level=None, name=None):
     """Sets up the logging configuration with structlog integration.

     Args:
-        log_level: The logging level to use (default: INFO)
+        log_level: The logging level to use (default: None, uses INFO)
         name: Optional logger name (default: None, uses __name__)

     Returns:
         A configured structlog logger instance
     """
+    log_level = log_level if log_level else log_levels[os.getenv("LOG_LEVEL", "INFO")]
+
     def exception_handler(logger, method_name, event_dict):
         """Custom processor to handle uncaught exceptions."""
         # Check if there's an exc_info that needs to be processed
@@ -274,6 +285,17 @@ def setup_logging(log_level=INFO, name=None):
     root_logger.addHandler(file_handler)
     root_logger.setLevel(log_level)

+    if log_level > logging.WARNING:
+        import warnings
+        from sqlalchemy.exc import SAWarning
+
+        warnings.filterwarnings(
+            "ignore", category=SAWarning, module="dlt.destinations.impl.sqlalchemy.merge_job"
+        )
+        warnings.filterwarnings(
+            "ignore", category=SAWarning, module="dlt.destinations.impl.sqlalchemy.load_jobs"
+        )
+
     # Clean up old log files, keeping only the most recent ones
     cleanup_old_logs(LOGS_DIR, MAX_LOG_FILES)
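Because `setup_logging()` now falls back to the `LOG_LEVEL` environment variable when no explicit level is passed, the ERROR default mentioned in the title can be exercised as below. This is a sketch; it assumes `get_logger(level=None)` defers to the root logger level configured by `setup_logging`:

```python
import os

# Must be set before setup_logging() runs; "ERROR" maps to logging.ERROR via log_levels.
os.environ["LOG_LEVEL"] = "ERROR"

from cognee.shared.logging_utils import setup_logging, get_logger

setup_logging()  # no explicit level -> log_levels[os.getenv("LOG_LEVEL", "INFO")]
logger = get_logger(__name__)

logger.info("suppressed at ERROR level")
logger.error("still emitted")
```

At ERROR (or any level above WARNING) the new branch also silences the SQLAlchemy `SAWarning` noise coming from dlt's merge and load jobs.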


@@ -18,7 +18,7 @@ async def ingest_data(data: Any, dataset_name: str, user: User):
     destination = get_dlt_destination()

     pipeline = dlt.pipeline(
-        pipeline_name="file_load_from_filesystem",
+        pipeline_name="metadata_extraction_pipeline",
         destination=destination,
     )
@@ -119,6 +119,7 @@ async def ingest_data(data: Any, dataset_name: str, user: User):
             await give_permission_on_document(user, data_id, "read")
             await give_permission_on_document(user, data_id, "write")

+        return file_paths

     db_engine = get_relational_engine()


@@ -89,9 +89,9 @@ async def main():
     from cognee.infrastructure.databases.relational import get_relational_engine

-    assert not os.path.exists(get_relational_engine().db_path), (
-        "SQLite relational database is not empty"
-    )
+    with open(get_relational_engine().db_path, "r") as file:
+        content = file.read()
+        assert content == "", "SQLite relational database is not empty"

     from cognee.infrastructure.databases.graph import get_graph_config


@@ -86,7 +86,7 @@ async def main():
     user = await get_default_user()
     history = await get_history(user.id)

-    assert len(history) == 10, "Search history is not correct."
+    assert len(history) == 8, "Search history is not correct."

     await cognee.prune.prune_data()
     assert not os.path.isdir(data_directory_path), "Local data files are not deleted"

poetry.lock (generated)

@@ -1692,14 +1692,14 @@ files = [
 [[package]]
 name = "dlt"
-version = "1.8.1"
+version = "1.9.0"
 description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run."
 optional = false
 python-versions = "<3.14,>=3.9"
 groups = ["main"]
 files = [
-    {file = "dlt-1.8.1-py3-none-any.whl", hash = "sha256:154699cc70e4263a294b576ca8d22bb7e153bfb872acabba08fcfecd9b9d285a"},
-    {file = "dlt-1.8.1.tar.gz", hash = "sha256:6ff9c56d7ea416cd01bce874348023042a441d6f83b35495d234efd709d9fd77"},
+    {file = "dlt-1.9.0-py3-none-any.whl", hash = "sha256:4b95f5ba243a4b694d33915145d71ed389499e68e76693b80219de53108a31b8"},
+    {file = "dlt-1.9.0.tar.gz", hash = "sha256:d274e9de1e993a2cf21862c9c34a457fa4ba3794e9be379d8f5a7530fbc709e9"},
 ]

 [package.dependencies]
@@ -1728,6 +1728,7 @@ semver = ">=3.0.0"
 setuptools = ">=65.6.0"
 simplejson = ">=3.17.5"
 sqlalchemy = {version = ">=1.4", optional = true, markers = "extra == \"sql-database\" or extra == \"sqlalchemy\" or extra == \"pyiceberg\""}
+sqlglot = ">=23.0.0"
 tenacity = ">=8.0.2"
 tomlkit = ">=0.11.3"
 typing-extensions = ">=4.8.0"
@@ -1744,7 +1745,7 @@ databricks = ["databricks-sdk (>=0.38.0)", "databricks-sql-connector (>=2.9.3,<4
 deltalake = ["deltalake (>=0.21.0)", "pyarrow (>=12.0.0,<18) ; python_version >= \"3.9\" and python_version < \"3.13\"", "pyarrow (>=18.0.0) ; python_version >= \"3.13\""]
 dremio = ["pyarrow (>=12.0.0,<18) ; python_version >= \"3.9\" and python_version < \"3.13\"", "pyarrow (>=18.0.0) ; python_version >= \"3.13\""]
 duckdb = ["duckdb (>=0.9)"]
-filesystem = ["botocore (>=1.28)", "s3fs (>=2022.4.0)", "sqlglot (>=20.0.0)"]
+filesystem = ["botocore (>=1.28)", "s3fs (>=2022.4.0)"]
 gcp = ["db-dtypes (>=1.2.0)", "gcsfs (>=2022.4.0)", "google-cloud-bigquery (>=2.26.0)", "grpcio (>=1.50.0)"]
 gs = ["gcsfs (>=2022.4.0)"]
 lancedb = ["lancedb (>=0.8.2) ; python_version < \"3.13\"", "pyarrow (>=12.0.0,<18) ; python_version >= \"3.9\" and python_version < \"3.13\"", "pyarrow (>=18.0.0) ; python_version >= \"3.13\"", "tantivy (>=0.22.0)"]
@@ -2585,7 +2586,7 @@ description = "Lightweight in-process concurrent programming"
 optional = false
 python-versions = ">=3.7"
 groups = ["main"]
-markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
+markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""
 files = [
     {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
     {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
@@ -9549,73 +9550,73 @@ markers = {main = "extra == \"deepeval\" or extra == \"docs\" or extra == \"eval
 [[package]]
 name = "sqlalchemy"
-version = "2.0.36"
+version = "2.0.39"
 description = "Database Abstraction Library"
 optional = false
 python-versions = ">=3.7"
 groups = ["main"]
 files = [
-    # 56 per-wheel/sdist sha256 entries for SQLAlchemy 2.0.36
+    # 56 per-wheel/sdist sha256 entries for sqlalchemy 2.0.39
 ]

 [package.dependencies]
-greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""}
+greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""}
 typing-extensions = ">=4.6.0"

 [package.extras]
@@ -9643,6 +9644,22 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
 pymysql = ["pymysql"]
 sqlcipher = ["sqlcipher3_binary"]

+[[package]]
+name = "sqlglot"
+version = "26.11.1"
+description = "An easily customizable SQL parser and transpiler"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "sqlglot-26.11.1-py3-none-any.whl", hash = "sha256:5659fb46937ed89da6854e30eb318f57110f5526ef555407a2bf8f714e70496d"},
+    {file = "sqlglot-26.11.1.tar.gz", hash = "sha256:e1e7c9bbabc9f8cefa35aa07daf3ac0048dacd9bc3131225785596beb0d844d6"},
+]
+
+[package.extras]
+dev = ["duckdb (>=0.6)", "maturin (>=1.4,<2.0)", "mypy", "pandas", "pandas-stubs", "pdoc", "pre-commit", "python-dateutil", "pytz", "ruff (==0.7.2)", "types-python-dateutil", "types-pytz", "typing_extensions"]
+rs = ["sqlglotrs (==0.4.0)"]
+
 [[package]]
 name = "squarify"
 version = "0.4.4"
@@ -11252,4 +11269,4 @@ weaviate = ["weaviate-client"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<=3.13"
-content-hash = "f1cdc7b38e1551ec9cc5a1eaf195dde0607c466e2c490d1d882f645082fee656"
+content-hash = "4bda223028508503b326912854c60fa4a5f60349370d26f22dd997d0dec11e01"


@@ -29,7 +29,7 @@ numpy = ">=1.26.4, <=2.1"
 pandas = "2.2.3"
 boto3 = "^1.26.125"
 botocore="^1.35.54"
-sqlalchemy = "2.0.36"
+sqlalchemy = "2.0.39"
 aiosqlite = "^0.20.0"
 tiktoken = "<=0.9.0"
 litellm = ">=1.57.4"
@@ -52,7 +52,7 @@ fastapi = {version = "0.115.7"}
 fastapi-users = {version = "14.0.0", extras = ["sqlalchemy"]}
 uvicorn = {version = "0.34.0", optional = true}
 gunicorn = {version = "^20.1.0", optional = true}
-dlt = {extras = ["sqlalchemy"], version = "^1.4.1"}
+dlt = {extras = ["sqlalchemy"], version = "^1.9.0"}
 qdrant-client = {version = "^1.9.0", optional = true}
 weaviate-client = {version = "4.9.6", optional = true}
 neo4j = {version = "^5.20.0", optional = true}