Merge pull request #95 from topoteretes/urgent_fix_deployment

Urgent fix deployment
This commit is contained in:
Vasilije 2024-05-26 16:13:13 +02:00 committed by GitHub
commit c63518ff5f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
70 changed files with 1037 additions and 695 deletions

View file

@ -25,14 +25,16 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"] os: ["ubuntu-latest", "macos-latest"] #, "windows-latest"
python-version: ["3.11.x"] python-version: ["3.11.x"]
# Test all python versions on ubuntu only # Test all python versions on ubuntu only
include: include:
- python-version: "3.9.x"
os: "ubuntu-latest"
- python-version: "3.10.x" - python-version: "3.10.x"
os: "ubuntu-latest" os: "ubuntu-latest"
- python-version: "3.12.x" # - python-version: "3.12.x"
os: "ubuntu-latest" # os: "ubuntu-latest"
defaults: defaults:
run: run:
@ -72,13 +74,16 @@ jobs:
- name: Install dependencies - name: Install dependencies
run: poetry install --no-interaction run: poetry install --no-interaction
# - name: Build with Poetry - name: Create .cognee_system directory and print path
# run: poetry build run: |
# mkdir .cognee_system
# - name: Install Package echo $(pwd)/.cognee_system
# run: |
# cd dist
# pip install *.whl - name: Run tests
run: poetry run pytest tests/
# - name: Download NLTK Punkt Tokenizer Models # - name: Download NLTK Punkt Tokenizer Models
# run: | # run: |
@ -90,11 +95,18 @@ jobs:
- name: Run test script - name: Run test script
env: env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
QDRANT_API_KEY: ${{ secrets.QDRANT_API_KEY }} LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }}
QDRANT_URL: ${{ secrets.QDRANT_API_URL }}
ENV: 'dev' ENV: 'dev'
run: poetry run python ./cognee/tests/test_library.py run: poetry run python ./cognee/tests/test_library.py
- name: Build with Poetry
run: poetry build
- name: Install Package
run: |
cd dist
pip install *.whl
# - run: | # - run: |
# poetry run # poetry run
# if: runner.os != 'Windows' # if: runner.os != 'Windows'

View file

@ -1,11 +1,16 @@
""" FastAPI server for the Cognee API. """ """ FastAPI server for the Cognee API. """
import os import os
import aiohttp import aiohttp
import uvicorn import uvicorn
import json import json
import logging import logging
from typing import Dict, Any, List, Union, Optional
from typing_extensions import Annotated
from fastapi import FastAPI, HTTPException, Form, File, UploadFile, Query
from fastapi.responses import JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
# Set up logging # Set up logging
logging.basicConfig( logging.basicConfig(
level=logging.INFO, # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL) level=logging.INFO, # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL)
@ -14,15 +19,10 @@ logging.basicConfig(
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
from cognee.config import Config from cognee.config import Config
config = Config() config = Config()
config.load() config.load()
from typing import Dict, Any, List, Union, Annotated, Literal, Optional
from fastapi import FastAPI, HTTPException, Form, File, UploadFile, Query
from fastapi.responses import JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
app = FastAPI(debug=True) app = FastAPI(debug=True)
origins = [ origins = [
@ -33,19 +33,12 @@ origins = [
app.add_middleware( app.add_middleware(
CORSMiddleware, CORSMiddleware,
allow_origins = origins, allow_origins=origins,
allow_credentials = True, allow_credentials=True,
allow_methods = ["OPTIONS", "GET", "POST", "DELETE"], allow_methods=["OPTIONS", "GET", "POST", "DELETE"],
allow_headers = ["*"], allow_headers=["*"],
) )
#
# from auth.cognito.JWTBearer import JWTBearer
# from auth.auth import jwks
#
# auth = JWTBearer(jwks)
@app.get("/") @app.get("/")
async def root(): async def root():
""" """
@ -53,7 +46,6 @@ async def root():
""" """
return {"message": "Hello, World, I am alive!"} return {"message": "Hello, World, I am alive!"}
@app.get("/health") @app.get("/health")
def health_check(): def health_check():
""" """
@ -61,11 +53,9 @@ def health_check():
""" """
return {"status": "OK"} return {"status": "OK"}
class Payload(BaseModel): class Payload(BaseModel):
payload: Dict[str, Any] payload: Dict[str, Any]
@app.get("/datasets", response_model=list) @app.get("/datasets", response_model=list)
async def get_datasets(): async def get_datasets():
from cognee import datasets from cognee import datasets
@ -74,77 +64,67 @@ async def get_datasets():
@app.delete("/datasets/{dataset_id}", response_model=dict) @app.delete("/datasets/{dataset_id}", response_model=dict)
async def delete_dataset(dataset_id: str): async def delete_dataset(dataset_id: str):
from cognee import datasets from cognee import datasets
datasets.delete_dataset(dataset_id) datasets.delete_dataset(dataset_id)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = "OK", content="OK",
) )
@app.get("/datasets/{dataset_id}/graph", response_model=list) @app.get("/datasets/{dataset_id}/graph", response_model=list)
async def get_dataset_graph(dataset_id: str): async def get_dataset_graph(dataset_id: str):
from cognee import utils from cognee import utils
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
graph_engine = infrastructure_config.get_config("graph_engine") graph_engine = infrastructure_config.get_config()["graph_engine"]
graph_client = await get_graph_client(graph_engine) graph_client = await get_graph_client(graph_engine)
graph_url = await utils.render_graph(graph_client.graph) graph_url = await utils.render_graph(graph_client.graph)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = str(graph_url), content=str(graph_url),
) )
@app.get("/datasets/{dataset_id}/data", response_model=list) @app.get("/datasets/{dataset_id}/data", response_model=list)
async def get_dataset_data(dataset_id: str): async def get_dataset_data(dataset_id: str):
from cognee import datasets from cognee import datasets
dataset_data = datasets.list_data(dataset_id) dataset_data = datasets.list_data(dataset_id)
if dataset_data is None: if dataset_data is None:
raise HTTPException(status_code = 404, detail = f"Dataset ({dataset_id}) not found.") raise HTTPException(status_code=404, detail=f"Dataset ({dataset_id}) not found.")
return [
return [dict( dict(
id = data["id"], id=data["id"],
name = f"{data['name']}.{data['extension']}", name=f"{data['name']}.{data['extension']}",
keywords = data["keywords"].split("|"), keywords=data["keywords"].split("|"),
filePath = data["file_path"], filePath=data["file_path"],
mimeType = data["mime_type"], mimeType=data["mime_type"],
) for data in dataset_data] )
for data in dataset_data
]
@app.get("/datasets/status", response_model=dict) @app.get("/datasets/status", response_model=dict)
async def get_dataset_status(datasets: Annotated[list, Query(alias = "dataset")] = None): async def get_dataset_status(datasets: Annotated[List[str], Query(alias="dataset")] = None):
from cognee import datasets as cognee_datasets from cognee import datasets as cognee_datasets
datasets_statuses = cognee_datasets.get_status(datasets) datasets_statuses = cognee_datasets.get_status(datasets)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = datasets_statuses content=datasets_statuses
) )
@app.get("/datasets/{dataset_id}/data/{data_id}/raw", response_class=FileResponse) @app.get("/datasets/{dataset_id}/data/{data_id}/raw", response_class=FileResponse)
async def get_raw_data(dataset_id: str, data_id: str): async def get_raw_data(dataset_id: str, data_id: str):
from cognee import datasets from cognee import datasets
dataset_data = datasets.list_data(dataset_id) dataset_data = datasets.list_data(dataset_id)
if dataset_data is None: if dataset_data is None:
raise HTTPException(status_code = 404, detail = f"Dataset ({dataset_id}) not found.") raise HTTPException(status_code=404, detail=f"Dataset ({dataset_id}) not found.")
data = [data for data in dataset_data if data["id"] == data_id][0] data = [data for data in dataset_data if data["id"] == data_id][0]
return data["file_path"] return data["file_path"]
class AddPayload(BaseModel): class AddPayload(BaseModel):
data: Union[str, UploadFile, List[Union[str, UploadFile]]] data: Union[str, UploadFile, List[Union[str, UploadFile]]]
dataset_id: str dataset_id: str
class Config: class Config:
arbitrary_types_allowed = True # This is required to allow the use of Union arbitrary_types_allowed = True
@app.post("/add", response_model=dict) @app.post("/add", response_model=dict)
async def add( async def add(
@ -153,7 +133,6 @@ async def add(
): ):
""" This endpoint is responsible for adding data to the graph.""" """ This endpoint is responsible for adding data to the graph."""
from cognee import add as cognee_add from cognee import add as cognee_add
try: try:
if isinstance(data, str) and data.startswith("http"): if isinstance(data, str) and data.startswith("http"):
if "github" in data: if "github" in data:
@ -182,69 +161,62 @@ async def add(
datasetId, datasetId,
) )
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = "OK" content="OK"
) )
except Exception as error: except Exception as error:
return JSONResponse( return JSONResponse(
status_code = 409, status_code=409,
content = { "error": str(error) } content={"error": str(error)}
) )
class CognifyPayload(BaseModel): class CognifyPayload(BaseModel):
datasets: list[str] datasets: List[str]
@app.post("/cognify", response_model=dict) @app.post("/cognify", response_model=dict)
async def cognify(payload: CognifyPayload): async def cognify(payload: CognifyPayload):
""" This endpoint is responsible for the cognitive processing of the content.""" """ This endpoint is responsible for the cognitive processing of the content."""
from cognee import cognify as cognee_cognify from cognee import cognify as cognee_cognify
try: try:
await cognee_cognify(payload.datasets) await cognee_cognify(payload.datasets)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = "OK" content="OK"
) )
except Exception as error: except Exception as error:
return JSONResponse( return JSONResponse(
status_code = 409, status_code=409,
content = { "error": error } content={"error": str(error)}
) )
class SearchPayload(BaseModel): class SearchPayload(BaseModel):
query_params: Dict[str, Any] query_params: Dict[str, Any]
@app.post("/search", response_model=dict) @app.post("/search", response_model=dict)
async def search(payload: SearchPayload): async def search(payload: SearchPayload):
""" This endpoint is responsible for searching for nodes in the graph.""" """ This endpoint is responsible for searching for nodes in the graph."""
from cognee import search as cognee_search from cognee import search as cognee_search
try: try:
search_type = payload.query_params["searchType"] search_type = payload.query_params["searchType"]
params = { params = {
"query": payload.query_params["query"], "query": payload.query_params["query"],
} }
results = await cognee_search(search_type, params) results = await cognee_search(search_type, params)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = json.dumps(results) content=json.dumps(results)
) )
except Exception as error: except Exception as error:
return JSONResponse( return JSONResponse(
status_code = 409, status_code=409,
content = { "error": error } content={"error": str(error)}
) )
@app.get("/settings", response_model=dict) @app.get("/settings", response_model=dict)
async def get_settings(): async def get_settings():
from cognee.modules.settings import get_settings from cognee.modules.settings import get_settings
return get_settings() return get_settings()
class LLMConfig(BaseModel): class LLMConfig(BaseModel):
provider: Union[Literal["openai"], Literal["ollama"], Literal["anthropic"]] provider: Union[Literal["openai"], Literal["ollama"], Literal["anthropic"]]
model: str model: str
@ -264,15 +236,14 @@ async def save_config(new_settings: SettingsPayload):
from cognee.modules.settings import save_llm_config, save_vector_db_config from cognee.modules.settings import save_llm_config, save_vector_db_config
if new_settings.llm is not None: if new_settings.llm is not None:
await save_llm_config(new_settings.llm) await save_llm_config(new_settings.llm)
if new_settings.vectorDB is not None: if new_settings.vectorDB is not None:
await save_vector_db_config(new_settings.vectorDB) await save_vector_db_config(new_settings.vectorDB)
return JSONResponse( return JSONResponse(
status_code = 200, status_code=200,
content = "OK", content="OK",
) )
def start_api_server(host: str = "0.0.0.0", port: int = 8000): def start_api_server(host: str = "0.0.0.0", port: int = 8000):
""" """
Start the API server using uvicorn. Start the API server using uvicorn.

View file

@ -8,6 +8,11 @@ from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.files.storage import LocalStorage from cognee.infrastructure.files.storage import LocalStorage
from cognee.modules.discovery import discover_directory_datasets from cognee.modules.discovery import discover_directory_datasets
from cognee.utils import send_telemetry from cognee.utils import send_telemetry
from cognee.base_config import get_base_config
base_config = get_base_config()
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
relational_config = get_relationaldb_config()
async def add(data: Union[BinaryIO, List[BinaryIO], str, List[str]], dataset_name: str = None): async def add(data: Union[BinaryIO, List[BinaryIO], str, List[str]], dataset_name: str = None):
@ -46,10 +51,10 @@ async def add(data: Union[BinaryIO, List[BinaryIO], str, List[str]], dataset_nam
return [] return []
async def add_files(file_paths: List[str], dataset_name: str): async def add_files(file_paths: List[str], dataset_name: str):
infra_config = infrastructure_config.get_config() # infra_config = infrastructure_config.get_config()
data_directory_path = infra_config["data_root_directory"] data_directory_path = base_config.data_root_directory
LocalStorage.ensure_directory_exists(infra_config["database_directory_path"]) LocalStorage.ensure_directory_exists(relational_config.database_directory_path)
processed_file_paths = [] processed_file_paths = []
@ -68,7 +73,7 @@ async def add_files(file_paths: List[str], dataset_name: str):
else: else:
processed_file_paths.append(file_path) processed_file_paths.append(file_path)
db = duckdb.connect(infra_config["database_path"]) db = duckdb.connect(relational_config.db_file_path)
destination = dlt.destinations.duckdb( destination = dlt.destinations.duckdb(
credentials = db, credentials = db,
@ -120,7 +125,7 @@ async def add_data_directory(data_path: str, dataset_name: str = None):
return await asyncio.gather(*results) return await asyncio.gather(*results)
def save_data_to_file(data: Union[str, BinaryIO], dataset_name: str, filename: str = None): def save_data_to_file(data: Union[str, BinaryIO], dataset_name: str, filename: str = None):
data_directory_path = infrastructure_config.get_config()["data_root_directory"] data_directory_path = base_config.data_root_directory
classified_data = ingestion.classify(data, filename) classified_data = ingestion.classify(data, filename)
# data_id = ingestion.identify(classified_data) # data_id = ingestion.identify(classified_data)

View file

@ -3,7 +3,9 @@ from uuid import UUID, uuid4
from typing import Union, BinaryIO, List from typing import Union, BinaryIO, List
import cognee.modules.ingestion as ingestion import cognee.modules.ingestion as ingestion
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
relational_config = get_relationaldb_config()
class DatasetException(Exception): class DatasetException(Exception):
message: str message: str
@ -16,7 +18,7 @@ async def add_standalone(
dataset_id: UUID = uuid4(), dataset_id: UUID = uuid4(),
dataset_name: str = None dataset_name: str = None
): ):
db_engine = infrastructure_config.get_config()["database_engine"] db_engine = relational_config.database_engine
if db_engine.is_db_done is not True: if db_engine.is_db_done is not True:
await db_engine.ensure_tables() await db_engine.ensure_tables()

View file

@ -6,6 +6,7 @@ import nltk
from nltk.corpus import stopwords from nltk.corpus import stopwords
from cognee.config import Config from cognee.config import Config
from cognee.infrastructure.data.chunking.LangchainChunkingEngine import LangchainChunkEngine from cognee.infrastructure.data.chunking.LangchainChunkingEngine import LangchainChunkEngine
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.embeddings.DefaultEmbeddingEngine import LiteLLMEmbeddingEngine from cognee.infrastructure.databases.vector.embeddings.DefaultEmbeddingEngine import LiteLLMEmbeddingEngine
from cognee.modules.cognify.graph.add_node_connections import group_nodes_by_layer, \ from cognee.modules.cognify.graph.add_node_connections import group_nodes_by_layer, \
graph_ready_output, connect_nodes_in_graph graph_ready_output, connect_nodes_in_graph
@ -29,10 +30,25 @@ from cognee.shared.data_models import ChunkStrategy, KnowledgeGraph
from cognee.utils import send_telemetry from cognee.utils import send_telemetry
from cognee.modules.tasks import create_task_status_table, update_task_status from cognee.modules.tasks import create_task_status_table, update_task_status
from cognee.shared.SourceCodeGraph import SourceCodeGraph from cognee.shared.SourceCodeGraph import SourceCodeGraph
from cognee.base_config import get_base_config
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.modules.cognify.config import get_cognify_config
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
graph_config = get_graph_config()
config = Config() config = Config()
config.load() config.load()
relational_config = get_relationaldb_config()
cognify_config = get_cognify_config()
chunk_config = get_chunk_config()
base_config = get_base_config()
embedding_config = get_embedding_config()
# aclient = instructor.patch(OpenAI()) # aclient = instructor.patch(OpenAI())
USER_ID = "default_user" USER_ID = "default_user"
@ -46,11 +62,11 @@ async def cognify(datasets: Union[str, List[str]] = None):
stopwords.ensure_loaded() stopwords.ensure_loaded()
create_task_status_table() create_task_status_table()
graph_db_type = infrastructure_config.get_config()["graph_engine"] graph_db_type = graph_config.graph_engine
graph_client = await get_graph_client(graph_db_type) graph_client = await get_graph_client(graph_db_type)
db_engine = infrastructure_config.get_config()["database_engine"] db_engine = relational_config.database_engine
if datasets is None or len(datasets) == 0: if datasets is None or len(datasets) == 0:
datasets = db_engine.get_datasets() datasets = db_engine.get_datasets()
@ -76,8 +92,8 @@ async def cognify(datasets: Union[str, List[str]] = None):
dataset_files.append((added_dataset, db_engine.get_files_metadata(added_dataset))) dataset_files.append((added_dataset, db_engine.get_files_metadata(added_dataset)))
chunk_engine = infrastructure_config.get_config()["chunk_engine"] chunk_engine = chunk_config.chunk_engine
chunk_strategy = infrastructure_config.get_config()["chunk_strategy"] chunk_strategy = chunk_config.chunk_strategy
async def process_batch(files_batch): async def process_batch(files_batch):
data_chunks = {} data_chunks = {}
@ -91,7 +107,7 @@ async def cognify(datasets: Union[str, List[str]] = None):
text = "empty file" text = "empty file"
if text == "": if text == "":
text = "empty file" text = "empty file"
subchunks = chunk_engine.chunk_data(chunk_strategy, text, config.chunk_size, config.chunk_overlap) subchunks = chunk_engine.chunk_data(chunk_strategy, text, chunk_config.chunk_size, chunk_config.chunk_overlap)
if dataset_name not in data_chunks: if dataset_name not in data_chunks:
data_chunks[dataset_name] = [] data_chunks[dataset_name] = []
@ -128,7 +144,7 @@ async def cognify(datasets: Union[str, List[str]] = None):
for (dataset_name, files) in dataset_files: for (dataset_name, files) in dataset_files:
for file_metadata in files: for file_metadata in files:
graph_topology = infrastructure_config.get_config()["graph_model"] graph_topology = graph_config.graph_model
if graph_topology == SourceCodeGraph: if graph_topology == SourceCodeGraph:
parent_node_id = f"{file_metadata['name']}.{file_metadata['extension']}" parent_node_id = f"{file_metadata['name']}.{file_metadata['extension']}"
@ -161,9 +177,10 @@ async def cognify(datasets: Union[str, List[str]] = None):
async def process_text(chunk_collection: str, chunk_id: str, input_text: str, file_metadata: dict, document_id: str): async def process_text(chunk_collection: str, chunk_id: str, input_text: str, file_metadata: dict, document_id: str):
print(f"Processing chunk ({chunk_id}) from document ({file_metadata['id']}).") print(f"Processing chunk ({chunk_id}) from document ({file_metadata['id']}).")
graph_client = await get_graph_client(infrastructure_config.get_config()["graph_engine"]) graph_client = await get_graph_client(graph_config.graph_engine)
print("graph_client", graph_client)
graph_topology = infrastructure_config.get_config()["graph_model"] graph_topology = cognify_config.graph_model
if graph_topology == SourceCodeGraph: if graph_topology == SourceCodeGraph:
classified_categories = [{"data_type": "text", "category_name": "Code and functions"}] classified_categories = [{"data_type": "text", "category_name": "Code and functions"}]
elif graph_topology == KnowledgeGraph: elif graph_topology == KnowledgeGraph:
@ -185,7 +202,7 @@ async def process_text(chunk_collection: str, chunk_id: str, input_text: str, fi
print(f"Chunk ({chunk_id}) summarized.") print(f"Chunk ({chunk_id}) summarized.")
cognitive_layers = await get_cognitive_layers(input_text, classified_categories) cognitive_layers = await get_cognitive_layers(input_text, classified_categories)
cognitive_layers = cognitive_layers[:config.cognitive_layers_limit] cognitive_layers = cognitive_layers[:cognify_config.cognitive_layers_limit]
try: try:
cognitive_layers = (await add_cognitive_layers(graph_client, document_id, cognitive_layers))[:2] cognitive_layers = (await add_cognitive_layers(graph_client, document_id, cognitive_layers))[:2]
@ -196,8 +213,8 @@ async def process_text(chunk_collection: str, chunk_id: str, input_text: str, fi
pass pass
if infrastructure_config.get_config()["connect_documents"] is True: if cognify_config.connect_documents is True:
db_engine = infrastructure_config.get_config()["database_engine"] db_engine = relational_config.database_engine
relevant_documents_to_connect = db_engine.fetch_cognify_data(excluded_document_id = document_id) relevant_documents_to_connect = db_engine.fetch_cognify_data(excluded_document_id = document_id)
list_of_nodes = [] list_of_nodes = []
@ -219,7 +236,7 @@ async def process_text(chunk_collection: str, chunk_id: str, input_text: str, fi
await connect_nodes_in_graph( await connect_nodes_in_graph(
graph_client, graph_client,
relationships, relationships,
score_threshold = infrastructure_config.get_config()["intra_layer_score_treshold"] score_threshold = cognify_config.intra_layer_score_treshold
) )
send_telemetry("cognee.cognify") send_telemetry("cognee.cognify")

View file

@ -1,80 +1,77 @@
""" This module is used to set the configuration of the system.""" """ This module is used to set the configuration of the system."""
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.base_config import get_base_config
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.modules.cognify.config import get_cognify_config
cognify_config = get_cognify_config()
chunk_config = get_chunk_config()
graph_config = get_graph_config()
base_config = get_base_config()
class config(): class config():
@staticmethod @staticmethod
def system_root_directory(system_root_directory: str): def system_root_directory(system_root_directory: str):
infrastructure_config.set_config({ base_config.system_root_directory = system_root_directory
"system_root_directory": system_root_directory
})
@staticmethod @staticmethod
def data_root_directory(data_root_directory: str): def data_root_directory(data_root_directory: str):
infrastructure_config.set_config({ base_config.data_root_directory = data_root_directory
"data_root_directory": data_root_directory
}) @staticmethod
def monitoring_tool(monitoring_tool: object):
base_config.monitoring_tool = monitoring_tool
@staticmethod @staticmethod
def set_classification_model(classification_model: object): def set_classification_model(classification_model: object):
infrastructure_config.set_config({ cognify_config.classification_model = classification_model
"classification_model": classification_model
})
@staticmethod @staticmethod
def set_summarization_model(summarization_model: object): def set_summarization_model(summarization_model: object):
infrastructure_config.set_config({ cognify_config.summarization_model=summarization_model
"summarization_model": summarization_model
})
@staticmethod @staticmethod
def set_labeling_model(labeling_model: object): def set_labeling_model(labeling_model: object):
infrastructure_config.set_config({ cognify_config.labeling_model =labeling_model
"labeling_model": labeling_model
})
@staticmethod @staticmethod
def set_graph_model(graph_model: object): def set_graph_model(graph_model: object):
infrastructure_config.set_config({ graph_config.graph_model =graph_model
"graph_model": graph_model
})
@staticmethod @staticmethod
def set_cognitive_layer_model(cognitive_layer_model: object): def set_cognitive_layer_model(cognitive_layer_model: object):
infrastructure_config.set_config({ cognify_config.cognitive_layer_model =cognitive_layer_model
"cognitive_layer_model": cognitive_layer_model
})
@staticmethod @staticmethod
def set_graph_engine(graph_engine: object): def set_graph_engine(graph_engine: object):
infrastructure_config.set_config({ graph_config.graph_engine =graph_engine
"graph_engine": graph_engine
})
@staticmethod @staticmethod
def llm_provider(llm_provider: str): def llm_provider(llm_provider: str):
infrastructure_config.set_config({ graph_config.llm_provider = llm_provider
"llm_provider": llm_provider
})
@staticmethod @staticmethod
def intra_layer_score_treshold(intra_layer_score_treshold: str): def intra_layer_score_treshold(intra_layer_score_treshold: str):
infrastructure_config.set_config({ cognify_config.intra_layer_score_treshold =intra_layer_score_treshold
"intra_layer_score_treshold": intra_layer_score_treshold
})
@staticmethod @staticmethod
def connect_documents(connect_documents: bool): def connect_documents(connect_documents: bool):
infrastructure_config.set_config({ cognify_config.connect_documents = connect_documents
"connect_documents": connect_documents
})
@staticmethod @staticmethod
def set_chunk_strategy(chunk_strategy: object): def set_chunk_strategy(chunk_strategy: object):
infrastructure_config.set_config({ chunk_config.chunk_strategy = chunk_strategy
"chunk_strategy": chunk_strategy
})
@staticmethod @staticmethod
def set_graph_topology(graph_topology: object): def set_graph_topology(graph_topology: object):
infrastructure_config.set_config({ get_cognify_config.graph_topology =graph_topology
"graph_topology": graph_topology
})

View file

@ -1,11 +1,14 @@
from duckdb import CatalogException from duckdb import CatalogException
from cognee.modules.discovery import discover_directory_datasets from cognee.modules.discovery import discover_directory_datasets
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
relational_config = get_relationaldb_config()
class datasets(): class datasets():
@staticmethod @staticmethod
def list_datasets(): def list_datasets():
db = infrastructure_config.get_config("database_engine") db = relational_config.db_engine
return db.get_datasets() return db.get_datasets()
@staticmethod @staticmethod
@ -14,7 +17,7 @@ class datasets():
@staticmethod @staticmethod
def list_data(dataset_name: str): def list_data(dataset_name: str):
db = infrastructure_config.get_config("database_engine") db = relational_config.db_engine
try: try:
return db.get_files_metadata(dataset_name) return db.get_files_metadata(dataset_name)
except CatalogException: except CatalogException:
@ -22,7 +25,7 @@ class datasets():
@staticmethod @staticmethod
def get_status(dataset_ids: list[str]) -> dict: def get_status(dataset_ids: list[str]) -> dict:
db = infrastructure_config.get_config("database_engine") db = relational_config.db_engine
try: try:
return db.get_data("cognee_task_status", { return db.get_data("cognee_task_status", {
"data_id": dataset_ids "data_id": dataset_ids
@ -32,7 +35,7 @@ class datasets():
@staticmethod @staticmethod
def delete_dataset(dataset_id: str): def delete_dataset(dataset_id: str):
db = infrastructure_config.get_config("database_engine") db = relational_config.db_engine
try: try:
return db.delete_table(dataset_id) return db.delete_table(dataset_id)
except CatalogException: except CatalogException:

View file

@ -1,11 +1,17 @@
from cognee.base_config import get_base_config
from cognee.infrastructure.files.storage import LocalStorage from cognee.infrastructure.files.storage import LocalStorage
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
base_config =get_base_config()
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vector_config = get_vectordb_config()
class prune(): class prune():
@staticmethod @staticmethod
async def prune_data(): async def prune_data():
data_root_directory = infrastructure_config.get_config()["data_root_directory"] data_root_directory = base_config.data_root_directory
LocalStorage.remove_all(data_root_directory) LocalStorage.remove_all(data_root_directory)
@staticmethod @staticmethod
@ -13,11 +19,11 @@ class prune():
infra_config = infrastructure_config.get_config() infra_config = infrastructure_config.get_config()
if graph: if graph:
graph_client = await get_graph_client(infra_config["graph_engine"]) graph_client = await get_graph_client(graph_config.graph_engine)
await graph_client.delete_graph() await graph_client.delete_graph()
if vector: if vector:
vector_client = infra_config["vector_engine"] vector_client = vector_config.vector_engine
await vector_client.prune() await vector_client.prune()

View file

@ -13,6 +13,8 @@ from cognee.modules.search.graph.search_summary import search_summary
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.utils import send_telemetry from cognee.utils import send_telemetry
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
class SearchType(Enum): class SearchType(Enum):
ADJACENT = 'ADJACENT' ADJACENT = 'ADJACENT'
@ -49,7 +51,7 @@ async def search(search_type: str, params: Dict[str, Any]) -> List:
async def specific_search(query_params: List[SearchParameters]) -> List: async def specific_search(query_params: List[SearchParameters]) -> List:
graph_client = await get_graph_client(infrastructure_config.get_config()["graph_engine"]) graph_client = await get_graph_client(graph_config.graph_engine)
graph = graph_client.graph graph = graph_client.graph
search_functions: Dict[SearchType, Callable] = { search_functions: Dict[SearchType, Callable] = {

View file

@ -7,8 +7,10 @@ from cognee.modules.topology.topology import TopologyEngine, GitHubRepositoryMod
import pandas as pd import pandas as pd
from pydantic import BaseModel from pydantic import BaseModel
USER_ID = "default_user" USER_ID = "default_user"
async def add_topology(directory="example", model=GitHubRepositoryModel):
async def add_topology(directory: str = "example", model: BaseModel = GitHubRepositoryModel) -> Any:
graph_db_type = infrastructure_config.get_config()["graph_engine"] graph_db_type = infrastructure_config.get_config()["graph_engine"]
graph_client = await get_graph_client(graph_db_type) graph_client = await get_graph_client(graph_db_type)
@ -16,7 +18,7 @@ async def add_topology(directory="example", model=GitHubRepositoryModel):
graph_topology = infrastructure_config.get_config()["graph_topology"] graph_topology = infrastructure_config.get_config()["graph_topology"]
engine = TopologyEngine() engine = TopologyEngine()
topology = await engine.infer_from_directory_structure(node_id =USER_ID , repository = directory, model=model) topology = await engine.infer_from_directory_structure(node_id=USER_ID, repository=directory, model=model)
def flatten_model(model: BaseModel, parent_id: Optional[str] = None) -> Dict[str, Any]: def flatten_model(model: BaseModel, parent_id: Optional[str] = None) -> Dict[str, Any]:
"""Flatten a single Pydantic model to a dictionary handling nested structures.""" """Flatten a single Pydantic model to a dictionary handling nested structures."""
@ -42,17 +44,16 @@ async def add_topology(directory="example", model=GitHubRepositoryModel):
else: else:
return [] return []
def flatten_repository(repo_model): def flatten_repository(repo_model: BaseModel) -> List[Dict[str, Any]]:
""" Flatten the entire repository model, starting with the top-level model """ """ Flatten the entire repository model, starting with the top-level model """
return recursive_flatten(repo_model) return recursive_flatten(repo_model)
flt_topology = flatten_repository(topology) flt_topology = flatten_repository(topology)
df =pd.DataFrame(flt_topology) df = pd.DataFrame(flt_topology)
print(df.head(10)) print(df.head(10))
for _, row in df.iterrows(): for _, row in df.iterrows():
node_data = row.to_dict() node_data = row.to_dict()
node_id = node_data.pop('node_id') node_id = node_data.pop('node_id')
@ -65,9 +66,10 @@ async def add_topology(directory="example", model=GitHubRepositoryModel):
return graph_client.graph return graph_client.graph
if __name__ == "__main__": if __name__ == "__main__":
async def test(): async def test() -> None:
# Uncomment and modify the following lines as needed
# await prune.prune_system() # await prune.prune_system()
# # #
# from cognee.api.v1.add import add # from cognee.api.v1.add import add
# data_directory_path = os.path.abspath("../../../.data") # data_directory_path = os.path.abspath("../../../.data")
# # print(data_directory_path) # # print(data_directory_path)
@ -75,7 +77,7 @@ if __name__ == "__main__":
# # cognee_directory_path = os.path.abspath("../.cognee_system") # # cognee_directory_path = os.path.abspath("../.cognee_system")
# # config.system_root_directory(cognee_directory_path) # # config.system_root_directory(cognee_directory_path)
# #
# await add("data://" +data_directory_path, "example") # await add("data://" + data_directory_path, "example")
# graph = await add_topology() # graph = await add_topology()
@ -88,4 +90,4 @@ if __name__ == "__main__":
await render_graph(graph_client.graph, include_color=True, include_nodes=False, include_size=False) await render_graph(graph_client.graph, include_color=True, include_nodes=False, include_size=False)
import asyncio import asyncio
asyncio.run(test()) asyncio.run(test())

30
cognee/base_config.py Normal file
View file

@ -0,0 +1,30 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.root_dir import get_absolute_path
from cognee.shared.data_models import MonitoringTool
# Monitoring tool
class BaseConfig(BaseSettings):
system_root_directory: str = get_absolute_path(".cognee_system")
data_root_directory: str = get_absolute_path(".data")
monitoring_tool: object = MonitoringTool.LANGFUSE
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
"system_root_directory": self.system_root_directory,
"data_root_directory": self.data_root_directory,
"monitoring_tool": self.monitoring_tool,
}
@lru_cache
def get_base_config():
return BaseConfig()

View file

@ -9,14 +9,17 @@ from pathlib import Path
from dotenv import load_dotenv from dotenv import load_dotenv
from cognee.root_dir import get_absolute_path from cognee.root_dir import get_absolute_path
from cognee.shared.data_models import ChunkStrategy, DefaultGraphModel from cognee.shared.data_models import ChunkStrategy, DefaultGraphModel
logging.basicConfig(level=logging.DEBUG)
def load_dontenv(): def load_dontenv():
base_dir = Path(__file__).resolve().parent.parent base_dir = Path(__file__).resolve().parent.parent
# Load the .env file from the base directory # Load the .env file from the base directory
dotenv_path = base_dir / ".env" dotenv_path = base_dir / ".env"
load_dotenv(dotenv_path=dotenv_path, override = True) load_dotenv(dotenv_path=dotenv_path, override = True)
load_dontenv() try:
load_dontenv()
except:
pass
@dataclass @dataclass
class Config: class Config:
@ -32,6 +35,7 @@ class Config:
system_root_directory = get_absolute_path(".cognee_system") system_root_directory = get_absolute_path(".cognee_system")
logging.info("system_root_directory: %s", system_root_directory)
data_root_directory = os.getenv("DATA_PATH", get_absolute_path(".data")) data_root_directory = os.getenv("DATA_PATH", get_absolute_path(".data"))
vectordb: str = os.getenv("VECTORDB", "weaviate") vectordb: str = os.getenv("VECTORDB", "weaviate")
@ -40,21 +44,14 @@ class Config:
qdrant_url: str = os.getenv("QDRANT_URL", None) qdrant_url: str = os.getenv("QDRANT_URL", None)
qdrant_api_key: str = os.getenv("QDRANT_API_KEY", None) qdrant_api_key: str = os.getenv("QDRANT_API_KEY", None)
db_path = str = os.getenv("COGNEE_DB_PATH", "databases")
db_name: str = os.getenv("DB_NAME", "cognee.db")
db_host: str = os.getenv("DB_HOST", "localhost")
db_port: str = os.getenv("DB_PORT", "5432")
db_user: str = os.getenv("DB_USER", "cognee")
db_password: str = os.getenv("DB_PASSWORD", "cognee")
sqlalchemy_logging: bool = os.getenv("SQLALCHEMY_LOGGING", True)
graph_filename = os.getenv("GRAPH_NAME", "cognee_graph.pkl") graph_filename = os.getenv("GRAPH_NAME", "cognee_graph.pkl")
# Model parameters # Model parameters
llm_provider: str = os.getenv("LLM_PROVIDER", "openai") #openai, or custom or ollama llm_provider: str = os.getenv("LLM_PROVIDER", "openai") #openai, or custom or ollama
llm_model: str = os.getenv("LLM_MODEL", None) llm_model: str = os.getenv("LLM_MODEL", "gpt-4")
llm_api_key: str = os.getenv("LLM_API_KEY", None) llm_api_key: str = os.getenv("LLM_API_KEY", os.getenv("OPENAI_API_KEY"))
llm_endpoint: str = os.getenv("LLM_ENDPOINT", None) llm_endpoint: str = os.getenv("LLM_ENDPOINT", None)
# custom_model: str = os.getenv("CUSTOM_LLM_MODEL", "llama3-70b-8192") #"mistralai/Mixtral-8x7B-Instruct-v0.1" # custom_model: str = os.getenv("CUSTOM_LLM_MODEL", "llama3-70b-8192") #"mistralai/Mixtral-8x7B-Instruct-v0.1"
@ -67,10 +64,10 @@ class Config:
# model_endpoint: str = "openai" # model_endpoint: str = "openai"
# llm_api_key: Optional[str] = os.getenv("OPENAI_API_KEY") # llm_api_key: Optional[str] = os.getenv("OPENAI_API_KEY")
openai_temperature: float = float(os.getenv("OPENAI_TEMPERATURE", 0.0)) openai_temperature: float = float(os.getenv("OPENAI_TEMPERATURE", 0.0))
openai_embedding_model = "text-embedding-3-large" # openai_embedding_model = "text-embedding-3-large"
openai_embedding_dimensions = 3072 # openai_embedding_dimensions = 3072
litellm_embedding_model = "text-embedding-3-large" # litellm_embedding_model = "text-embedding-3-large"
litellm_embedding_dimensions = 3072 # litellm_embedding_dimensions = 3072
graphistry_username = os.getenv("GRAPHISTRY_USERNAME") graphistry_username = os.getenv("GRAPHISTRY_USERNAME")
graphistry_password = os.getenv("GRAPHISTRY_PASSWORD") graphistry_password = os.getenv("GRAPHISTRY_PASSWORD")
@ -90,40 +87,9 @@ class Config:
# Monitoring tool # Monitoring tool
monitoring_tool: str = os.getenv("MONITORING_TOOL", MonitoringTool.LANGFUSE) monitoring_tool: str = os.getenv("MONITORING_TOOL", MonitoringTool.LANGFUSE)
if (
os.getenv("ENV") == "prod"
or os.getenv("ENV") == "dev"
or os.getenv("AWS_ENV") == "dev"
or os.getenv("AWS_ENV") == "prd"
):
load_dotenv()
logging.info("graph_db_url: %s", os.getenv("GRAPH_DB_URL_PROD"))
graph_database_url: str = os.getenv("GRAPH_DB_URL_PROD")
graph_database_username: str = os.getenv("GRAPH_DB_USER")
graph_database_password: str = os.getenv("GRAPH_DB_PW")
else:
logging.info("graph_db_url: %s", os.getenv("GRAPH_DB_URL"))
graph_database_url: str = os.getenv("GRAPH_DB_URL")
graph_database_username: str = os.getenv("GRAPH_DB_USER")
graph_database_password: str = os.getenv("GRAPH_DB_PW")
weaviate_url: str = os.getenv("WEAVIATE_URL") weaviate_url: str = os.getenv("WEAVIATE_URL")
weaviate_api_key: str = os.getenv("WEAVIATE_API_KEY") weaviate_api_key: str = os.getenv("WEAVIATE_API_KEY")
if (
os.getenv("ENV") == "prod"
or os.getenv("ENV") == "dev"
or os.getenv("AWS_ENV") == "dev"
or os.getenv("AWS_ENV") == "prd"
):
load_dotenv()
db_host: str = os.getenv("POSTGRES_HOST")
logging.info("db_host: %s", db_host)
db_user: str = os.getenv("POSTGRES_USER")
db_password: str = os.getenv("POSTGRES_PASSWORD")
db_name: str = os.getenv("POSTGRES_DB")
# Model parameters and configuration for interlayer scoring # Model parameters and configuration for interlayer scoring
intra_layer_score_treshold: float = 0.98 intra_layer_score_treshold: float = 0.98
@ -132,13 +98,16 @@ class Config:
anon_clientid: Optional[str] = field(default_factory=lambda: uuid.uuid4().hex) anon_clientid: Optional[str] = field(default_factory=lambda: uuid.uuid4().hex)
#Chunking parameters #Chunking parameters
chunk_size: int = 1500 # chunk_size: int = 1500
chunk_overlap: int = 0 # chunk_overlap: int = 0
chunk_strategy: str = ChunkStrategy.PARAGRAPH # chunk_strategy: str = ChunkStrategy.PARAGRAPH
def load(self): def load(self):
"""Loads the configuration from a file or environment variables.""" """Loads the configuration from a file or environment variables."""
load_dontenv() try:
load_dontenv()
except:
pass
config = configparser.ConfigParser() config = configparser.ConfigParser()
config.read(self.config_path) config.read(self.config_path)

View file

@ -1,7 +1,11 @@
import logging
import os
from cognee.config import Config from cognee.config import Config
from .data.chunking.config import get_chunk_config
from .databases.relational import DuckDBAdapter, DatabaseEngine from .databases.relational import DuckDBAdapter, DatabaseEngine
from .databases.vector.vector_db_interface import VectorDBInterface from .databases.vector.vector_db_interface import VectorDBInterface
from .databases.vector.embeddings.DefaultEmbeddingEngine import DefaultEmbeddingEngine # from .databases.vector.embeddings.DefaultEmbeddingEngine import DefaultEmbeddingEngine
from .llm.llm_interface import LLMInterface from .llm.llm_interface import LLMInterface
from .llm.get_llm_client import get_llm_client from .llm.get_llm_client import get_llm_client
from .files.storage import LocalStorage from .files.storage import LocalStorage
@ -9,10 +13,16 @@ from .data.chunking.DefaultChunkEngine import DefaultChunkEngine
from ..shared.data_models import GraphDBType, DefaultContentPrediction, KnowledgeGraph, SummarizedContent, \ from ..shared.data_models import GraphDBType, DefaultContentPrediction, KnowledgeGraph, SummarizedContent, \
LabeledContent, DefaultCognitiveLayer LabeledContent, DefaultCognitiveLayer
logging.basicConfig(level=logging.DEBUG)
config = Config() config = Config()
config.load() config.load()
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_db_config = get_vectordb_config()
relational = get_relationaldb_config()
chunk_config = get_chunk_config()
class InfrastructureConfig(): class InfrastructureConfig():
system_root_directory: str = config.system_root_directory system_root_directory: str = config.system_root_directory
data_root_directory: str = config.data_root_directory data_root_directory: str = config.data_root_directory
llm_provider: str = config.llm_provider llm_provider: str = config.llm_provider
@ -31,7 +41,7 @@ class InfrastructureConfig():
connect_documents = config.connect_documents connect_documents = config.connect_documents
database_directory_path: str = None database_directory_path: str = None
database_file_path: str = None database_file_path: str = None
chunk_strategy = config.chunk_strategy chunk_strategy = chunk_config.chunk_strategy
chunk_engine = None chunk_engine = None
graph_topology = config.graph_topology graph_topology = config.graph_topology
monitoring_tool = config.monitoring_tool monitoring_tool = config.monitoring_tool
@ -41,15 +51,15 @@ class InfrastructureConfig():
llm_api_key: str = None llm_api_key: str = None
def get_config(self, config_entity: str = None) -> dict: def get_config(self, config_entity: str = None) -> dict:
if (config_entity is None or config_entity == "database_engine") and self.database_engine is None: if (config_entity is None or config_entity == "database_engine") and self.database_engine is None:
db_path = self.system_root_directory + "/" + config.db_path
db_path = os.path.join(self.system_root_directory,relational.db_path)
LocalStorage.ensure_directory_exists(db_path) LocalStorage.ensure_directory_exists(db_path)
self.database_engine = DuckDBAdapter( self.database_engine = relational.db_engine
db_name = config.db_name,
db_path = db_path
)
if self.graph_engine is None: if self.graph_engine is None:
self.graph_engine = GraphDBType.NETWORKX self.graph_engine = GraphDBType.NETWORKX
@ -72,17 +82,17 @@ class InfrastructureConfig():
if self.intra_layer_score_treshold is None: if self.intra_layer_score_treshold is None:
self.intra_layer_score_treshold = config.intra_layer_score_treshold self.intra_layer_score_treshold = config.intra_layer_score_treshold
if self.embedding_engine is None: # if self.embedding_engine is None:
self.embedding_engine = DefaultEmbeddingEngine() # self.embedding_engine = DefaultEmbeddingEngine()
if self.connect_documents is None: if self.connect_documents is None:
self.connect_documents = config.connect_documents self.connect_documents = config.connect_documents
if self.chunk_strategy is None: if self.chunk_strategy is None:
self.chunk_strategy = config.chunk_strategy self.chunk_strategy = chunk_config.chunk_strategy
if self.chunk_engine is None: if self.chunk_engine is None:
self.chunk_engine = DefaultChunkEngine() self.chunk_engine = chunk_config.chunk_engine
if self.graph_topology is None: if self.graph_topology is None:
self.graph_topology = config.graph_topology self.graph_topology = config.graph_topology
@ -91,13 +101,13 @@ class InfrastructureConfig():
self.llm_engine = get_llm_client() self.llm_engine = get_llm_client()
if (config_entity is None or config_entity == "database_directory_path") and self.database_directory_path is None: if (config_entity is None or config_entity == "database_directory_path") and self.database_directory_path is None:
self.database_directory_path = self.system_root_directory + "/" + config.db_path self.database_directory_path = self.system_root_directory + "/" + relational.db_path
if self.database_directory_path is None: if self.database_directory_path is None:
self.database_directory_path = self.system_root_directory + "/" + config.db_path self.database_directory_path = self.system_root_directory + "/" + relational.db_path
if (config_entity is None or config_entity == "database_file_path") and self.database_file_path is None: if (config_entity is None or config_entity == "database_file_path") and self.database_file_path is None:
self.database_file_path = self.system_root_directory + "/" + config.db_path + "/" + config.db_name
self.database_file_path = self.system_root_directory + "/" + relational.db_path + "/" + relational.db_name
if (config_entity is None or config_entity == "vector_engine") and self.vector_engine is None: if (config_entity is None or config_entity == "vector_engine") and self.vector_engine is None:
try: try:
@ -126,17 +136,8 @@ class InfrastructureConfig():
) )
self.vector_engine_choice = "qdrant" self.vector_engine_choice = "qdrant"
else: else:
from .databases.vector.lancedb.LanceDBAdapter import LanceDBAdapter self.vector_engine = vector_db_config.vector_engine
config.load() self.vector_engine_choice = vector_db_config.vector_engine_choice
lance_db_path = self.database_directory_path + "/cognee.lancedb"
LocalStorage.ensure_directory_exists(lance_db_path)
self.vector_engine = LanceDBAdapter(
url = lance_db_path,
api_key = None,
embedding_engine = self.embedding_engine,
)
self.vector_engine_choice = "lancedb"
if config_entity is not None: if config_entity is not None:
return getattr(self, config_entity) return getattr(self, config_entity)

View file

@ -0,0 +1,27 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.infrastructure.data.chunking.DefaultChunkEngine import DefaultChunkEngine
from cognee.shared.data_models import ChunkStrategy
class ChunkConfig(BaseSettings):
chunk_size: int = 1500
chunk_overlap: int = 0
chunk_strategy: object = ChunkStrategy.PARAGRAPH
chunk_engine: object = DefaultChunkEngine()
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
"chunk_size": self.chunk_size,
"chunk_overlap": self.chunk_overlap,
"chunk_strategy": self.chunk_strategy
}
@lru_cache
def get_chunk_config():
return ChunkConfig()

View file

@ -0,0 +1,40 @@
""" This module contains the configuration for the graph database. """
import os
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.base_config import get_base_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
from cognee.shared.data_models import DefaultGraphModel, GraphDBType
relational_config = get_relationaldb_config()
base_config = get_base_config()
class GraphConfig(BaseSettings):
graph_filename: str = "cognee_graph.pkl"
graph_database_provider: str = "NETWORKX"
graph_database_url: str = ""
graph_database_username: str = ""
graph_database_password: str = ""
graph_database_port: int = 123
graph_file_path: str = os.path.join(relational_config.database_directory_path,graph_filename)
graph_engine: object = GraphDBType.NETWORKX
graph_model: object = DefaultGraphModel
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
"graph_filename": self.graph_filename,
"graph_database_provider": self.graph_database_provider,
"graph_topology": self.graph_topology,
"graph_file_path": self.graph_file_path,
"graph_database_url": self.graph_database_url,
"graph_database_username": self.graph_database_username,
"graph_database_password": self.graph_database_password,
"graph_database_port": self.graph_database_port,
"graph_engine": self.graph_engine
}
@lru_cache
def get_graph_config():
return GraphConfig()

View file

@ -1,18 +1,14 @@
"""Factory function to get the appropriate graph client based on the graph type.""" """Factory function to get the appropriate graph client based on the graph type."""
from cognee.config import Config
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure import infrastructure_config from .config import get_graph_config
from .graph_db_interface import GraphDBInterface from .graph_db_interface import GraphDBInterface
from .networkx.adapter import NetworkXAdapter from .networkx.adapter import NetworkXAdapter
config = get_graph_config()
config = Config()
config.load()
async def get_graph_client(graph_type: GraphDBType, graph_file_name: str = None) -> GraphDBInterface : async def get_graph_client(graph_type: GraphDBType, graph_file_name: str = None) -> GraphDBInterface :
"""Factory function to get the appropriate graph client based on the graph type.""" """Factory function to get the appropriate graph client based on the graph type."""
graph_file_path = f"{infrastructure_config.get_config('database_directory_path')}/{graph_file_name if graph_file_name else config.graph_filename}"
if graph_type == GraphDBType.NEO4J: if graph_type == GraphDBType.NEO4J:
try: try:
@ -25,10 +21,20 @@ async def get_graph_client(graph_type: GraphDBType, graph_file_name: str = None)
) )
except: except:
pass pass
graph_client = NetworkXAdapter(filename = graph_file_path)
elif graph_type == GraphDBType.FALKORDB:
try:
from .falkordb.adapter import FalcorDBAdapter
return FalcorDBAdapter(
graph_database_url = config.graph_database_url,
graph_database_username = config.graph_database_username,
graph_database_password = config.graph_database_password,
graph_database_port = config.graph_database_port
)
except:
pass
graph_client = NetworkXAdapter(filename = config.graph_file_path)
if (graph_client.graph is None): if (graph_client.graph is None):
await graph_client.load_graph_from_file() await graph_client.load_graph_from_file()

View file

@ -56,12 +56,6 @@ class Neo4jAdapter(GraphDBInterface):
if "name" not in serialized_properties: if "name" not in serialized_properties:
serialized_properties["name"] = node_id serialized_properties["name"] = node_id
# serialized_properties["created_at"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# serialized_properties["updated_at"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# properties = ", ".join(f"{property_name}: ${property_name}" for property_name in serialized_properties.keys())
query = f"""MERGE (node:`{node_id}` {{id: $node_id}}) query = f"""MERGE (node:`{node_id}` {{id: $node_id}})
ON CREATE SET node += $properties ON CREATE SET node += $properties
RETURN ID(node) AS internal_id, node.id AS nodeId""" RETURN ID(node) AS internal_id, node.id AS nodeId"""
@ -85,30 +79,6 @@ class Neo4jAdapter(GraphDBInterface):
node_properties = node_properties, node_properties = node_properties,
) )
# serialized_properties = self.serialize_properties(node_properties)
# if "name" not in serialized_properties:
# serialized_properties["name"] = node_id
# nodes_data.append({
# "node_id": node_id,
# "properties": serialized_properties,
# })
# query = """UNWIND $nodes_data AS node_data
# MERGE (node:{id: node_data.node_id})
# ON CREATE SET node += node_data.properties
# RETURN ID(node) AS internal_id, node.id AS id"""
# params = {"nodes_data": nodes_data}
# result = await self.query(query, params)
# await self.close()
# return result
async def extract_node_description(self, node_id: str): async def extract_node_description(self, node_id: str):
query = """MATCH (n)-[r]->(m) query = """MATCH (n)-[r]->(m)
WHERE n.id = $node_id WHERE n.id = $node_id
@ -138,7 +108,7 @@ class Neo4jAdapter(GraphDBInterface):
query = """MATCH (node) WHERE node.layer_id IS NOT NULL query = """MATCH (node) WHERE node.layer_id IS NOT NULL
RETURN node""" RETURN node"""
return [result['node'] for result in (await self.query(query))] return [result["node"] for result in (await self.query(query))]
async def extract_node(self, node_id: str): async def extract_node(self, node_id: str):
query= """ query= """
@ -146,7 +116,7 @@ class Neo4jAdapter(GraphDBInterface):
RETURN node RETURN node
""" """
results = [node['node'] for node in (await self.query(query, dict(node_id = node_id)))] results = [node["node"] for node in (await self.query(query, dict(node_id = node_id)))]
return results[0] if len(results) > 0 else None return results[0] if len(results) > 0 else None
@ -163,10 +133,12 @@ class Neo4jAdapter(GraphDBInterface):
from_node = from_node.replace(":", "_") from_node = from_node.replace(":", "_")
to_node = to_node.replace(":", "_") to_node = to_node.replace(":", "_")
query = f"""MATCH (from_node:`{from_node}` {{id: $from_node}}), (to_node:`{to_node}` {{id: $to_node}}) query = f"""MATCH (from_node:`{from_node}`
MERGE (from_node)-[r:`{relationship_name}`]->(to_node) {{id: $from_node}}),
SET r += $properties (to_node:`{to_node}` {{id: $to_node}})
RETURN r""" MERGE (from_node)-[r:`{relationship_name}`]->(to_node)
SET r += $properties
RETURN r"""
params = { params = {
"from_node": from_node, "from_node": from_node,
@ -192,30 +164,6 @@ class Neo4jAdapter(GraphDBInterface):
edge_properties = edge_properties edge_properties = edge_properties
) )
# Filter out None values and do not serialize; Neo4j can handle complex types like arrays directly
# serialized_properties = self.serialize_properties(edge_properties)
# edges_data.append({
# "from_node": from_node,
# "to_node": to_node,
# "relationship_name": relationship_name,
# "properties": serialized_properties
# })
# query = """UNWIND $edges_data AS edge_data
# MATCH (from_node:{id: edge_data.from_node}), (to_node:{id: edge_data.to_node})
# MERGE (from_node)-[r:{edge_data.relationship_name}]->(to_node)
# ON CREATE SET r += edge_data.properties
# RETURN r"""
# params = {"edges_data": edges_data}
# result = await self.query(query, params)
# await self.close()
# return result
async def filter_nodes(self, search_criteria): async def filter_nodes(self, search_criteria):
query = f"""MATCH (node) query = f"""MATCH (node)

View file

@ -0,0 +1,40 @@
import os
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.infrastructure.databases.relational import DuckDBAdapter
from cognee.base_config import get_base_config
config = get_base_config()
class RelationalConfig(BaseSettings):
db_path: str = os.path.join(config.system_root_directory,"databases")
db_name: str = "cognee.db"
db_host: str = "localhost"
db_port: str = "5432"
db_user: str = "cognee"
db_password: str = "cognee"
db_engine: object = DuckDBAdapter(
db_name=db_name,
db_path=db_path
)
database_engine: object = db_engine
db_file_path:str = os.path.join(db_path, db_name)
database_path: str = os.path.join(config.system_root_directory, "databases")
database_directory_path: str = db_path
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
"db_path": self.db_path,
"db_name": self.db_name,
"db_host": self.db_host,
"db_port": self.db_port,
"db_user": self.db_user,
"db_password": self.db_password,
"db_engine": self.db_engine,
"database_path": self.database_path,
}
@lru_cache
def get_relationaldb_config():
return RelationalConfig()

View file

@ -1,8 +1,9 @@
import duckdb import duckdb
import os
class DuckDBAdapter(): class DuckDBAdapter():
def __init__(self, db_path: str, db_name: str): def __init__(self, db_path: str, db_name: str):
db_location = db_path + "/" + db_name
db_location = os.path.abspath(os.path.join(db_path, db_name))
self.get_connection = lambda: duckdb.connect(db_location) self.get_connection = lambda: duckdb.connect(db_location)

View file

@ -0,0 +1,38 @@
import os
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
from cognee.infrastructure.databases.vector.lancedb.LanceDBAdapter import LanceDBAdapter
from cognee.infrastructure.databases.vector.embeddings.config import get_embedding_config
from cognee.infrastructure.files.storage import LocalStorage
embeddings_config = get_embedding_config()
relational_config = get_relationaldb_config()
class VectorConfig(BaseSettings):
vector_db_url: str = ""
vector_db_key: str = ""
vector_db_path: str = os.path.join(relational_config.database_directory_path + "cognee.lancedb")
vector_engine: object = LanceDBAdapter(
url = vector_db_path,
api_key = None,
embedding_engine = embeddings_config.embedding_engine,
)
vector_engine_choice:str = "lancedb"
LocalStorage.ensure_directory_exists(vector_db_path)
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict:
return {
"vector_db_url": self.vector_db_url,
"vector_db_key": self.vector_db_key,
"vector_db_path": self.vector_db_path,
"vector_engine": self.vector_engine,
"vector_engine_choice": self.vector_engine_choice,
}
@lru_cache
def get_vectordb_config():
return VectorConfig()

View file

@ -1,53 +1,59 @@
import asyncio import asyncio
from typing import List from typing import List, Optional
import instructor
from openai import AsyncOpenAI from openai import AsyncOpenAI
from fastembed import TextEmbedding from fastembed import TextEmbedding
from cognee.config import Config
from cognee.root_dir import get_absolute_path from cognee.root_dir import get_absolute_path
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine
from litellm import aembedding from litellm import aembedding
import litellm import litellm
litellm.set_verbose = True litellm.set_verbose = True
config = Config()
config.load()
class DefaultEmbeddingEngine(EmbeddingEngine): class DefaultEmbeddingEngine(EmbeddingEngine):
embedding_model: str
embedding_dimensions: int
def __init__(
self,
embedding_model: Optional[str],
embedding_dimensions: Optional[int],
):
self.embedding_model = embedding_model
self.embedding_dimensions = embedding_dimensions
async def embed_text(self, text: List[str]) -> List[float]: async def embed_text(self, text: List[str]) -> List[float]:
embedding_model = TextEmbedding(model_name = config.embedding_model, cache_dir = get_absolute_path("cache/embeddings")) embedding_model = TextEmbedding(model_name = self.embedding_model, cache_dir = get_absolute_path("cache/embeddings"))
embeddings_list = list(map(lambda embedding: embedding.tolist(), embedding_model.embed(text))) embeddings_list = list(map(lambda embedding: embedding.tolist(), embedding_model.embed(text)))
return embeddings_list return embeddings_list
def get_vector_size(self) -> int: def get_vector_size(self) -> int:
return config.embedding_dimensions return self.embedding_dimensions
class LiteLLMEmbeddingEngine(EmbeddingEngine): class LiteLLMEmbeddingEngine(EmbeddingEngine):
embedding_model: str
embedding_dimensions: int
def __init__(
self,
embedding_model: Optional[str],
embedding_dimensions: Optional[int],
):
self.embedding_model = embedding_model
self.embedding_dimensions = embedding_dimensions
import asyncio import asyncio
from typing import List from typing import List
async def embed_text(self, text: List[str]) -> List[List[float]]: async def embed_text(self, text: List[str]) -> List[List[float]]:
async def get_embedding(text_): async def get_embedding(text_):
response = await aembedding(config.litellm_embedding_model, input=text_) response = await aembedding(self.embedding_model, input=text_)
return response.data[0]['embedding'] return response.data[0]['embedding']
tasks = [get_embedding(text_) for text_ in text] tasks = [get_embedding(text_) for text_ in text]
result = await asyncio.gather(*tasks) result = await asyncio.gather(*tasks)
return result return result
# embedding = response.data[0].embedding
# # embeddings_list = list(map(lambda embedding: embedding.tolist(), embedding_model.embed(text)))
# print("response", type(response.data[0]['embedding']))
# print("response", response.data[0])
# return [response.data[0]['embedding']]
def get_vector_size(self) -> int: def get_vector_size(self) -> int:
return config.litellm_embedding_dimensions return self.embedding_dimensions
if __name__ == "__main__": if __name__ == "__main__":

View file

@ -0,0 +1,26 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.infrastructure.databases.vector.embeddings.DefaultEmbeddingEngine import DefaultEmbeddingEngine
class EmbeddingConfig(BaseSettings):
    """Settings for the text-embedding backends, read from the environment / .env file."""

    # OpenAI embedding backend defaults.
    openai_embedding_model: str = "text-embedding-3-large"
    openai_embedding_dimensions: int = 3072
    # LiteLLM / local backend defaults.
    litellm_embedding_model: str = "BAAI/bge-large-en-v1.5"
    litellm_embedding_dimensions: int = 1024
    # NOTE(review): this default is evaluated once, at class-definition time, so a
    # single DefaultEmbeddingEngine instance is shared by every EmbeddingConfig,
    # and environment overrides of the litellm_* fields above do NOT reach it —
    # confirm this is intended.
    embedding_engine: object = DefaultEmbeddingEngine(embedding_model = litellm_embedding_model, embedding_dimensions = litellm_embedding_dimensions)
    model_config = SettingsConfigDict(env_file = ".env", extra = "allow")

    def to_dict(self) -> dict:
        """Return the scalar settings as a plain dict (embedding_engine is excluded)."""
        return {
            "openai_embedding_model": self.openai_embedding_model,
            "openai_embedding_dimensions": self.openai_embedding_dimensions,
            "litellm_embedding_model": self.litellm_embedding_model,
            "litellm_embedding_dimensions": self.litellm_embedding_dimensions,
        }
@lru_cache
def get_embedding_config():
    """Build the embedding settings on first call and return the cached instance thereafter."""
    config = EmbeddingConfig()
    return config

View file

@ -0,0 +1,54 @@
from typing import List, Dict, Optional, Any
from falkordb import FalkorDB
from qdrant_client import AsyncQdrantClient, models
from ..vector_db_interface import VectorDBInterface
from ..models.DataPoint import DataPoint
from ..embeddings.EmbeddingEngine import EmbeddingEngine
class FalcorDBAdapter(VectorDBInterface):
    """Vector-store adapter backed by a FalkorDB graph database.

    Only connection setup and text-embedding delegation are functional here;
    every collection/data-point operation is an unimplemented stub.
    """

    def __init__(
        self,
        graph_database_url: str,
        graph_database_username: str,
        graph_database_password: str,
        graph_database_port: int,
        driver: Optional[Any] = None,
        # NOTE(review): the default is the EmbeddingEngine class itself, not an
        # instance — confirm callers always pass a constructed engine.
        embedding_engine = EmbeddingEngine,
        graph_name: str = "DefaultGraph",
    ):
        """Open a FalkorDB connection and remember the engine and graph name.

        NOTE(review): graph_database_username, graph_database_password and
        driver are accepted but never used below — confirm whether FalkorDB
        auth is needed here.
        """
        # url is passed as the host; only host and port are forwarded to the client.
        self.driver = FalkorDB(
            host = graph_database_url,
            port = graph_database_port)
        self.graph_name = graph_name
        self.embedding_engine = embedding_engine

    async def embed_data(self, data: list[str]) -> list[list[float]]:
        """Embed the given texts by delegating to the configured embedding engine."""
        return await self.embedding_engine.embed_text(data)

    async def create_collection(self, collection_name: str, payload_schema = None):
        """Not implemented yet."""
        pass

    async def create_data_points(self, collection_name: str, data_points: List[DataPoint]):
        """Not implemented yet."""
        pass

    async def retrieve(self, collection_name: str, data_point_id: str):
        """Not implemented yet."""
        pass

    async def search(
        self,
        collection_name: str,
        query_text: str = None,
        query_vector: List[float] = None,
        limit: int = 10,
        with_vector: bool = False,
    ):
        """Not implemented yet."""
        pass

View file

@ -2,6 +2,7 @@ from typing import BinaryIO
from pypdf import PdfReader from pypdf import PdfReader
def extract_text_from_file(file: BinaryIO, file_type) -> str: def extract_text_from_file(file: BinaryIO, file_type) -> str:
"""Extract text from a file"""
if file_type.extension == "pdf": if file_type.extension == "pdf":
reader = PdfReader(stream = file) reader = PdfReader(stream = file)
pages = list(reader.pages[:3]) pages = list(reader.pages[:3])

View file

@ -11,6 +11,7 @@ class FileMetadata(TypedDict):
keywords: list[str] keywords: list[str]
def get_file_metadata(file: BinaryIO) -> FileMetadata: def get_file_metadata(file: BinaryIO) -> FileMetadata:
"""Get metadata from a file"""
file.seek(0) file.seek(0)
file_type = guess_file_type(file) file_type = guess_file_type(file)

View file

@ -1,4 +1,5 @@
import os import os
def get_file_size(file_path: str): def get_file_size(file_path: str):
"""Get the size of a file"""
return os.path.getsize(file_path) return os.path.getsize(file_path)

View file

@ -9,6 +9,7 @@ class FileTypeException(Exception):
self.message = message self.message = message
class TxtFileType(filetype.Type): class TxtFileType(filetype.Type):
"""Text file type"""
MIME = "text/plain" MIME = "text/plain"
EXTENSION = "txt" EXTENSION = "txt"

View file

@ -1,4 +1,5 @@
def is_text_content(content): def is_text_content(content):
"""Check if the content is text."""
# Check for null bytes # Check for null bytes
if b'\0' in content: if b'\0' in content:
return False return False

View file

@ -1 +1 @@
from .config import llm_config from .config import get_llm_config

View file

@ -1,9 +1,13 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
class LLMConfig(): class LLMConfig(BaseSettings):
llm_provider: str = None llm_provider: str = "openai"
llm_model: str = None llm_model: str = "gpt-4o"
llm_endpoint: str = None llm_endpoint: str = ""
llm_api_key: str = None llm_api_key: str = ""
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
def to_dict(self) -> dict: def to_dict(self) -> dict:
return { return {
@ -13,4 +17,6 @@ class LLMConfig():
"apiKey": self.llm_api_key, "apiKey": self.llm_api_key,
} }
llm_config = LLMConfig() @lru_cache
def get_llm_config():
return LLMConfig()

View file

@ -1,3 +1,4 @@
'''Adapter for Generic API LLM provider API'''
import asyncio import asyncio
from typing import List, Type from typing import List, Type
from pydantic import BaseModel from pydantic import BaseModel
@ -5,18 +6,20 @@ import instructor
from tenacity import retry, stop_after_attempt from tenacity import retry, stop_after_attempt
import openai import openai
from cognee.config import Config
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.shared.data_models import MonitoringTool from cognee.shared.data_models import MonitoringTool
from cognee.base_config import get_base_config
from cognee.infrastructure.llm.config import get_llm_config
config = Config() llm_config = get_llm_config()
config.load() base_config = get_base_config()
if config.monitoring_tool == MonitoringTool.LANGFUSE: if base_config.monitoring_tool == MonitoringTool.LANGFUSE:
from langfuse.openai import AsyncOpenAI, OpenAI from langfuse.openai import AsyncOpenAI, OpenAI
elif config.monitoring_tool == MonitoringTool.LANGSMITH: elif base_config.monitoring_tool == MonitoringTool.LANGSMITH:
from langsmith import wrappers from langsmith import wrappers
from openai import AsyncOpenAI from openai import AsyncOpenAI
AsyncOpenAI = wrappers.wrap_openai(AsyncOpenAI()) AsyncOpenAI = wrappers.wrap_openai(AsyncOpenAI())
@ -34,7 +37,7 @@ class GenericAPIAdapter(LLMInterface):
self.model = model self.model = model
self.api_key = api_key self.api_key = api_key
if infrastructure_config.get_config()["llm_provider"] == "groq": if llm_config.llm_provider == "groq":
from groq import groq from groq import groq
self.aclient = instructor.from_openai( self.aclient = instructor.from_openai(
client = groq.Groq( client = groq.Groq(

View file

@ -2,7 +2,11 @@
from enum import Enum from enum import Enum
import json import json
import logging import logging
from cognee.infrastructure.llm import llm_config # from cognee.infrastructure.llm import llm_config
from cognee.config import Config
from cognee.infrastructure.llm import get_llm_config
# Define an Enum for LLM Providers # Define an Enum for LLM Providers
class LLMProvider(Enum): class LLMProvider(Enum):
@ -11,9 +15,10 @@ class LLMProvider(Enum):
ANTHROPIC = "anthropic" ANTHROPIC = "anthropic"
CUSTOM = "custom" CUSTOM = "custom"
llm_config = get_llm_config()
def get_llm_client(): def get_llm_client():
"""Get the LLM client based on the configuration using Enums.""" """Get the LLM client based on the configuration using Enums."""
logging.error(json.dumps(llm_config.to_dict())) # logging.error(json.dumps(llm_config.to_dict()))
provider = LLMProvider(llm_config.llm_provider) provider = LLMProvider(llm_config.llm_provider)
if provider == LLMProvider.OPENAI: if provider == LLMProvider.OPENAI:

View file

@ -6,26 +6,6 @@ from pydantic import BaseModel
class LLMInterface(Protocol): class LLMInterface(Protocol):
""" LLM Interface """ """ LLM Interface """
# @abstractmethod
# async def async_get_embedding_with_backoff(self, text, model="text-embedding-ada-002"):
# """To get text embeddings, import/call this function"""
# raise NotImplementedError
#
# @abstractmethod
# def get_embedding_with_backoff(self, text: str, model: str = "text-embedding-ada-002"):
# """To get text embeddings, import/call this function"""
# raise NotImplementedError
#
# @abstractmethod
# async def async_get_batch_embeddings_with_backoff(self, texts: List[str], models: List[str]):
# """To get multiple text embeddings in parallel, import/call this function"""
# raise NotImplementedError
# """ Get completions """
# async def acompletions_with_backoff(self, **kwargs):
# raise NotImplementedError
#
""" Structured output """
@abstractmethod @abstractmethod
async def acreate_structured_output(self, async def acreate_structured_output(self,
text_input: str, text_input: str,

View file

@ -5,20 +5,24 @@ import instructor
from pydantic import BaseModel from pydantic import BaseModel
from tenacity import retry, stop_after_attempt from tenacity import retry, stop_after_attempt
from cognee.base_config import get_base_config
from cognee.config import Config from cognee.config import Config
from cognee.infrastructure.llm import get_llm_config
from cognee.infrastructure.llm.llm_interface import LLMInterface from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.shared.data_models import MonitoringTool from cognee.shared.data_models import MonitoringTool
config = Config() config = Config()
config.load() config.load()
llm_config = get_llm_config()
base_config = get_base_config()
if config.monitoring_tool == MonitoringTool.LANGFUSE: if base_config.monitoring_tool == MonitoringTool.LANGFUSE:
from langfuse.openai import AsyncOpenAI, OpenAI from langfuse.openai import AsyncOpenAI, OpenAI
elif config.monitoring_tool == MonitoringTool.LANGSMITH: elif base_config.monitoring_tool == MonitoringTool.LANGSMITH:
from langsmith import wrap_openai from langsmith import wrappers
from openai import AsyncOpenAI from openai import AsyncOpenAI
AsyncOpenAI = wrap_openai(AsyncOpenAI()) AsyncOpenAI = wrappers.wrap_openai(AsyncOpenAI())
else: else:
from openai import AsyncOpenAI, OpenAI from openai import AsyncOpenAI, OpenAI

View file

@ -0,0 +1,44 @@
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.root_dir import get_absolute_path
from cognee.shared.data_models import MonitoringTool, DefaultContentPrediction, LabeledContent, SummarizedContent, \
DefaultCognitiveLayer, DefaultGraphModel, KnowledgeGraph
# Cognify pipeline configuration
class CognifyConfig(BaseSettings):
    """Settings controlling the cognify pipeline: extraction models and thresholds."""

    # Pydantic models used as structured-output schemas for the LLM extraction steps.
    classification_model: object = DefaultContentPrediction
    summarization_model: object = SummarizedContent
    labeling_model: object = LabeledContent
    cognitive_layer_model: object = DefaultCognitiveLayer
    # NOTE: the "treshold" spelling is part of the public field name; kept for
    # backward compatibility with existing env vars and callers.
    intra_layer_score_treshold: float = 0.98
    connect_documents: bool = False
    graph_topology: object = DefaultGraphModel
    cognitive_layers_limit: int = 2
    graph_model: object = KnowledgeGraph
    model_config = SettingsConfigDict(env_file = ".env", extra = "allow")

    def to_dict(self) -> dict:
        """Return every setting as a plain dict."""
        return {
            "classification_model": self.classification_model,
            "summarization_model": self.summarization_model,
            "labeling_model": self.labeling_model,
            "cognitive_layer_model": self.cognitive_layer_model,
            "intra_layer_score_treshold": self.intra_layer_score_treshold,
            "connect_documents": self.connect_documents,
            "graph_topology": self.graph_topology,
            "cognitive_layers_limit": self.cognitive_layers_limit,
            "graph_model": self.graph_model
        }
@lru_cache
def get_cognify_config():
    """Build the cognify settings on first call and return the cached instance thereafter."""
    config = CognifyConfig()
    return config

View file

@ -2,10 +2,12 @@ from datetime import datetime
from uuid import uuid4 from uuid import uuid4
from typing import List, Tuple, TypedDict from typing import List, Tuple, TypedDict
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.vector import DataPoint from cognee.infrastructure.databases.vector import DataPoint
from cognee.utils import extract_pos_tags, extract_named_entities, extract_sentiment_vader from cognee.utils import extract_pos_tags, extract_named_entities, extract_sentiment_vader
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vectordb_config = get_vectordb_config()
class GraphLike(TypedDict): class GraphLike(TypedDict):
nodes: List nodes: List
edges: List edges: List
@ -17,8 +19,8 @@ async def add_cognitive_layer_graphs(
chunk_id: str, chunk_id: str,
layer_graphs: List[Tuple[str, GraphLike]], layer_graphs: List[Tuple[str, GraphLike]],
): ):
vector_client = infrastructure_config.get_config("vector_engine") vector_client = vectordb_config.vector_engine
graph_model = infrastructure_config.get_config("graph_model") graph_model = graph_config.graph_model
for (layer_id, layer_graph) in layer_graphs: for (layer_id, layer_graph) in layer_graphs:
graph_nodes = [] graph_nodes = []

View file

@ -2,15 +2,19 @@
from typing import TypedDict from typing import TypedDict
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
from cognee.infrastructure.databases.vector import DataPoint from cognee.infrastructure.databases.vector import DataPoint
config = get_vectordb_config()
class TextChunk(TypedDict): class TextChunk(TypedDict):
text: str text: str
chunk_id: str chunk_id: str
file_metadata: dict file_metadata: dict
async def add_data_chunks(dataset_data_chunks: dict[str, list[TextChunk]]): async def add_data_chunks(dataset_data_chunks: dict[str, list[TextChunk]]):
vector_client = infrastructure_config.get_config("vector_engine") vector_client = config.vector_engine
identified_chunks = [] identified_chunks = []
@ -52,7 +56,7 @@ async def add_data_chunks(dataset_data_chunks: dict[str, list[TextChunk]]):
async def add_data_chunks_basic_rag(dataset_data_chunks: dict[str, list[TextChunk]]): async def add_data_chunks_basic_rag(dataset_data_chunks: dict[str, list[TextChunk]]):
vector_client = infrastructure_config.get_config("vector_engine") vector_client = config.vector_engine
identified_chunks = [] identified_chunks = []

View file

@ -2,11 +2,14 @@ from uuid import uuid4
from typing import List from typing import List
from datetime import datetime from datetime import datetime
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.vector import DataPoint
from cognee.infrastructure.databases.vector import DataPoint
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vectordb_config = get_vectordb_config()
async def add_label_nodes(graph_client, parent_node_id: str, keywords: List[str]) -> None: async def add_label_nodes(graph_client, parent_node_id: str, keywords: List[str]) -> None:
vector_client = infrastructure_config.get_config("vector_engine") vector_client = vectordb_config.vector_engine
keyword_nodes = [] keyword_nodes = []

View file

@ -1,9 +1,12 @@
import uuid import uuid
from cognee.infrastructure import infrastructure_config # from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vectordb_config = get_vectordb_config()
async def group_nodes_by_layer(node_descriptions): async def group_nodes_by_layer(node_descriptions):
@ -41,7 +44,7 @@ async def connect_nodes_in_graph(graph, relationship_dict, score_threshold=0.9):
if relationship['score'] > score_threshold: if relationship['score'] > score_threshold:
# For NetworkX # For NetworkX
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if graph_config.graph_engine == GraphDBType.NETWORKX:
searched_node_id_found = await get_node_by_unique_id(graph.graph, relationship['searched_node_id']) searched_node_id_found = await get_node_by_unique_id(graph.graph, relationship['searched_node_id'])
original_id_for_search_found = await get_node_by_unique_id(graph.graph, relationship['original_id_for_search']) original_id_for_search_found = await get_node_by_unique_id(graph.graph, relationship['original_id_for_search'])
if searched_node_id_found and original_id_for_search_found: if searched_node_id_found and original_id_for_search_found:
@ -54,7 +57,7 @@ async def connect_nodes_in_graph(graph, relationship_dict, score_threshold=0.9):
) )
# For Neo4j # For Neo4j
elif infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: elif graph_config.graph_engine == GraphDBType.NEO4J:
# Neo4j specific logic to add an edge # Neo4j specific logic to add an edge
# This is just a placeholder, replace it with actual Neo4j logic # This is just a placeholder, replace it with actual Neo4j logic
print("query is ", f"""MATCH (a), (b) WHERE a.unique_id = '{relationship['searched_node_id']}' AND b.unique_id = '{relationship['original_id_for_search']}' CREATE (a)-[:CONNECTED {{weight:{relationship['score']}}}]->(b)""") print("query is ", f"""MATCH (a), (b) WHERE a.unique_id = '{relationship['searched_node_id']}' AND b.unique_id = '{relationship['original_id_for_search']}' CREATE (a)-[:CONNECTED {{weight:{relationship['score']}}}]->(b)""")

View file

@ -2,10 +2,13 @@
from typing import Optional, Any from typing import Optional, Any
from pydantic import BaseModel from pydantic import BaseModel
from cognee.infrastructure import infrastructure_config # from cognee.infrastructure import infrastructure_config
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vectordb_config = get_vectordb_config()
async def generate_node_id(instance: BaseModel) -> str: async def generate_node_id(instance: BaseModel) -> str:
for field in ["id", "doc_id", "location_id", "type_id", "node_id"]: for field in ["id", "doc_id", "location_id", "type_id", "node_id"]:
if hasattr(instance, field): if hasattr(instance, field):
@ -30,7 +33,7 @@ async def add_node(client, parent_id: Optional[str], node_id: str, node_data: di
- Exception: If there is an error during the node or edge addition process, it logs the error and continues without interrupting the execution flow. - Exception: If there is an error during the node or edge addition process, it logs the error and continues without interrupting the execution flow.
Note: Note:
- The function currently supports adding edges only if the graph database engine is NETWORKX, as specified in the global `infrastructure_config`. - The function currently supports adding edges only if the graph database engine is NETWORKX, as specified in the graph configuration.
""" """
# Initialize result to None to ensure a clear return path # Initialize result to None to ensure a clear return path
@ -46,7 +49,7 @@ async def add_node(client, parent_id: Optional[str], node_id: str, node_data: di
print("added node", result) print("added node", result)
# Add an edge if a parent ID is provided and the graph engine is NETWORKX # Add an edge if a parent ID is provided and the graph engine is NETWORKX
if parent_id and "default_relationship" in node_data and infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if parent_id and "default_relationship" in node_data and graph_config.graph_engine == GraphDBType.NETWORKX:
try: try:
await client.add_edge(parent_id, node_id, relationship_name = node_data["default_relationship"]["type"], edge_properties = node_data) await client.add_edge(parent_id, node_id, relationship_name = node_data["default_relationship"]["type"], edge_properties = node_data)

View file

@ -1,6 +1,11 @@
from typing import Dict, List from typing import Dict, List
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.graph.config import get_graph_config
from cognee.infrastructure.databases.vector.config import get_vectordb_config
graph_config = get_graph_config()
vectordb_config = get_vectordb_config()
async def resolve_cross_graph_references(nodes_by_layer: Dict): async def resolve_cross_graph_references(nodes_by_layer: Dict):
results = [] results = []
@ -16,7 +21,7 @@ async def resolve_cross_graph_references(nodes_by_layer: Dict):
return results return results
async def get_nodes_by_layer(layer_id: str, layer_nodes: List): async def get_nodes_by_layer(layer_id: str, layer_nodes: List):
vector_engine = infrastructure_config.get_config()["vector_engine"] vector_engine = vectordb_config.vector_engine
score_points = await vector_engine.batch_search( score_points = await vector_engine.batch_search(
layer_id, layer_id,

View file

@ -2,6 +2,9 @@ import logging
from typing import List, Dict from typing import List, Dict
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from.extraction.extract_cognitive_layers import extract_cognitive_layers from.extraction.extract_cognitive_layers import extract_cognitive_layers
from cognee.modules.cognify.config import get_cognify_config
config = get_cognify_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -10,7 +13,7 @@ async def get_cognitive_layers(content: str, categories: List[Dict]):
return (await extract_cognitive_layers( return (await extract_cognitive_layers(
content, content,
categories[0], categories[0],
infrastructure_config.get_config()["cognitive_layer_model"] config.cognitive_layer_model
)).cognitive_layers )).cognitive_layers
except Exception as error: except Exception as error:
logger.error("Error extracting cognitive layers from content: %s", error, exc_info = True) logger.error("Error extracting cognitive layers from content: %s", error, exc_info = True)

View file

@ -1,14 +1,15 @@
import logging import logging
from cognee.infrastructure import infrastructure_config
from .extraction.extract_categories import extract_categories from .extraction.extract_categories import extract_categories
from cognee.modules.cognify.config import get_cognify_config
config = get_cognify_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def get_content_categories(content: str): async def get_content_categories(content: str):
try: try:
return await extract_categories( return await extract_categories(
content, content,
infrastructure_config.get_config()["classification_model"] config.classification_model
) )
except Exception as error: except Exception as error:
logger.error("Error extracting categories from content: %s", error, exc_info = True) logger.error("Error extracting categories from content: %s", error, exc_info = True)

View file

@ -1,14 +1,16 @@
import logging import logging
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from.extraction.extract_summary import extract_summary from.extraction.extract_summary import extract_summary
from cognee.modules.cognify.config import get_cognify_config
config = get_cognify_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def get_content_summary(content: str): async def get_content_summary(content: str):
try: try:
return await extract_summary( return await extract_summary(
content, content,
infrastructure_config.get_config()["summarization_model"] config.summarization_model
) )
except Exception as error: except Exception as error:
logger.error("Error extracting summary from content: %s", error, exc_info = True) logger.error("Error extracting summary from content: %s", error, exc_info = True)

View file

@ -2,7 +2,9 @@ import logging
import asyncio import asyncio
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from .extraction.knowledge_graph.extract_knowledge_graph import extract_knowledge_graph from .extraction.knowledge_graph.extract_knowledge_graph import extract_knowledge_graph
from.extraction.extract_summary import extract_summary
from cognee.modules.cognify.config import get_cognify_config
config = get_cognify_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def get_layer_graphs(content: str, cognitive_layers: list[tuple[str, dict]]): async def get_layer_graphs(content: str, cognitive_layers: list[tuple[str, dict]]):
@ -11,7 +13,7 @@ async def get_layer_graphs(content: str, cognitive_layers: list[tuple[str, dict]
extract_knowledge_graph( extract_knowledge_graph(
content, content,
cognitive_layer_data["name"], cognitive_layer_data["name"],
infrastructure_config.get_config()["graph_model"] config.graph_model
) for (_, cognitive_layer_data) in cognitive_layers ) for (_, cognitive_layer_data) in cognitive_layers
] ]

View file

@ -3,11 +3,13 @@ from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.data import Dataset, Data from cognee.infrastructure.data import Dataset, Data
from cognee.infrastructure.files import remove_file_from_storage from cognee.infrastructure.files import remove_file_from_storage
from cognee.infrastructure.databases.relational import DatabaseEngine from cognee.infrastructure.databases.relational import DatabaseEngine
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
config = get_relationaldb_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def add_data_to_dataset(dataset: Dataset, data: Data): async def add_data_to_dataset(dataset: Dataset, data: Data):
db_engine: DatabaseEngine = infrastructure_config.get_config()["database_engine"] db_engine: DatabaseEngine = config.database_engine
existing_dataset = (await db_engine.query_entity(dataset)).scalar() existing_dataset = (await db_engine.query_entity(dataset)).scalar()
existing_data = (await db_engine.query_entity(data)).scalar() existing_data = (await db_engine.query_entity(data)).scalar()

View file

@ -1,7 +1,8 @@
from typing import Protocol, BinaryIO from typing import Protocol, BinaryIO, Union
class IngestionData(Protocol): class IngestionData(Protocol):
data: str | BinaryIO = None data: Union[str, BinaryIO] = None
def get_data(self): def get_data(self):
raise NotImplementedError() raise NotImplementedError()

View file

@ -4,6 +4,8 @@
from typing import Union, Dict from typing import Union, Dict
import networkx as nx import networkx as nx
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
async def search_adjacent(graph: Union[nx.Graph, any], query: str, other_param: dict = None) -> Dict[str, str]: async def search_adjacent(graph: Union[nx.Graph, any], query: str, other_param: dict = None) -> Dict[str, str]:
""" """
Find the neighbours of a given node in the graph and return their descriptions. Find the neighbours of a given node in the graph and return their descriptions.
@ -22,7 +24,7 @@ async def search_adjacent(graph: Union[nx.Graph, any], query: str, other_param:
if node_id is None: if node_id is None:
return {} return {}
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if graph_config.graph_engine == GraphDBType.NETWORKX:
if node_id not in graph: if node_id not in graph:
return {} return {}
@ -30,7 +32,7 @@ async def search_adjacent(graph: Union[nx.Graph, any], query: str, other_param:
neighbor_descriptions = {neighbor: graph.nodes[neighbor].get('description') for neighbor in neighbors} neighbor_descriptions = {neighbor: graph.nodes[neighbor].get('description') for neighbor in neighbors}
return neighbor_descriptions return neighbor_descriptions
elif infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: elif graph_config.graph_engine == GraphDBType.NEO4J:
cypher_query = """ cypher_query = """
MATCH (node {id: $node_id})-[:CONNECTED_TO]->(neighbor) MATCH (node {id: $node_id})-[:CONNECTED_TO]->(neighbor)
RETURN neighbor.id AS neighbor_id, neighbor.description AS description RETURN neighbor.id AS neighbor_id, neighbor.description AS description

View file

@ -10,6 +10,11 @@ from cognee.modules.search.llm.extraction.categorize_relevant_category import ca
from cognee.shared.data_models import GraphDBType, DefaultContentPrediction from cognee.shared.data_models import GraphDBType, DefaultContentPrediction
import networkx as nx import networkx as nx
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_config = get_vectordb_config()
def strip_exact_regex(s, substring): def strip_exact_regex(s, substring):
# Escaping substring to be used in a regex pattern # Escaping substring to be used in a regex pattern
pattern = re.escape(substring) pattern = re.escape(substring)
@ -37,7 +42,7 @@ async def search_categories(query:str, graph: Union[nx.Graph, any], query_label:
""" """
# Determine which client is in use based on the configuration # Determine which client is in use based on the configuration
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if graph_config.graph_engine == GraphDBType.NETWORKX:
categories_and_ids = [ categories_and_ids = [
{'document_id': strip_exact_regex(_, "DATA_SUMMARY__"), 'Summary': data['summary']} {'document_id': strip_exact_regex(_, "DATA_SUMMARY__"), 'Summary': data['summary']}
@ -53,7 +58,7 @@ async def search_categories(query:str, graph: Union[nx.Graph, any], query_label:
descriptions = {node: graph.nodes[node].get('description', 'No desc available') for node in connected_nodes} descriptions = {node: graph.nodes[node].get('description', 'No desc available') for node in connected_nodes}
return descriptions return descriptions
elif infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: elif graph_config.graph_engine == GraphDBType.NEO4J:
# Logic for Neo4j # Logic for Neo4j
cypher_query = """ cypher_query = """
MATCH (n) MATCH (n)

View file

@ -7,7 +7,10 @@ from pydantic import BaseModel
from cognee.modules.search.llm.extraction.categorize_relevant_category import categorize_relevant_category from cognee.modules.search.llm.extraction.categorize_relevant_category import categorize_relevant_category
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_config = get_vectordb_config()
async def search_cypher(query:str, graph: Union[nx.Graph, any]): async def search_cypher(query:str, graph: Union[nx.Graph, any]):
""" """
@ -16,7 +19,7 @@ async def search_cypher(query:str, graph: Union[nx.Graph, any]):
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: if graph_config.graph_engine == GraphDBType.NEO4J:
result = await graph.run(query) result = await graph.run(query)
return result return result

View file

@ -6,7 +6,10 @@ from neo4j import AsyncSession
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
import networkx as nx import networkx as nx
from cognee.shared.data_models import GraphDBType from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_config = get_vectordb_config()
async def search_neighbour(graph: Union[nx.Graph, any], query: str, async def search_neighbour(graph: Union[nx.Graph, any], query: str,
other_param: dict = None): other_param: dict = None):
""" """
@ -28,7 +31,7 @@ async def search_neighbour(graph: Union[nx.Graph, any], query: str,
if node_id is None: if node_id is None:
return [] return []
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if graph_config.graph_engine == GraphDBType.NETWORKX:
relevant_context = [] relevant_context = []
target_layer_uuid = graph.nodes[node_id].get('layer_uuid') target_layer_uuid = graph.nodes[node_id].get('layer_uuid')
@ -39,7 +42,7 @@ async def search_neighbour(graph: Union[nx.Graph, any], query: str,
return relevant_context return relevant_context
elif infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: elif graph_config.graph_engine == GraphDBType.NEO4J:
if isinstance(graph, AsyncSession): if isinstance(graph, AsyncSession):
cypher_query = """ cypher_query = """
MATCH (target {id: $node_id}) MATCH (target {id: $node_id})

View file

@ -7,7 +7,10 @@ from cognee.infrastructure import infrastructure_config
from cognee.modules.search.llm.extraction.categorize_relevant_summary import categorize_relevant_summary from cognee.modules.search.llm.extraction.categorize_relevant_summary import categorize_relevant_summary
from cognee.shared.data_models import GraphDBType, ResponseSummaryModel from cognee.shared.data_models import GraphDBType, ResponseSummaryModel
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_config = get_vectordb_config()
import re import re
def strip_exact_regex(s, substring): def strip_exact_regex(s, substring):
@ -30,7 +33,7 @@ async def search_summary( query: str, graph: Union[nx.Graph, any]) -> Dict[str,
- Dict[str, str]: A dictionary where keys are node identifiers containing the query string, and values are their 'summary' attributes. - Dict[str, str]: A dictionary where keys are node identifiers containing the query string, and values are their 'summary' attributes.
""" """
if infrastructure_config.get_config()["graph_engine"] == GraphDBType.NETWORKX: if graph_config.graph_engine == GraphDBType.NETWORKX:
print("graph", graph) print("graph", graph)
summaries_and_ids = [ summaries_and_ids = [
{'document_id': strip_exact_regex(_, "DATA_SUMMARY__"), 'Summary': data['summary']} {'document_id': strip_exact_regex(_, "DATA_SUMMARY__"), 'Summary': data['summary']}
@ -48,7 +51,7 @@ async def search_summary( query: str, graph: Union[nx.Graph, any]) -> Dict[str,
return descriptions return descriptions
elif infrastructure_config.get_config()["graph_engine"] == GraphDBType.NEO4J: elif graph_config.graph_engine == GraphDBType.NEO4J:
cypher_query = f""" cypher_query = f"""
MATCH (n) MATCH (n)
WHERE n.id CONTAINS $query AND EXISTS(n.summary) WHERE n.id CONTAINS $query AND EXISTS(n.summary)

View file

View file

@ -1,10 +1,13 @@
from dsp.utils import deduplicate from dsp.utils import deduplicate
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
from cognee.infrastructure.databases.graph.config import get_graph_config
graph_config = get_graph_config()
from cognee.infrastructure.databases.vector.config import get_vectordb_config
vector_config = get_vectordb_config()
async def search_similarity(query: str, graph): async def search_similarity(query: str, graph):
graph_db_type = infrastructure_config.get_config()["graph_engine"] graph_db_type = graph_config.graph_engine
graph_client = await get_graph_client(graph_db_type) graph_client = await get_graph_client(graph_db_type)
@ -17,7 +20,7 @@ async def search_similarity(query: str, graph):
graph_nodes = [] graph_nodes = []
for layer_id in unique_layer_uuids: for layer_id in unique_layer_uuids:
vector_engine = infrastructure_config.get_config()["vector_engine"] vector_engine = vector_config.vector_engine
results = await vector_engine.search(layer_id, query_text = query, limit = 10) results = await vector_engine.search(layer_id, query_text = query, limit = 10)
print("results", results) print("results", results)

View file

@ -1,10 +1,11 @@
from cognee.config import Config from cognee.config import Config
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.llm import llm_config from cognee.infrastructure.llm.config import get_llm_config
def get_settings(): def get_settings():
config = Config() config = Config()
config.load() config.load()
llm_config = get_llm_config()
vector_dbs = [{ vector_dbs = [{
"value": "weaviate", "value": "weaviate",

View file

@ -1,7 +1,10 @@
from cognee.infrastructure.InfrastructureConfig import infrastructure_config from cognee.infrastructure.InfrastructureConfig import infrastructure_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
config = get_relationaldb_config()
def create_task_status_table(): def create_task_status_table():
db_engine = infrastructure_config.get_config("database_engine") db_engine = config.db_engine
db_engine.create_table("cognee_task_status", [ db_engine.create_table("cognee_task_status", [
dict(name = "data_id", type = "STRING"), dict(name = "data_id", type = "STRING"),

View file

@ -1,5 +1,8 @@
from cognee.infrastructure.InfrastructureConfig import infrastructure_config from cognee.infrastructure.InfrastructureConfig import infrastructure_config
from cognee.infrastructure.databases.relational.config import get_relationaldb_config
config = get_relationaldb_config()
def update_task_status(data_id: str, status: str): def update_task_status(data_id: str, status: str):
db_engine = infrastructure_config.get_config("database_engine") db_engine = config.db_engine
db_engine.insert_data("cognee_task_status", [dict(data_id = data_id, status = status)]) db_engine.insert_data("cognee_task_status", [dict(data_id = data_id, status = status)])

View file

@ -2,13 +2,15 @@ import logging
from typing import List, Dict from typing import List, Dict
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.modules.topology.extraction.extract_topology import extract_categories from cognee.modules.topology.extraction.extract_topology import extract_categories
from cognee.modules.cognify.config import get_cognify_config
cognify_config = get_cognify_config()
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
async def infer_data_topology(content: str, graph_topology=None): async def infer_data_topology(content: str, graph_topology=None):
if graph_topology is None: if graph_topology is None:
graph_topology = infrastructure_config.get_config()["graph_topology"] graph_topology = cognify_config.graph_topology
try: try:
return (await extract_categories( return (await extract_categories(
content, content,

View file

@ -1,35 +1,17 @@
import os import os
import glob import glob
from pydantic import BaseModel, create_model
from typing import Dict, Type, Any
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from typing import Dict, List, Optional, Union from typing import Dict, List, Optional, Union, Type, Any, Tuple
from datetime import datetime from datetime import datetime
from cognee import config from cognee import config
from cognee.base_config import get_base_config
from cognee.infrastructure import infrastructure_config from cognee.infrastructure import infrastructure_config
from cognee.modules.cognify.config import get_cognify_config
from cognee.modules.topology.infer_data_topology import infer_data_topology from cognee.modules.topology.infer_data_topology import infer_data_topology
cognify_config = get_cognify_config()
base_config = get_base_config()
# class UserLocation(BaseModel):
# location_id: str
# description: str
# default_relationship: Relationship = Relationship(type = "located_in")
#
# class UserProperties(BaseModel):
# custom_properties: Optional[Dict[str, Any]] = None
# location: Optional[UserLocation] = None
#
# class DefaultGraphModel(BaseModel):
# node_id: str
# user_properties: UserProperties = UserProperties()
# documents: List[Document] = []
# default_fields: Optional[Dict[str, Any]] = {}
# default_relationship: Relationship = Relationship(type = "has_properties")
#
class Relationship(BaseModel): class Relationship(BaseModel):
type: str = Field(..., description="The type of relationship, e.g., 'belongs_to'.") type: str = Field(..., description="The type of relationship, e.g., 'belongs_to'.")
source: Optional[str] = Field(None, description="The identifier of the source id of in the relationship being a directory or subdirectory") source: Optional[str] = Field(None, description="The identifier of the source id of in the relationship being a directory or subdirectory")
@ -37,7 +19,6 @@ class Relationship(BaseModel):
properties: Optional[Dict[str, Any]] = Field(None, description="A dictionary of additional properties and values related to the relationship.") properties: Optional[Dict[str, Any]] = Field(None, description="A dictionary of additional properties and values related to the relationship.")
class Document(BaseModel): class Document(BaseModel):
node_id: str node_id: str
title: str title: str
@ -53,8 +34,10 @@ class DirectoryModel(BaseModel):
subdirectories: List['DirectoryModel'] = [] subdirectories: List['DirectoryModel'] = []
default_relationship: Relationship default_relationship: Relationship
DirectoryModel.update_forward_refs() DirectoryModel.update_forward_refs()
class DirMetadata(BaseModel): class DirMetadata(BaseModel):
node_id: str node_id: str
summary: str summary: str
@ -64,6 +47,7 @@ class DirMetadata(BaseModel):
documents: List[Document] = [] documents: List[Document] = []
default_relationship: Relationship default_relationship: Relationship
class GitHubRepositoryModel(BaseModel): class GitHubRepositoryModel(BaseModel):
node_id: str node_id: str
metadata: DirMetadata metadata: DirMetadata
@ -71,10 +55,10 @@ class GitHubRepositoryModel(BaseModel):
class TopologyEngine: class TopologyEngine:
def __init__(self): def __init__(self) -> None:
self.models: Dict[str, Type[BaseModel]] = {} self.models: Dict[str, Type[BaseModel]] = {}
async def populate_model(self, directory_path, file_structure, parent_id=None): async def populate_model(self, directory_path: str, file_structure: Dict[str, Union[Dict, Tuple[str, ...]]], parent_id: Optional[str] = None) -> DirectoryModel:
directory_id = os.path.basename(directory_path) or "root" directory_id = os.path.basename(directory_path) or "root"
directory = DirectoryModel( directory = DirectoryModel(
node_id=directory_id, node_id=directory_id,
@ -100,18 +84,17 @@ class TopologyEngine:
return directory return directory
async def infer_from_directory_structure(self, node_id:str, repository: str, model): async def infer_from_directory_structure(self, node_id: str, repository: str, model: Type[BaseModel]) -> GitHubRepositoryModel:
""" Infer the topology of a repository from its file structure """ """ Infer the topology of a repository from its file structure """
path = infrastructure_config.get_config()["data_root_directory"] path = base_config.data_root_directory
path = path + "/" + str(repository)
path = path +"/"+ str(repository)
print(path) print(path)
if not os.path.exists(path): if not os.path.exists(path):
raise FileNotFoundError(f"No such directory: {path}") raise FileNotFoundError(f"No such directory: {path}")
root = {} root: Dict[str, Union[Dict, Tuple[str, ...]]] = {}
for filename in glob.glob(f"{path}/**", recursive=True): for filename in glob.glob(f"{path}/**", recursive=True):
parts = os.path.relpath(filename, start=path).split(os.path.sep) parts = os.path.relpath(filename, start=path).split(os.path.sep)
current = root current = root
@ -128,8 +111,6 @@ class TopologyEngine:
root_directory = await self.populate_model('/', root) root_directory = await self.populate_model('/', root)
# repository_metadata = await infer_data_topology(str(root), DirMetadata)
repository_metadata = DirMetadata( repository_metadata = DirMetadata(
node_id="repo1", node_id="repo1",
summary="Example repository", summary="Example repository",
@ -147,13 +128,10 @@ class TopologyEngine:
return active_model return active_model
# print(github_repo_model) def load(self, model_name: str) -> Optional[Type[BaseModel]]:
def load(self, model_name: str):
return self.models.get(model_name) return self.models.get(model_name)
def extrapolate(self, model_name: str): def extrapolate(self, model_name: str) -> None:
# This method would be implementation-specific depending on what "extrapolate" means # This method would be implementation-specific depending on what "extrapolate" means
pass pass
@ -164,15 +142,16 @@ if __name__ == "__main__":
config.data_root_directory(data_directory_path) config.data_root_directory(data_directory_path)
cognee_directory_path = os.path.abspath("../.cognee_system") cognee_directory_path = os.path.abspath("../.cognee_system")
config.system_root_directory(cognee_directory_path) config.system_root_directory(cognee_directory_path)
async def main():
async def main() -> None:
engine = TopologyEngine() engine = TopologyEngine()
# model = engine.load("GitHubRepositoryModel") # model = engine.load("GitHubRepositoryModel")
# if model is None: # if model is None:
# raise ValueError("Model not found") # raise ValueError("Model not found")
result = await engine.infer("example") result = await engine.infer_from_directory_structure("example_node_id", "example_repo", GitHubRepositoryModel)
print(result) print(result)
import asyncio import asyncio
asyncio.run(main()) asyncio.run(main())
# result = engine.extrapolate("GitHubRepositoryModel") # result = engine.extrapolate("GitHubRepositoryModel")
# print(result) # print(result)

View file

@ -1,6 +1,22 @@
from os import path from os import path
import logging
from pathlib import Path
logging.basicConfig(level=logging.DEBUG)
# ROOT_DIR = path.dirname(path.abspath(__file__))
#
# logging.debug("ROOT_DIR: ", ROOT_DIR)
#
# def get_absolute_path(path_from_root: str) -> str:
# logging.debug("abspath: ", path.abspath(path.join(ROOT_DIR, path_from_root)))
#
#
# return path.abspath(path.join(ROOT_DIR, path_from_root))
ROOT_DIR = Path(__file__).resolve().parent
ROOT_DIR = path.dirname(path.abspath(__file__)) logging.basicConfig(level=logging.DEBUG)
logging.debug("ROOT_DIR: %s", ROOT_DIR)
def get_absolute_path(path_from_root: str) -> str: def get_absolute_path(path_from_root: str) -> str:
return path.abspath(path.join(ROOT_DIR, path_from_root)) absolute_path = ROOT_DIR / path_from_root
logging.debug("abspath: %s", absolute_path.resolve())
return str(absolute_path.resolve())

View file

@ -206,6 +206,7 @@ class DefaultCognitiveLayer(BaseModel):
class GraphDBType(Enum): class GraphDBType(Enum):
NETWORKX = auto() NETWORKX = auto()
NEO4J = auto() NEO4J = auto()
FALKORDB = auto()
# Models for representing different entities # Models for representing different entities

View file

@ -1,26 +1,35 @@
import logging
import os
logging.basicConfig(level=logging.DEBUG)
async def main(): async def main():
from os import path from os import path
import pathlib import pathlib
import cognee import cognee
logging.basicConfig(level=logging.DEBUG)
print("Working dir: ", str(pathlib.Path(__file__).parent)) # print("Working dir: ", str(pathlib.Path(__file__).parent))
data_directory_path = str(pathlib.Path(path.join(pathlib.Path(__file__).parent, "../../.data")).resolve()) # data_directory_path = str(pathlib.Path(path.join(pathlib.Path(__file__).parent, "../../.data")).resolve())
print("Data dir: ", data_directory_path) # print("Data dir: ", data_directory_path)
cognee.config.data_root_directory(data_directory_path) # cognee.config.data_root_directory(data_directory_path)
#
# cognee_directory_path = str(pathlib.Path(path.join(pathlib.Path(__file__).parent, "../../.cognee_system")).resolve())
# print("System dir: ", cognee_directory_path)
# cognee.config.system_root_directory(cognee_directory_path)
cognee_directory_path = str(pathlib.Path(path.join(pathlib.Path(__file__).parent, "../../.cognee_system")).resolve()) logging.debug("CURRENCT CWD: %s", pathlib.Path(__file__).parent)
print("System dir: ", cognee_directory_path) logging.debug("CURRENCT CWD: %s", os.getcwd())
cognee.config.system_root_directory(cognee_directory_path)
await cognee.prune.prune_system()
dataset_name = "cs_explanations" dataset_name = "cs_explanations"
explanation_file_path = path.join(pathlib.Path(__file__).parent, "test_data/Natural_language_processing.txt")
explanation_file_path = os.path.join(pathlib.Path(__file__).parent, "test_data/Natural_language_processing.txt")
await cognee.add([explanation_file_path], dataset_name) await cognee.add([explanation_file_path], dataset_name)
dataset_name = "short_stories" # dataset_name = "short_stories"
# data_directory_path is defined above # # data_directory_path is defined above
await cognee.add("data://" + data_directory_path, dataset_name) # await cognee.add("data://" + "/Users/runner/work/cognee/cognee/./cognee/tests", dataset_name)
text_1 = """A quantum computer is a computer that takes advantage of quantum mechanical phenomena. text_1 = """A quantum computer is a computer that takes advantage of quantum mechanical phenomena.
At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states. At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states.
@ -36,17 +45,17 @@ async def main():
Some notable LLMs are OpenAI's GPT series of models (e.g., GPT-3.5 and GPT-4, used in ChatGPT and Microsoft Copilot), Google's PaLM and Gemini (the latter of which is currently used in the chatbot of the same name), xAI's Grok, Meta's LLaMA family of open-source models, Anthropic's Claude models, Mistral AI's open source models, and Databricks' open source DBRX. Some notable LLMs are OpenAI's GPT series of models (e.g., GPT-3.5 and GPT-4, used in ChatGPT and Microsoft Copilot), Google's PaLM and Gemini (the latter of which is currently used in the chatbot of the same name), xAI's Grok, Meta's LLaMA family of open-source models, Anthropic's Claude models, Mistral AI's open source models, and Databricks' open source DBRX.
""" """
#
# dataset_name = "cs_explanations"
# await cognee.add(
# [
# text_1,
# text_2
# ],
# dataset_name
# )
dataset_name = "cs_explanations" await cognee.cognify([ "cs_explanations"])
await cognee.add(
[
text_1,
text_2
],
dataset_name
)
await cognee.cognify(["short_stories", "cs_explanations"])
if __name__ == "__main__": if __name__ == "__main__":

0
evals/__init__.py Normal file
View file

446
poetry.lock generated
View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]] [[package]]
name = "aiofiles" name = "aiofiles"
@ -576,17 +576,17 @@ numpy = {version = ">=1.19.0", markers = "python_version >= \"3.9\""}
[[package]] [[package]]
name = "boto3" name = "boto3"
version = "1.34.110" version = "1.34.113"
description = "The AWS SDK for Python" description = "The AWS SDK for Python"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "boto3-1.34.110-py3-none-any.whl", hash = "sha256:2fc871b4a5090716c7a71af52c462e539529227f4d4888fd04896d5028f9cedc"}, {file = "boto3-1.34.113-py3-none-any.whl", hash = "sha256:7e59f0a848be477a4c98a90e7a18a0e284adfb643f7879d2b303c5f493661b7a"},
{file = "boto3-1.34.110.tar.gz", hash = "sha256:83ffe2273da7bdfdb480d85b0705f04e95bd110e9741f23328b7c76c03e6d53c"}, {file = "boto3-1.34.113.tar.gz", hash = "sha256:009cd143509f2ff4c37582c3f45d50f28c95eed68e8a5c36641206bdb597a9ea"},
] ]
[package.dependencies] [package.dependencies]
botocore = ">=1.34.110,<1.35.0" botocore = ">=1.34.113,<1.35.0"
jmespath = ">=0.7.1,<2.0.0" jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.10.0,<0.11.0" s3transfer = ">=0.10.0,<0.11.0"
@ -595,13 +595,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]] [[package]]
name = "botocore" name = "botocore"
version = "1.34.110" version = "1.34.113"
description = "Low-level, data-driven core of boto 3." description = "Low-level, data-driven core of boto 3."
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "botocore-1.34.110-py3-none-any.whl", hash = "sha256:1edf3a825ec0a5edf238b2d42ad23305de11d5a71bb27d6f9a58b7e8862df1b6"}, {file = "botocore-1.34.113-py3-none-any.whl", hash = "sha256:8ca87776450ef41dd25c327eb6e504294230a5756940d68bcfdedc4a7cdeca97"},
{file = "botocore-1.34.110.tar.gz", hash = "sha256:b2c98c40ecf0b1facb9e61ceb7dfa28e61ae2456490554a16c8dbf99f20d6a18"}, {file = "botocore-1.34.113.tar.gz", hash = "sha256:449912ba3c4ded64f21d09d428146dd9c05337b2a112e15511bf2c4888faae79"},
] ]
[package.dependencies] [package.dependencies]
@ -1332,13 +1332,13 @@ files = [
[[package]] [[package]]
name = "deepeval" name = "deepeval"
version = "0.21.44" version = "0.21.45"
description = "The open-source evaluation framework for LLMs." description = "The open-source evaluation framework for LLMs."
optional = false optional = false
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "deepeval-0.21.44-py3-none-any.whl", hash = "sha256:66ce660de935fda8991e2c8b762aac11bdeae09a06943aba4f952e5952a7da49"}, {file = "deepeval-0.21.45-py3-none-any.whl", hash = "sha256:6caffaed542efb15a3c2bc955f233a29f9e754ea1d2ce32a1d8d84deaa6a3e2f"},
{file = "deepeval-0.21.44.tar.gz", hash = "sha256:262219baf2ac308aa1a697c885b40f8e90e6b83fa09c26a4e3c5406f253f9306"}, {file = "deepeval-0.21.45.tar.gz", hash = "sha256:43998438e6defb86109234f5e27978df62c807a28f998a73ba7b0bd63920206c"},
] ]
[package.dependencies] [package.dependencies]
@ -1567,58 +1567,58 @@ weaviate = ["weaviate-client (>=3.26.1,<3.27.0)", "weaviate-client (>=4.5.4,<4.6
[[package]] [[package]]
name = "duckdb" name = "duckdb"
version = "0.10.2" version = "0.10.3"
description = "DuckDB in-process database" description = "DuckDB in-process database"
optional = false optional = false
python-versions = ">=3.7.0" python-versions = ">=3.7.0"
files = [ files = [
{file = "duckdb-0.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3891d3ac03e12a3e5c43afa3020fe701f64060f52d25f429a1ed7b5d914368d3"}, {file = "duckdb-0.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd25cc8d001c09a19340739ba59d33e12a81ab285b7a6bed37169655e1cefb31"},
{file = "duckdb-0.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f63877651f1fb940e049dc53038eb763856616319acf4f892b1c3ed074f5ab0"}, {file = "duckdb-0.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f9259c637b917ca0f4c63887e8d9b35ec248f5d987c886dfc4229d66a791009"},
{file = "duckdb-0.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:06e3a36f04f4d98d2c0bbdd63e517cfbe114a795306e26ec855e62e076af5043"}, {file = "duckdb-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48f5f1542f1e4b184e6b4fc188f497be8b9c48127867e7d9a5f4a3e334f88b0"},
{file = "duckdb-0.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf5f95ad5b75c8e65c6508b4df02043dd0b9d97712b9a33236ad77c388ce7861"}, {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e327f7a3951ea154bb56e3fef7da889e790bd9a67ca3c36afc1beb17d3feb6d6"},
{file = "duckdb-0.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ff62bc98278c98fecbd6eecec5d698ad41ebd654110feaadbf8ac8bb59b1ecf"}, {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d8b20ed67da004b4481973f4254fd79a0e5af957d2382eac8624b5c527ec48c"},
{file = "duckdb-0.10.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cceede13fde095c23cf9a53adf7c414c7bfb21b9a7aa6a4836014fdbecbfca70"}, {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d37680b8d7be04e4709db3a66c8b3eb7ceba2a5276574903528632f2b2cc2e60"},
{file = "duckdb-0.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:acdfff60b7efccd7f731213a9795851256249dfacf80367074b2b2e144f716dd"}, {file = "duckdb-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d34b86d6a2a6dfe8bb757f90bfe7101a3bd9e3022bf19dbddfa4b32680d26a9"},
{file = "duckdb-0.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a5d5655cf0bdaf664a6f332afe465e02b08cef715548a0983bb7aef48da06a6"}, {file = "duckdb-0.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:73b1cb283ca0f6576dc18183fd315b4e487a545667ffebbf50b08eb4e8cdc143"},
{file = "duckdb-0.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a9d15842876d18763e085648656cccc7660a215d16254906db5c4471be2c7732"}, {file = "duckdb-0.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d917dde19fcec8cadcbef1f23946e85dee626ddc133e1e3f6551f15a61a03c61"},
{file = "duckdb-0.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c88cdcdc8452c910e4298223e7d9fca291534ff5aa36090aa49c9e6557550b13"}, {file = "duckdb-0.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46757e0cf5f44b4cb820c48a34f339a9ccf83b43d525d44947273a585a4ed822"},
{file = "duckdb-0.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:364cd6f5dc8a1010d144d08c410ba9a74c521336ee5bda84fabc6616216a6d6a"}, {file = "duckdb-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:338c14d8ac53ac4aa9ec03b6f1325ecfe609ceeb72565124d489cb07f8a1e4eb"},
{file = "duckdb-0.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57c11d1060296f5e9ebfb5bb7e5521e0d77912e8f9ff43c90240c3311e9de9"}, {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:651fcb429602b79a3cf76b662a39e93e9c3e6650f7018258f4af344c816dab72"},
{file = "duckdb-0.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:186d86b8dda8e1076170eb770bb2bb73ea88ca907d92885c9695d6515207b205"}, {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3ae3c73b98b6215dab93cc9bc936b94aed55b53c34ba01dec863c5cab9f8e25"},
{file = "duckdb-0.10.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f65b62f31c6bff21afc0261cfe28d238b8f34ec78f339546b12f4740c39552a"}, {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56429b2cfe70e367fb818c2be19f59ce2f6b080c8382c4d10b4f90ba81f774e9"},
{file = "duckdb-0.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a860d7466a5c93714cdd94559ce9e1db2ab91914f0941c25e5e93d4ebe36a5fa"}, {file = "duckdb-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b46c02c2e39e3676b1bb0dc7720b8aa953734de4fd1b762e6d7375fbeb1b63af"},
{file = "duckdb-0.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:33308190e9c7f05a3a0a2d46008a043effd4eae77011869d7c18fb37acdd9215"}, {file = "duckdb-0.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:bcd460feef56575af2c2443d7394d405a164c409e9794a4d94cb5fdaa24a0ba4"},
{file = "duckdb-0.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3a8b2f1229b4aecb79cd28ffdb99032b1497f0a805d0da1136a9b6115e1afc70"}, {file = "duckdb-0.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e229a7c6361afbb0d0ab29b1b398c10921263c52957aefe3ace99b0426fdb91e"},
{file = "duckdb-0.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d23a6dea61963733a0f45a0d0bbb1361fb2a47410ed5ff308b4a1f869d4eeb6f"}, {file = "duckdb-0.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:732b1d3b6b17bf2f32ea696b9afc9e033493c5a3b783c292ca4b0ee7cc7b0e66"},
{file = "duckdb-0.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:20ee0aa27e688aa52a40b434ec41a50431d0b06edeab88edc2feaca18d82c62c"}, {file = "duckdb-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5380d4db11fec5021389fb85d614680dc12757ef7c5881262742250e0b58c75"},
{file = "duckdb-0.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80a6d43d9044f0997a15a92e0c0ff3afd21151a1e572a92f439cc4f56b7090e1"}, {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:468a4e0c0b13c55f84972b1110060d1b0f854ffeb5900a178a775259ec1562db"},
{file = "duckdb-0.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6934758cacd06029a5c9f54556a43bd277a86757e22bf8d0dd11ca15c1813d1c"}, {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa1e7ff8d18d71defa84e79f5c86aa25d3be80d7cb7bc259a322de6d7cc72da"},
{file = "duckdb-0.10.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a11e2d68bd79044eea5486b1cddb5b915115f537e5c74eeb94c768ce30f9f4b"}, {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed1063ed97c02e9cf2e7fd1d280de2d1e243d72268330f45344c69c7ce438a01"},
{file = "duckdb-0.10.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0bf58385c43b8e448a2fea7e8729054934bf73ea616d1d7ef8184eda07f975e2"}, {file = "duckdb-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:22f2aad5bb49c007f3bfcd3e81fdedbc16a2ae41f2915fc278724ca494128b0c"},
{file = "duckdb-0.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:eae75c7014597ded6e7f6dc51e32d48362a31608acd73e9f795748ee94335a54"}, {file = "duckdb-0.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:8f9e2bb00a048eb70b73a494bdc868ce7549b342f7ffec88192a78e5a4e164bd"},
{file = "duckdb-0.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62e89deff778a7a86f651802b947a3466425f6cce41e9d7d412d39e492932943"}, {file = "duckdb-0.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6c2fc49875b4b54e882d68703083ca6f84b27536d57d623fc872e2f502b1078"},
{file = "duckdb-0.10.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f87e555fd36ec6da316b727a39fb24c53124a797dfa9b451bdea87b2f20a351f"}, {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66c125d0c30af210f7ee599e7821c3d1a7e09208196dafbf997d4e0cfcb81ab"},
{file = "duckdb-0.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41e8b34b1a944590ebcf82f8cc59d67b084fe99479f048892d60da6c1402c386"}, {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99dd7a1d901149c7a276440d6e737b2777e17d2046f5efb0c06ad3b8cb066a6"},
{file = "duckdb-0.10.2-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c68c6dde2773774cf2371522a3959ea2716fc2b3a4891d4066f0e426455fe19"}, {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ec3bbdb209e6095d202202893763e26c17c88293b88ef986b619e6c8b6715bd"},
{file = "duckdb-0.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ff6a8a0980d0f9398fa461deffa59465dac190d707468478011ea8a5fe1f2c81"}, {file = "duckdb-0.10.3-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:2b3dec4ef8ed355d7b7230b40950b30d0def2c387a2e8cd7efc80b9d14134ecf"},
{file = "duckdb-0.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:728dd4ff0efda387a424754e5508d4f8c72a272c2d3ccb036a83286f60b46002"}, {file = "duckdb-0.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:04129f94fb49bba5eea22f941f0fb30337f069a04993048b59e2811f52d564bc"},
{file = "duckdb-0.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c461d6b4619e80170044a9eb999bbf4097e330d3a4974ced0a7eaeb79c7c39f6"}, {file = "duckdb-0.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d75d67024fc22c8edfd47747c8550fb3c34fb1cbcbfd567e94939ffd9c9e3ca7"},
{file = "duckdb-0.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:909351ff72eb3b50b89761251148d8a186594d8a438e12dcf5494794caff6693"}, {file = "duckdb-0.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3796e9507c02d0ddbba2e84c994fae131da567ce3d9cbb4cbcd32fadc5fbb26"},
{file = "duckdb-0.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d9eeb8393d69abafd355b869669957eb85b89e4df677e420b9ef0693b7aa6cb4"}, {file = "duckdb-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:78e539d85ebd84e3e87ec44d28ad912ca4ca444fe705794e0de9be3dd5550c11"},
{file = "duckdb-0.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3102bcf5011e8f82ea3c2bde43108774fe5a283a410d292c0843610ea13e2237"}, {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a99b67ac674b4de32073e9bc604b9c2273d399325181ff50b436c6da17bf00a"},
{file = "duckdb-0.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d64d443613e5f16caf7d67102733538c90f7715867c1a98597efd3babca068e3"}, {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1209a354a763758c4017a1f6a9f9b154a83bed4458287af9f71d84664ddb86b6"},
{file = "duckdb-0.10.2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb31398826d1b7473344e5ee8e0f826370c9752549469ba1327042ace9041f80"}, {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b735cea64aab39b67c136ab3a571dbf834067f8472ba2f8bf0341bc91bea820"},
{file = "duckdb-0.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d09dcec467cd6127d5cc1fb0ce4efbd77e761882d9d772b0f64fc2f79a2a1cde"}, {file = "duckdb-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:816ffb9f758ed98eb02199d9321d592d7a32a6cb6aa31930f4337eb22cfc64e2"},
{file = "duckdb-0.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:82fab1a24faf7c33d8a7afed08b57ee36e8821a3a68a2f1574cd238ea440bba0"}, {file = "duckdb-0.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:1631184b94c3dc38b13bce4045bf3ae7e1b0ecbfbb8771eb8d751d8ffe1b59b3"},
{file = "duckdb-0.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38607e6e6618e8ea28c8d9b67aa9e22cfd6d6d673f2e8ab328bd6e867b697f69"}, {file = "duckdb-0.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb98c35fc8dd65043bc08a2414dd9f59c680d7e8656295b8969f3f2061f26c52"},
{file = "duckdb-0.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fb0c23bc8c09615bff38aebcf8e92e6ae74959c67b3c9e5b00edddc730bf22be"}, {file = "duckdb-0.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e75c9f5b6a92b2a6816605c001d30790f6d67ce627a2b848d4d6040686efdf9"},
{file = "duckdb-0.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:00576c11c78c83830ab483bad968e07cd9b5f730e7ffaf5aa5fadee5ac4f71e9"}, {file = "duckdb-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae786eddf1c2fd003466e13393b9348a44b6061af6fe7bcb380a64cac24e7df7"},
{file = "duckdb-0.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077db692cdda50c4684ef87dc2a68507665804caa90e539dbe819116bda722ad"}, {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9387da7b7973707b0dea2588749660dd5dd724273222680e985a2dd36787668"},
{file = "duckdb-0.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca25984ad9f9a04e46e8359f852668c11569534e3bb8424b80be711303ad2314"}, {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:538f943bf9fa8a3a7c4fafa05f21a69539d2c8a68e557233cbe9d989ae232899"},
{file = "duckdb-0.10.2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a72cc40982c7b92cf555e574618fc711033b013bf258b611ba18d7654c89d8c"}, {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6930608f35025a73eb94252964f9f19dd68cf2aaa471da3982cf6694866cfa63"},
{file = "duckdb-0.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27b9efd6e788eb561535fdc0cbc7c74aca1ff39f748b7cfc27aa49b00e22da1"}, {file = "duckdb-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:03bc54a9cde5490918aad82d7d2a34290e3dfb78d5b889c6626625c0f141272a"},
{file = "duckdb-0.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:4800469489bc262dda61a7f1d40acedf67cf2454874e9d8bbf07920dc2b147e6"}, {file = "duckdb-0.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:372b6e3901d85108cafe5df03c872dfb6f0dbff66165a0cf46c47246c1957aa0"},
{file = "duckdb-0.10.2.tar.gz", hash = "sha256:0f609c9d5f941f1ecde810f010dd9321cd406a552c1df20318a13fa64247f67f"}, {file = "duckdb-0.10.3.tar.gz", hash = "sha256:c5bd84a92bc708d3a6adffe1f554b94c6e76c795826daaaf482afc3d9c636971"},
] ]
[[package]] [[package]]
@ -1786,53 +1786,53 @@ files = [
[[package]] [[package]]
name = "fonttools" name = "fonttools"
version = "4.51.0" version = "4.52.1"
description = "Tools to manipulate font files" description = "Tools to manipulate font files"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:84d7751f4468dd8cdd03ddada18b8b0857a5beec80bce9f435742abc9a851a74"}, {file = "fonttools-4.52.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:67a30b872e79577e5319ce660ede4a5131fa8a45de76e696746545e17db4437f"},
{file = "fonttools-4.51.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b4850fa2ef2cfbc1d1f689bc159ef0f45d8d83298c1425838095bf53ef46308"}, {file = "fonttools-4.52.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a5bff35738f8f6607c4303561ee1d1e5f64d5b14cf3c472d3030566c82e763"},
{file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5b48a1121117047d82695d276c2af2ee3a24ffe0f502ed581acc2673ecf1037"}, {file = "fonttools-4.52.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c9622593dfff042480a1b7e5b72c4d7dc00b96d2b4f98b0bf8acf071087e0db"},
{file = "fonttools-4.51.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:180194c7fe60c989bb627d7ed5011f2bef1c4d36ecf3ec64daec8302f1ae0716"}, {file = "fonttools-4.52.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33cfc9fe27af5e113d157d5147e24fc8e5bda3c5aadb55bea9847ec55341ce30"},
{file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:96a48e137c36be55e68845fc4284533bda2980f8d6f835e26bca79d7e2006438"}, {file = "fonttools-4.52.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aa5bec5027d947ee4b2242caecf7dc6e4ea03833e92e9b5211ebb6ab4eede8b2"},
{file = "fonttools-4.51.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:806e7912c32a657fa39d2d6eb1d3012d35f841387c8fc6cf349ed70b7c340039"}, {file = "fonttools-4.52.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10e44bf8e5654050a332a79285bacd6bd3069084540aec46c0862391147a1daa"},
{file = "fonttools-4.51.0-cp310-cp310-win32.whl", hash = "sha256:32b17504696f605e9e960647c5f64b35704782a502cc26a37b800b4d69ff3c77"}, {file = "fonttools-4.52.1-cp310-cp310-win32.whl", hash = "sha256:7fba390ac2ca18ebdd456f3a9acfb4557d6dcb2eaba5cc3eadce01003892a770"},
{file = "fonttools-4.51.0-cp310-cp310-win_amd64.whl", hash = "sha256:c7e91abdfae1b5c9e3a543f48ce96013f9a08c6c9668f1e6be0beabf0a569c1b"}, {file = "fonttools-4.52.1-cp310-cp310-win_amd64.whl", hash = "sha256:15df3517eb95035422a5c953ca19aac99913c16aa0e4ef061aeaef5f3bcaf369"},
{file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a8feca65bab31479d795b0d16c9a9852902e3a3c0630678efb0b2b7941ea9c74"}, {file = "fonttools-4.52.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:40730aab9cf42286f314b985b483eea574f1bcf3a23e28223084cbb9e256457c"},
{file = "fonttools-4.51.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ac27f436e8af7779f0bb4d5425aa3535270494d3bc5459ed27de3f03151e4c2"}, {file = "fonttools-4.52.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a19bc2be3af5b22ff5c7fe858c380862e31052c74f62e2c6d565ed0855bed7a6"},
{file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e19bd9e9964a09cd2433a4b100ca7f34e34731e0758e13ba9a1ed6e5468cc0f"}, {file = "fonttools-4.52.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f859066d8afde53f2ddabcd0705061e6d9d9868757c6ae28abe49bc885292df4"},
{file = "fonttools-4.51.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2b92381f37b39ba2fc98c3a45a9d6383bfc9916a87d66ccb6553f7bdd129097"}, {file = "fonttools-4.52.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74cd3e3e9ba501e87a391b62e91f7b1610e8b3f3d706a368e5aee51614c1674e"},
{file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5f6bc991d1610f5c3bbe997b0233cbc234b8e82fa99fc0b2932dc1ca5e5afec0"}, {file = "fonttools-4.52.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:958957b81418647f66820480363cb617ba6b5bcf189ec6c4cea307d051048545"},
{file = "fonttools-4.51.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9696fe9f3f0c32e9a321d5268208a7cc9205a52f99b89479d1b035ed54c923f1"}, {file = "fonttools-4.52.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56addf1f995d94dad13aaaf56eb6def3d9ca97c2fada5e27af8190b3141e8633"},
{file = "fonttools-4.51.0-cp311-cp311-win32.whl", hash = "sha256:3bee3f3bd9fa1d5ee616ccfd13b27ca605c2b4270e45715bd2883e9504735034"}, {file = "fonttools-4.52.1-cp311-cp311-win32.whl", hash = "sha256:fea5456b2af42db8ecb1a6c2f144655ca6dcdcebd970f3145c56e668084ded7e"},
{file = "fonttools-4.51.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f08c901d3866a8905363619e3741c33f0a83a680d92a9f0e575985c2634fcc1"}, {file = "fonttools-4.52.1-cp311-cp311-win_amd64.whl", hash = "sha256:228faab7638cd726cdde5e2ec9ee10f780fbf9de9aa38d7f1e56a270437dff36"},
{file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4060acc2bfa2d8e98117828a238889f13b6f69d59f4f2d5857eece5277b829ba"}, {file = "fonttools-4.52.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7c6aeb0d53e2ea92009b11c3d4ad9c03d0ecdfe602d547bed8537836e464f51e"},
{file = "fonttools-4.51.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1250e818b5f8a679ad79660855528120a8f0288f8f30ec88b83db51515411fcc"}, {file = "fonttools-4.52.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e871123d12c92e2c9bda6369b69ce2da9cef40b119cc340451e413e90355fa38"},
{file = "fonttools-4.51.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76f1777d8b3386479ffb4a282e74318e730014d86ce60f016908d9801af9ca2a"}, {file = "fonttools-4.52.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ff8857dc9bb3e407c25aef3e025409cfbb23adb646a835636bebb1bdfc27a41"},
{file = "fonttools-4.51.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b5ad456813d93b9c4b7ee55302208db2b45324315129d85275c01f5cb7e61a2"}, {file = "fonttools-4.52.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7685fdc6e23267844eef2b9af585d7f171cca695e4eb369d7682544c3e2e1123"},
{file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:68b3fb7775a923be73e739f92f7e8a72725fd333eab24834041365d2278c3671"}, {file = "fonttools-4.52.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1e1b2774485fbbb41a1beccc913b9c6f7971f78da61dd34207b9acc3cc2963e"},
{file = "fonttools-4.51.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e2f1a4499e3b5ee82c19b5ee57f0294673125c65b0a1ff3764ea1f9db2f9ef5"}, {file = "fonttools-4.52.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1e2c415160397fd6ed3964155aeec4bfefceeee365ab17161a5b3fe3f8dab077"},
{file = "fonttools-4.51.0-cp312-cp312-win32.whl", hash = "sha256:278e50f6b003c6aed19bae2242b364e575bcb16304b53f2b64f6551b9c000e15"}, {file = "fonttools-4.52.1-cp312-cp312-win32.whl", hash = "sha256:3ba2c4647e7decfb8e9cd346661c7d151dae1fba23d37b48bcf5fa8351f7b8c8"},
{file = "fonttools-4.51.0-cp312-cp312-win_amd64.whl", hash = "sha256:b3c61423f22165541b9403ee39874dcae84cd57a9078b82e1dce8cb06b07fa2e"}, {file = "fonttools-4.52.1-cp312-cp312-win_amd64.whl", hash = "sha256:d39b926f14a2f7a7f92ded7d266b18f0108d867364769ab59da88ac2fa90d288"},
{file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1621ee57da887c17312acc4b0e7ac30d3a4fb0fec6174b2e3754a74c26bbed1e"}, {file = "fonttools-4.52.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6e58d8097a269b6c43ec0abb3fa8d6c350ff0c7dfd23fc14d004610df88a4bb3"},
{file = "fonttools-4.51.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d9298be7a05bb4801f558522adbe2feea1b0b103d5294ebf24a92dd49b78e5"}, {file = "fonttools-4.52.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:20f0fc969817c50539dc919ed8c4aef4de28c2d6e0111a064112301f157aede4"},
{file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee1af4be1c5afe4c96ca23badd368d8dc75f611887fb0c0dac9f71ee5d6f110e"}, {file = "fonttools-4.52.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d62e84d38969491c6c1f6fe3dd63108e99d02de01bb3d98c160a5d4d24120910"},
{file = "fonttools-4.51.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b49adc721a7d0b8dfe7c3130c89b8704baf599fb396396d07d4aa69b824a1"}, {file = "fonttools-4.52.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8eb5a389bbdee6f4c422881de422ee0e7efdfcd9310b13d540b12aa8ae2c9e7b"},
{file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de7c29bdbdd35811f14493ffd2534b88f0ce1b9065316433b22d63ca1cd21f14"}, {file = "fonttools-4.52.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0caf05c969cbde6729dd97b64bea445ee152bb19215d5886f7b93bd0fb455468"},
{file = "fonttools-4.51.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cadf4e12a608ef1d13e039864f484c8a968840afa0258b0b843a0556497ea9ed"}, {file = "fonttools-4.52.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:df08bee1dd29a767311b50c62c0cfe4d72ae8c793e567d4c60b8c16c7c63a4f0"},
{file = "fonttools-4.51.0-cp38-cp38-win32.whl", hash = "sha256:aefa011207ed36cd280babfaa8510b8176f1a77261833e895a9d96e57e44802f"}, {file = "fonttools-4.52.1-cp38-cp38-win32.whl", hash = "sha256:82ffcf4782ceda09842b5b7875b36834c15d7cc0d5dd3d23a658ee9cf8819cd6"},
{file = "fonttools-4.51.0-cp38-cp38-win_amd64.whl", hash = "sha256:865a58b6e60b0938874af0968cd0553bcd88e0b2cb6e588727117bd099eef836"}, {file = "fonttools-4.52.1-cp38-cp38-win_amd64.whl", hash = "sha256:26b43bab5a3bce55ed4d9699b16568795eef5597d154f52dcabef5b4804c4b21"},
{file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:60a3409c9112aec02d5fb546f557bca6efa773dcb32ac147c6baf5f742e6258b"}, {file = "fonttools-4.52.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e8dbc13c4bc12e60df1b1f5e484112a5e96a6e8bba995e2965988ad73c5ea1b"},
{file = "fonttools-4.51.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7e89853d8bea103c8e3514b9f9dc86b5b4120afb4583b57eb10dfa5afbe0936"}, {file = "fonttools-4.52.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7352ba2226e45e8fba11c3fb416363faf1b06f3f2e80d07d2930401265f3bf9c"},
{file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56fc244f2585d6c00b9bcc59e6593e646cf095a96fe68d62cd4da53dd1287b55"}, {file = "fonttools-4.52.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8834d43763e9e92349ce8bb25dfb612aef6691eefefad885212d5e8f36a94a4"},
{file = "fonttools-4.51.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d145976194a5242fdd22df18a1b451481a88071feadf251221af110ca8f00ce"}, {file = "fonttools-4.52.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2a8c1101d06cc8fca7851dceb67afd53dd6fc0288bacaa632e647bc5afff58"},
{file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5b8cab0c137ca229433570151b5c1fc6af212680b58b15abd797dcdd9dd5051"}, {file = "fonttools-4.52.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a99b738227c0f6f2bbe381b45804a7c46653c95b9d7bf13f6f02884bc87e4930"},
{file = "fonttools-4.51.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:54dcf21a2f2d06ded676e3c3f9f74b2bafded3a8ff12f0983160b13e9f2fb4a7"}, {file = "fonttools-4.52.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:75aa00a16b9a64d1550e2e70d3582c7fe1ef18560e0cf066a4087fe6d11908a2"},
{file = "fonttools-4.51.0-cp39-cp39-win32.whl", hash = "sha256:0118ef998a0699a96c7b28457f15546815015a2710a1b23a7bf6c1be60c01636"}, {file = "fonttools-4.52.1-cp39-cp39-win32.whl", hash = "sha256:c2f09b4aa699cfed4bbebc1829c5f044b41976707dac9230ed00d5a9fc6452c1"},
{file = "fonttools-4.51.0-cp39-cp39-win_amd64.whl", hash = "sha256:599bdb75e220241cedc6faebfafedd7670335d2e29620d207dd0378a4e9ccc5a"}, {file = "fonttools-4.52.1-cp39-cp39-win_amd64.whl", hash = "sha256:78ea6e0d4c89f8e216995923b854dd10bd09e48d3a5a3ccb48bb68f436a409ad"},
{file = "fonttools-4.51.0-py3-none-any.whl", hash = "sha256:15c94eeef6b095831067f72c825eb0e2d48bb4cea0647c1b05c981ecba2bf39f"}, {file = "fonttools-4.52.1-py3-none-any.whl", hash = "sha256:faf5c83f83f7ddebdafdb453d02efdbea7fb494080d7a8d45a8a20db06ea8da5"},
{file = "fonttools-4.51.0.tar.gz", hash = "sha256:dc0673361331566d7a663d7ce0f6fdcbfbdc1f59c6e3ed1165ad7202ca183c68"}, {file = "fonttools-4.52.1.tar.gz", hash = "sha256:8c9204435aa6e5e9479a5ba4e669f05dea28b0c61958e0c0923cb164296d9329"},
] ]
[package.extras] [package.extras]
@ -2173,13 +2173,13 @@ test = ["objgraph", "psutil"]
[[package]] [[package]]
name = "griffe" name = "griffe"
version = "0.45.1" version = "0.45.2"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "griffe-0.45.1-py3-none-any.whl", hash = "sha256:12194c10ae07a7f46708741ad78419362cf8e5c883f449c7c48de1686611b853"}, {file = "griffe-0.45.2-py3-none-any.whl", hash = "sha256:297ec8530d0c68e5b98ff86fb588ebc3aa3559bb5dc21f3caea8d9542a350133"},
{file = "griffe-0.45.1.tar.gz", hash = "sha256:84ce9243a9e63c07d55563a735a0d07ef70b46c455616c174010e7fc816f4648"}, {file = "griffe-0.45.2.tar.gz", hash = "sha256:83ce7dcaafd8cb7f43cbf1a455155015a1eb624b1ffd93249e5e1c4a22b2fdb2"},
] ]
[package.dependencies] [package.dependencies]
@ -2892,13 +2892,13 @@ referencing = ">=0.31.0"
[[package]] [[package]]
name = "jupyter-client" name = "jupyter-client"
version = "8.6.1" version = "8.6.2"
description = "Jupyter protocol implementation and client libraries" description = "Jupyter protocol implementation and client libraries"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"},
{file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"},
] ]
[package.dependencies] [package.dependencies]
@ -2911,7 +2911,7 @@ traitlets = ">=5.3"
[package.extras] [package.extras]
docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
[[package]] [[package]]
name = "jupyter-core" name = "jupyter-core"
@ -3030,13 +3030,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>
[[package]] [[package]]
name = "jupyterlab" name = "jupyterlab"
version = "4.2.0" version = "4.2.1"
description = "JupyterLab computational environment" description = "JupyterLab computational environment"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "jupyterlab-4.2.0-py3-none-any.whl", hash = "sha256:0dfe9278e25a145362289c555d9beb505697d269c10e99909766af7c440ad3cc"}, {file = "jupyterlab-4.2.1-py3-none-any.whl", hash = "sha256:6ac6e3827b3c890e6e549800e8a4f4aaea6a69321e2240007902aa7a0c56a8e4"},
{file = "jupyterlab-4.2.0.tar.gz", hash = "sha256:356e9205a6a2ab689c47c8fe4919dba6c076e376d03f26baadc05748c2435dd5"}, {file = "jupyterlab-4.2.1.tar.gz", hash = "sha256:a10fb71085a6900820c62d43324005046402ffc8f0fde696103e37238a839507"},
] ]
[package.dependencies] [package.dependencies]
@ -3075,13 +3075,13 @@ files = [
[[package]] [[package]]
name = "jupyterlab-server" name = "jupyterlab-server"
version = "2.27.1" version = "2.27.2"
description = "A set of server components for JupyterLab and JupyterLab like applications." description = "A set of server components for JupyterLab and JupyterLab like applications."
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "jupyterlab_server-2.27.1-py3-none-any.whl", hash = "sha256:f5e26156e5258b24d532c84e7c74cc212e203bff93eb856f81c24c16daeecc75"}, {file = "jupyterlab_server-2.27.2-py3-none-any.whl", hash = "sha256:54aa2d64fd86383b5438d9f0c032f043c4d8c0264b8af9f60bd061157466ea43"},
{file = "jupyterlab_server-2.27.1.tar.gz", hash = "sha256:097b5ac709b676c7284ac9c5e373f11930a561f52cd5a86e4fc7e5a9c8a8631d"}, {file = "jupyterlab_server-2.27.2.tar.gz", hash = "sha256:15cbb349dc45e954e09bacf81b9f9bcb10815ff660fb2034ecd7417db3a7ea27"},
] ]
[package.dependencies] [package.dependencies]
@ -4606,42 +4606,37 @@ files = [
[[package]] [[package]]
name = "onnx" name = "onnx"
version = "1.16.0" version = "1.16.1"
description = "Open Neural Network Exchange" description = "Open Neural Network Exchange"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "onnx-1.16.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9eadbdce25b19d6216f426d6d99b8bc877a65ed92cbef9707751c6669190ba4f"}, {file = "onnx-1.16.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:bb2d392e5b7060082c2fb38eb5c44f67eb34ff5f0681bd6f45beff9abc6f7094"},
{file = "onnx-1.16.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:034ae21a2aaa2e9c14119a840d2926d213c27aad29e5e3edaa30145a745048e1"}, {file = "onnx-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15abf94a7868eed6db15a8b5024ba570c891cae77ca4d0e7258dabdad76980df"},
{file = "onnx-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec22a43d74eb1f2303373e2fbe7fbcaa45fb225f4eb146edfed1356ada7a9aea"}, {file = "onnx-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6251910e554f811fdd070164b0bc76d76b067b95576cb9dad4d52ae64fe014b5"},
{file = "onnx-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:298f28a2b5ac09145fa958513d3d1e6b349ccf86a877dbdcccad57713fe360b3"}, {file = "onnx-1.16.1-cp310-cp310-win32.whl", hash = "sha256:c11e3b15eee46cd20767e505cc3ba97457ef5ac93c3e459cdfb77943ff8fe9a7"},
{file = "onnx-1.16.0-cp310-cp310-win32.whl", hash = "sha256:66300197b52beca08bc6262d43c103289c5d45fde43fb51922ed1eb83658cf0c"}, {file = "onnx-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:b3d10405706807ec2ef493b2a78519fa0264cf190363e89478585aac1179b596"},
{file = "onnx-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:ae0029f5e47bf70a1a62e7f88c80bca4ef39b844a89910039184221775df5e43"}, {file = "onnx-1.16.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:006ba5059c85ce43e89a1486cc0276d0f1a8ec9c6efd1a9334fd3fa0f6e33b64"},
{file = "onnx-1.16.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:f51179d4af3372b4f3800c558d204b592c61e4b4a18b8f61e0eea7f46211221a"}, {file = "onnx-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1521ea7cd3497ecaf57d3b5e72d637ca5ebca632122a0806a9df99bedbeecdf8"},
{file = "onnx-1.16.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5202559070afec5144332db216c20f2fff8323cf7f6512b0ca11b215eacc5bf3"}, {file = "onnx-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cf20421aeac03872bea5fd6ebf92abe15c4d1461a2572eb839add5059e2a09"},
{file = "onnx-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77579e7c15b4df39d29465b216639a5f9b74026bdd9e4b6306cd19a32dcfe67c"}, {file = "onnx-1.16.1-cp311-cp311-win32.whl", hash = "sha256:f98e275b4f46a617a9c527e60c02531eae03cf67a04c26db8a1c20acee539533"},
{file = "onnx-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e60ca76ac24b65c25860d0f2d2cdd96d6320d062a01dd8ce87c5743603789b8"}, {file = "onnx-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:95aa20aa65a9035d7543e81713e8b0f611e213fc02171959ef4ee09311d1bf28"},
{file = "onnx-1.16.0-cp311-cp311-win32.whl", hash = "sha256:81b4ee01bc554e8a2b11ac6439882508a5377a1c6b452acd69a1eebb83571117"}, {file = "onnx-1.16.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:32e11d39bee04f927fab09f74c46cf76584094462311bab1aca9ccdae6ed3366"},
{file = "onnx-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:7449241e70b847b9c3eb8dae622df8c1b456d11032a9d7e26e0ee8a698d5bf86"}, {file = "onnx-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8884bf53b552873c0c9b072cb8625e7d4e8f3cc0529191632d24e3de58a3b93a"},
{file = "onnx-1.16.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:03a627488b1a9975d95d6a55582af3e14c7f3bb87444725b999935ddd271d352"}, {file = "onnx-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595b2830093f81361961295f7b0ebb6000423bcd04123d516d081c306002e387"},
{file = "onnx-1.16.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c392faeabd9283ee344ccb4b067d1fea9dfc614fa1f0de7c47589efd79e15e78"}, {file = "onnx-1.16.1-cp312-cp312-win32.whl", hash = "sha256:2fde4dd5bc278b3fc8148f460bce8807b2874c66f48529df9444cdbc9ecf456b"},
{file = "onnx-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0efeb46985de08f0efe758cb54ad3457e821a05c2eaf5ba2ccb8cd1602c08084"}, {file = "onnx-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:e69ad8c110d8c37d759cad019d498fdf3fd24e0bfaeb960e52fed0469a5d2974"},
{file = "onnx-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf14a3d32234f23e44abb73a755cb96a423fac7f004e8f046f36b10214151ee"}, {file = "onnx-1.16.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:0fc189195a40b5862fb77d97410c89823197fe19c1088ce150444eec72f200c1"},
{file = "onnx-1.16.0-cp312-cp312-win32.whl", hash = "sha256:62a2e27ae8ba5fc9b4a2620301446a517b5ffaaf8566611de7a7c2160f5bcf4c"}, {file = "onnx-1.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496ba17b16a74711081772e1b03f3207959972e351298e51abdc600051027a22"},
{file = "onnx-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:3e0860fea94efde777e81a6f68f65761ed5e5f3adea2e050d7fbe373a9ae05b3"}, {file = "onnx-1.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3faf239b48418b3ea6fe73bd4d86807b903d0b2ebd20b8b8c84f83741b0f18"},
{file = "onnx-1.16.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:70a90649318f3470985439ea078277c9fb2a2e6e2fd7c8f3f2b279402ad6c7e6"}, {file = "onnx-1.16.1-cp38-cp38-win32.whl", hash = "sha256:18b22143836838591f6551b089196e69f60c47fabce52b4b72b4cb37522645aa"},
{file = "onnx-1.16.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:71839546b7f93be4fa807995b182ab4b4414c9dbf049fee11eaaced16fcf8df2"}, {file = "onnx-1.16.1-cp38-cp38-win_amd64.whl", hash = "sha256:8c2b70d602acfb90056fbdc60ef26f4658f964591212a4e9dbbda922ff43061b"},
{file = "onnx-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7665217c45a61eb44718c8e9349d2ad004efa0cb9fbc4be5c6d5e18b9fe12b52"}, {file = "onnx-1.16.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2bed6fe05905b073206cabbb4463c58050cf8d544192303c09927b229f93ac14"},
{file = "onnx-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5752bbbd5717304a7643643dba383a2fb31e8eb0682f4e7b7d141206328a73b"}, {file = "onnx-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5798414332534a41404a7ff83677d49ced01d70160e1541484cce647f2295051"},
{file = "onnx-1.16.0-cp38-cp38-win32.whl", hash = "sha256:257858cbcb2055284f09fa2ae2b1cfd64f5850367da388d6e7e7b05920a40c90"}, {file = "onnx-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa7518d6d27f357261a4014079dec364cad6fef827d0b3fe1d3ff59939a68394"},
{file = "onnx-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:209fe84995a28038e29ae8369edd35f33e0ef1ebc3bddbf6584629823469deb1"}, {file = "onnx-1.16.1-cp39-cp39-win32.whl", hash = "sha256:67f372db4fe8fe61e00b762af5b0833aa72b5baa37e7e2f47d8668964ebff411"},
{file = "onnx-1.16.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:8cf3e518b1b1b960be542e7c62bed4e5219e04c85d540817b7027029537dec92"}, {file = "onnx-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c059fea6229c44d2d39c8f6e2f2f0d676d587c97f4c854c86f3e7bc97e0b31c"},
{file = "onnx-1.16.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:30f02beaf081c7d9fa3a8c566a912fc4408e28fc33b1452d58f890851691d364"}, {file = "onnx-1.16.1.tar.gz", hash = "sha256:8299193f0f2a3849bfc069641aa8e4f93696602da8d165632af8ee48ec7556b6"},
{file = "onnx-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb29a9a692b522deef1f6b8f2145da62c0c43ea1ed5b4c0f66f827fdc28847d"},
{file = "onnx-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7755cbd5f4e47952e37276ea5978a46fc8346684392315902b5ed4a719d87d06"},
{file = "onnx-1.16.0-cp39-cp39-win32.whl", hash = "sha256:7532343dc5b8b5e7c3e3efa441a3100552f7600155c4db9120acd7574f64ffbf"},
{file = "onnx-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:d7886c05aa6d583ec42f6287678923c1e343afc4350e49d5b36a0023772ffa22"},
{file = "onnx-1.16.0.tar.gz", hash = "sha256:237c6987c6c59d9f44b6136f5819af79574f8d96a760a1fa843bede11f3822f7"},
] ]
[package.dependencies] [package.dependencies]
@ -4946,6 +4941,7 @@ optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
files = [ files = [
{file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
{file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
@ -4966,6 +4962,7 @@ files = [
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
{file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
{file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
{file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
@ -5681,6 +5678,25 @@ files = [
[package.dependencies] [package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-settings"
version = "2.2.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"},
{file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"},
]
[package.dependencies]
pydantic = ">=2.3.0"
python-dotenv = ">=0.21.0"
[package.extras]
toml = ["tomli (>=2.0.1)"]
yaml = ["pyyaml (>=6.0.1)"]
[[package]] [[package]]
name = "pygments" name = "pygments"
version = "2.18.0" version = "2.18.0"
@ -6041,7 +6057,6 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@ -6666,36 +6681,36 @@ tests = ["black (>=24.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.9)", "numpydoc (
[[package]] [[package]]
name = "scipy" name = "scipy"
version = "1.13.0" version = "1.13.1"
description = "Fundamental algorithms for scientific computing in Python" description = "Fundamental algorithms for scientific computing in Python"
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
files = [ files = [
{file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"},
{file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"},
{file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"},
{file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"},
{file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"},
{file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"},
{file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"},
{file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"},
{file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"},
{file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"},
{file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"},
{file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"},
{file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"},
{file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"},
{file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"},
{file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"},
{file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"},
{file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"},
{file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"},
{file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"},
{file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"},
{file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"},
{file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"},
{file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"},
{file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"},
] ]
[package.dependencies] [package.dependencies]
@ -6735,13 +6750,13 @@ win32 = ["pywin32"]
[[package]] [[package]]
name = "sentry-sdk" name = "sentry-sdk"
version = "2.2.1" version = "2.3.1"
description = "Python client for Sentry (https://sentry.io)" description = "Python client for Sentry (https://sentry.io)"
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
files = [ files = [
{file = "sentry_sdk-2.2.1-py2.py3-none-any.whl", hash = "sha256:7d617a1b30e80c41f3b542347651fcf90bb0a36f3a398be58b4f06b79c8d85bc"}, {file = "sentry_sdk-2.3.1-py2.py3-none-any.whl", hash = "sha256:c5aeb095ba226391d337dd42a6f9470d86c9fc236ecc71cfc7cd1942b45010c6"},
{file = "sentry_sdk-2.2.1.tar.gz", hash = "sha256:8aa2ec825724d8d9d645cab68e6034928b1a6a148503af3e361db3fa6401183f"}, {file = "sentry_sdk-2.3.1.tar.gz", hash = "sha256:139a71a19f5e9eb5d3623942491ce03cf8ebc14ea2e39ba3e6fe79560d8a5b1f"},
] ]
[package.dependencies] [package.dependencies]
@ -7780,24 +7795,24 @@ files = [
[[package]] [[package]]
name = "types-setuptools" name = "types-setuptools"
version = "69.5.0.20240522" version = "70.0.0.20240524"
description = "Typing stubs for setuptools" description = "Typing stubs for setuptools"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "types-setuptools-69.5.0.20240522.tar.gz", hash = "sha256:c5a97601b2d040d3b9fcd0633730f0a8c86ebef208552525c97301427f261549"}, {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"},
{file = "types_setuptools-69.5.0.20240522-py3-none-any.whl", hash = "sha256:e27231cbc80648cfaee4921d2f1150107fdf8d33666958abf2aba0191a82688b"}, {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"},
] ]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.11.0" version = "4.12.0"
description = "Backported and Experimental Type Hints for Python 3.8+" description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"},
{file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"},
] ]
[[package]] [[package]]
@ -8005,40 +8020,43 @@ colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\" and python
[[package]] [[package]]
name = "watchdog" name = "watchdog"
version = "4.0.0" version = "4.0.1"
description = "Filesystem events monitoring" description = "Filesystem events monitoring"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"},
{file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"},
{file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"},
{file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"},
{file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"},
{file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"},
{file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"},
{file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"},
{file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
{file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"},
{file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"},
{file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"},
{file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"},
{file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"},
{file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"},
{file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
{file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
{file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
{file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"},
{file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"},
{file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"},
{file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"},
{file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"},
{file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"},
{file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"},
{file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"},
] ]
[package.extras] [package.extras]
@ -8518,4 +8536,4 @@ weaviate = ["weaviate-client"]
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = ">=3.9.0,<3.12" python-versions = ">=3.9.0,<3.12"
content-hash = "a8347b417527887fff3aea06f51acdd4d49d9ca76a8901005673c9831d42a765" content-hash = "325ea9dcaff8a1c09711aae43ed06bc59b18b3da2fbbb1a9499cb708f7f79e74"

View file

@ -71,6 +71,7 @@ protobuf = "<5.0.0"
langchain-community = "0.0.38" langchain-community = "0.0.38"
deepeval = "^0.21.42" deepeval = "^0.21.42"
falkordb = "^1.0.4" falkordb = "^1.0.4"
pydantic-settings = "^2.2.1"
[tool.poetry.extras] [tool.poetry.extras]

2
pytest.ini Normal file
View file

@ -0,0 +1,2 @@
[pytest]
addopts = tests/

0
tests/__init__.py Normal file
View file

7
tests/import_test.py Normal file
View file

@ -0,0 +1,7 @@
def test_import_cognee():
    """Smoke test: the cognee package must be importable without errors.

    Raises:
        AssertionError: if importing ``cognee`` fails, carrying the
            original ImportError message for diagnosis.
    """
    try:
        import cognee  # noqa: F401  -- imported only to prove importability
    except ImportError as e:
        # Raise explicitly instead of `assert False`: assert statements are
        # stripped when Python runs with -O, which would silently pass the
        # test even when the import is broken.
        raise AssertionError(f"Failed to import cognee: {e}") from e