Merge branch 'dev' into feature/mistral_llm_provider

This commit is contained in:
Aniruddha Mandal 2025-09-29 13:08:08 +05:30
commit 3b57a3fcfe
50 changed files with 1136 additions and 1089 deletions

View file

@ -176,7 +176,14 @@ ENABLE_BACKEND_ACCESS_CONTROL=False
# Cognee Cloud API settings for syncing data to/from cloud infrastructure
COGNEE_CLOUD_API_URL="http://localhost:8001"
COGNEE_CLOUD_AUTH_TOKEN="your-auth-token"
COGNEE_CLOUD_AUTH_TOKEN="your-api-key"
################################################################################
# UI Settings
################################################################################
# URL where the frontend is served, defaults to http://localhost:3000
UI_APP_URL=http://localhost:3000
################################################################################
# 🛠️ DEV Settings
@ -196,6 +203,16 @@ LITELLM_LOG="ERROR"
# DEFAULT_USER_EMAIL=""
# DEFAULT_USER_PASSWORD=""
################################################################################
# 📂 AWS Settings
################################################################################
#AWS_REGION=""
#AWS_ENDPOINT_URL=""
#AWS_ACCESS_KEY_ID=""
#AWS_SECRET_ACCESS_KEY=""
#AWS_SESSION_TOKEN=""
------------------------------- END OF POSSIBLE SETTINGS -------------------------------

View file

@ -24,7 +24,7 @@ runs:
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
- name: Rebuild uv lockfile
shell: bash
run: |

View file

@ -188,6 +188,7 @@ jobs:
uses: ./.github/actions/cognee_setup
with:
python-version: ${{ inputs.python-version }}
extra-dependencies: "baml"
- name: Run Simple Examples
run: uv run python ./examples/python/simple_example.py

View file

@ -166,9 +166,6 @@ jobs:
python-version: '3.11.x'
extra-dependencies: "aws"
- name: Dependencies already installed
run: echo "Dependencies already installed in setup"
- name: Run S3 Bucket Test
env:
ENV: 'dev'

View file

@ -18,6 +18,7 @@ jobs:
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
extra-dependencies: "aws"
- name: Run S3 File Storage Test
env:

View file

@ -2,10 +2,11 @@
import Link from "next/link";
import Image from "next/image";
import { useBoolean } from "@/utils";
import { useEffect } from "react";
import { useBoolean, fetch } from "@/utils";
import { CloseIcon, CloudIcon, CogneeIcon } from "../Icons";
import { CTAButton, GhostButton, IconButton, Modal } from "../elements";
import { CTAButton, GhostButton, IconButton, Modal, StatusDot } from "../elements";
import syncData from "@/modules/cloud/syncData";
interface HeaderProps {
@ -23,6 +24,12 @@ export default function Header({ user }: HeaderProps) {
setFalse: closeSyncModal,
} = useBoolean(false);
const {
value: isMCPConnected,
setTrue: setMCPConnected,
setFalse: setMCPDisconnected,
} = useBoolean(false);
const handleDataSyncConfirm = () => {
syncData()
.finally(() => {
@ -30,6 +37,19 @@ export default function Header({ user }: HeaderProps) {
});
};
useEffect(() => {
const checkMCPConnection = () => {
fetch.checkMCPHealth()
.then(() => setMCPConnected())
.catch(() => setMCPDisconnected());
};
checkMCPConnection();
const interval = setInterval(checkMCPConnection, 30000);
return () => clearInterval(interval);
}, [setMCPConnected, setMCPDisconnected]);
return (
<>
<header className="relative flex flex-row h-14 min-h-14 px-5 items-center justify-between w-full max-w-[1920px] mx-auto">
@ -39,6 +59,10 @@ export default function Header({ user }: HeaderProps) {
</div>
<div className="flex flex-row items-center gap-2.5">
<Link href="/mcp-status" className="!text-indigo-600 pl-4 pr-4">
<StatusDot className="mr-2" isActive={isMCPConnected} />
{ isMCPConnected ? "MCP connected" : "MCP disconnected" }
</Link>
<GhostButton onClick={openSyncModal} className="text-indigo-600 gap-3 pl-4 pr-4">
<CloudIcon />
<div>Sync</div>

View file

@ -0,0 +1,13 @@
import React from "react";

/**
 * Small colored indicator dot: green when `isActive`, red otherwise.
 * Extra utility classes (e.g. margins) can be passed via `className`.
 */
const StatusDot = ({ isActive, className }: { isActive: boolean, className?: string }) => {
  return (
    <span
      // `className` is optional — coalesce to "" so the literal string
      // "undefined" never leaks into the class attribute when it is omitted.
      className={`inline-block w-3 h-3 rounded-full ${className ?? ""} ${
        isActive ? "bg-green-500" : "bg-red-500"
      }`}
    />
  );
};

export default StatusDot;

View file

@ -8,5 +8,6 @@ export { default as IconButton } from "./IconButton";
export { default as GhostButton } from "./GhostButton";
export { default as NeutralButton } from "./NeutralButton";
export { default as StatusIndicator } from "./StatusIndicator";
export { default as StatusDot } from "./StatusDot";
export { default as Accordion } from "./Accordion";
export { default as Notebook } from "./Notebook";

View file

@ -9,6 +9,8 @@ const backendApiUrl = process.env.NEXT_PUBLIC_BACKEND_API_URL || "http://localho
const cloudApiUrl = process.env.NEXT_PUBLIC_CLOUD_API_URL || "http://localhost:8001";
const mcpApiUrl = process.env.NEXT_PUBLIC_MCP_API_URL || "http://localhost:8001";
let apiKey: string | null = process.env.NEXT_PUBLIC_COGWIT_API_KEY || null;
let accessToken: string | null = null;
@ -66,6 +68,10 @@ fetch.checkHealth = () => {
return global.fetch(`${backendApiUrl.replace("/api", "")}/health`);
};
// Liveness probe for the MCP server, polled by the Header component to drive
// the "MCP connected" indicator. Note: global.fetch resolves whenever the
// request completes (it does not reject on HTTP error status), so callers
// treat any resolution as "connected". replace() rewrites only the first
// "/api" occurrence — presumably the configured URL ends with "/api";
// TODO confirm against NEXT_PUBLIC_MCP_API_URL conventions.
fetch.checkMCPHealth = () => {
  return global.fetch(`${mcpApiUrl.replace("/api", "")}/health`);
};
fetch.setApiKey = (newApiKey: string) => {
apiKey = newApiKey;
};

View file

@ -48,27 +48,27 @@ if [ "$ENVIRONMENT" = "dev" ] || [ "$ENVIRONMENT" = "local" ]; then
if [ "$DEBUG" = "true" ]; then
echo "Waiting for the debugger to attach..."
if [ "$TRANSPORT_MODE" = "sse" ]; then
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee --transport stdio --no-migration
exec python -m debugpy --wait-for-client --listen 0.0.0.0:$DEBUG_PORT -m cognee-mcp --transport stdio --no-migration
fi
else
if [ "$TRANSPORT_MODE" = "sse" ]; then
exec cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then
exec cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else
exec cognee --transport stdio --no-migration
exec cognee-mcp --transport stdio --no-migration
fi
fi
else
if [ "$TRANSPORT_MODE" = "sse" ]; then
exec cognee --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec cognee-mcp --transport sse --host 0.0.0.0 --port $HTTP_PORT --no-migration
elif [ "$TRANSPORT_MODE" = "http" ]; then
exec cognee --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
exec cognee-mcp --transport http --host 0.0.0.0 --port $HTTP_PORT --no-migration
else
exec cognee --transport stdio --no-migration
exec cognee-mcp --transport stdio --no-migration
fi
fi

View file

@ -36,4 +36,4 @@ dev = [
allow-direct-references = true
[project.scripts]
cognee = "src:main"
cognee-mcp = "src:main"

View file

@ -19,6 +19,10 @@ from cognee.api.v1.cognify.code_graph_pipeline import run_code_graph_pipeline
from cognee.modules.search.types import SearchType
from cognee.shared.data_models import KnowledgeGraph
from cognee.modules.storage.utils import JSONEncoder
from starlette.responses import JSONResponse
from starlette.middleware import Middleware
from starlette.middleware.cors import CORSMiddleware
import uvicorn
try:
@ -38,6 +42,53 @@ mcp = FastMCP("Cognee")
logger = get_logger()
async def run_sse_with_cors():
    """Run the MCP server over SSE behind uvicorn with CORS enabled.

    Wraps the SSE app in a CORS layer so the browser-based frontend can reach
    the endpoint, then serves it on the host/port configured on ``mcp``.
    """
    import os  # local import keeps this change self-contained

    sse_app = mcp.sse_app()
    sse_app.add_middleware(
        CORSMiddleware,
        # Consistent with the backend API: honor UI_APP_URL instead of
        # hard-coding the frontend origin (the default is unchanged).
        allow_origins=[os.getenv("UI_APP_URL", "http://localhost:3000")],
        allow_credentials=True,
        allow_methods=["GET"],
        allow_headers=["*"],
    )

    config = uvicorn.Config(
        sse_app,
        host=mcp.settings.host,
        port=mcp.settings.port,
        log_level=mcp.settings.log_level.lower(),
    )
    server = uvicorn.Server(config)
    await server.serve()
async def run_http_with_cors():
    """Run the MCP server over streamable HTTP behind uvicorn with CORS enabled.

    Wraps the streamable-HTTP app in a CORS layer so the browser-based
    frontend can call it, then serves it on the host/port configured on
    ``mcp``.
    """
    import os  # local import keeps this change self-contained

    http_app = mcp.streamable_http_app()
    http_app.add_middleware(
        CORSMiddleware,
        # Consistent with the backend API: honor UI_APP_URL instead of
        # hard-coding the frontend origin (the default is unchanged).
        allow_origins=[os.getenv("UI_APP_URL", "http://localhost:3000")],
        allow_credentials=True,
        # NOTE(review): streamable-HTTP MCP clients typically POST messages —
        # confirm a GET-only CORS allowance is sufficient here.
        allow_methods=["GET"],
        allow_headers=["*"],
    )

    config = uvicorn.Config(
        http_app,
        host=mcp.settings.host,
        port=mcp.settings.port,
        log_level=mcp.settings.log_level.lower(),
    )
    server = uvicorn.Server(config)
    await server.serve()
# Health endpoint polled by the frontend (fetch.checkMCPHealth) to display
# MCP connection status.
@mcp.custom_route("/health", methods=["GET"])
async def health_check(request):
    # Liveness only: reports "ok" whenever the server is up; performs no
    # dependency or readiness checks.
    return JSONResponse({"status": "ok"})
@mcp.tool()
async def cognee_add_developer_rules(
base_path: str = ".", graph_model_file: str = None, graph_model_name: str = None
@ -975,12 +1026,12 @@ async def main():
await mcp.run_stdio_async()
elif args.transport == "sse":
logger.info(f"Running MCP server with SSE transport on {args.host}:{args.port}")
await mcp.run_sse_async()
await run_sse_with_cors()
elif args.transport == "http":
logger.info(
f"Running MCP server with Streamable HTTP transport on {args.host}:{args.port}{args.path}"
)
await mcp.run_streamable_http_async()
await run_http_with_cors()
if __name__ == "__main__":

View file

@ -3,7 +3,6 @@
import os
import uvicorn
import sentry_sdk
from traceback import format_exc
from contextlib import asynccontextmanager
from fastapi import Request
@ -42,11 +41,18 @@ from cognee.modules.users.methods.get_authenticated_user import REQUIRE_AUTHENTI
logger = get_logger()
if os.getenv("ENV", "prod") == "prod":
sentry_sdk.init(
dsn=os.getenv("SENTRY_REPORTING_URL"),
traces_sample_rate=1.0,
profiles_sample_rate=1.0,
)
try:
import sentry_sdk
sentry_sdk.init(
dsn=os.getenv("SENTRY_REPORTING_URL"),
traces_sample_rate=1.0,
profiles_sample_rate=1.0,
)
except ImportError:
logger.info(
"Sentry SDK not available. Install with 'pip install cognee\"[monitoring]\"' to enable error monitoring."
)
app_environment = os.getenv("ENV", "prod")
@ -81,7 +87,7 @@ if CORS_ALLOWED_ORIGINS:
]
else:
allowed_origins = [
"http://localhost:3000",
os.getenv("UI_APP_URL", "http://localhost:3000"),
] # Block all except explicitly set origins
app.add_middleware(

View file

@ -1 +1 @@
from .ui import start_ui, stop_ui, ui
from .ui import start_ui

View file

@ -1,5 +1,5 @@
import os
import signal
import socket
import subprocess
import threading
import time
@ -7,7 +7,7 @@ import webbrowser
import zipfile
import requests
from pathlib import Path
from typing import Callable, Optional, Tuple
from typing import Callable, Optional, Tuple, List
import tempfile
import shutil
@ -17,6 +17,80 @@ from cognee.version import get_cognee_version
logger = get_logger()
def _stream_process_output(
process: subprocess.Popen, stream_name: str, prefix: str, color_code: str = ""
) -> threading.Thread:
"""
Stream output from a process with a prefix to identify the source.
Args:
process: The subprocess to monitor
stream_name: 'stdout' or 'stderr'
prefix: Text prefix for each line (e.g., '[BACKEND]', '[FRONTEND]')
color_code: ANSI color code for the prefix (optional)
Returns:
Thread that handles the streaming
"""
def stream_reader():
stream = getattr(process, stream_name)
if stream is None:
return
reset_code = "\033[0m" if color_code else ""
try:
for line in iter(stream.readline, b""):
if line:
line_text = line.decode("utf-8").rstrip()
if line_text:
print(f"{color_code}{prefix}{reset_code} {line_text}", flush=True)
except Exception:
pass
finally:
if stream:
stream.close()
thread = threading.Thread(target=stream_reader, daemon=True)
thread.start()
return thread
def _is_port_available(port: int) -> bool:
"""
Check if a port is available on localhost.
Returns True if the port is available, False otherwise.
"""
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.settimeout(1) # 1 second timeout
result = sock.connect_ex(("localhost", port))
return result != 0 # Port is available if connection fails
except Exception:
return False
def _check_required_ports(ports_to_check: List[Tuple[int, str]]) -> Tuple[bool, List[str]]:
    """
    Probe every (port, service_name) pair on localhost and collect the busy ones.

    Args:
        ports_to_check: List of (port, service_name) tuples

    Returns:
        Tuple of (all_available, unavailable_services), where the second
        element holds "<service> (port <n>)" labels for ports already in use.
    """
    blocked: List[str] = []
    for candidate_port, label in ports_to_check:
        if _is_port_available(candidate_port):
            continue
        blocked.append(f"{label} (port {candidate_port})")
        logger.error(f"Port {candidate_port} is already in use for {label}")
    return not blocked, blocked
def normalize_version_for_comparison(version: str) -> str:
"""
Normalize version string for comparison.
@ -327,55 +401,111 @@ def prompt_user_for_download() -> bool:
def start_ui(
pid_callback: Callable[[int], None],
host: str = "localhost",
port: int = 3000,
open_browser: bool = True,
auto_download: bool = False,
start_backend: bool = False,
backend_host: str = "localhost",
backend_port: int = 8000,
start_mcp: bool = False,
mcp_port: int = 8001,
) -> Optional[subprocess.Popen]:
"""
Start the cognee frontend UI server, optionally with the backend API server.
Start the cognee frontend UI server, optionally with the backend API server and MCP server.
This function will:
1. Optionally start the cognee backend API server
2. Find the cognee-frontend directory (development) or download it (pip install)
3. Check if Node.js and npm are available (for development mode)
4. Install dependencies if needed (development mode)
5. Start the frontend server
6. Optionally open the browser
2. Optionally start the cognee MCP server
3. Find the cognee-frontend directory (development) or download it (pip install)
4. Check if Node.js and npm are available (for development mode)
5. Install dependencies if needed (development mode)
6. Start the frontend server
7. Optionally open the browser
Args:
pid_callback: Callback to notify with PID of each spawned process
host: Host to bind the frontend server to (default: localhost)
port: Port to run the frontend server on (default: 3000)
open_browser: Whether to open the browser automatically (default: True)
auto_download: If True, download frontend without prompting (default: False)
start_backend: If True, also start the cognee API backend server (default: False)
backend_host: Host to bind the backend server to (default: localhost)
backend_port: Port to run the backend server on (default: 8000)
start_mcp: If True, also start the cognee MCP server (default: False)
mcp_port: Port to run the MCP server on (default: 8001)
Returns:
subprocess.Popen object representing the running frontend server, or None if failed
Note: If backend is started, it runs in a separate process that will be cleaned up
when the frontend process is terminated.
Note: If backend and/or MCP server are started, they run in separate processes
that will be cleaned up when the frontend process is terminated.
Example:
>>> import cognee
>>> def dummy_callback(pid): pass
>>> # Start just the frontend
>>> server = cognee.start_ui()
>>> server = cognee.start_ui(dummy_callback)
>>>
>>> # Start both frontend and backend
>>> server = cognee.start_ui(start_backend=True)
>>> server = cognee.start_ui(dummy_callback, start_backend=True)
>>> # UI will be available at http://localhost:3000
>>> # API will be available at http://localhost:8000
>>> # To stop both servers later:
>>>
>>> # Start frontend with MCP server
>>> server = cognee.start_ui(dummy_callback, start_mcp=True)
>>> # UI will be available at http://localhost:3000
>>> # MCP server will be available at http://127.0.0.1:8001/sse
>>> # To stop all servers later:
>>> server.terminate()
"""
logger.info("Starting cognee UI...")
ports_to_check = [(port, "Frontend UI")]
if start_backend:
ports_to_check.append((backend_port, "Backend API"))
if start_mcp:
ports_to_check.append((mcp_port, "MCP Server"))
logger.info("Checking port availability...")
all_ports_available, unavailable_services = _check_required_ports(ports_to_check)
if not all_ports_available:
error_msg = f"Cannot start cognee UI: The following services have ports already in use: {', '.join(unavailable_services)}"
logger.error(error_msg)
logger.error("Please stop the conflicting services or change the port configuration.")
return None
logger.info("✓ All required ports are available")
backend_process = None
if start_mcp:
logger.info("Starting Cognee MCP server with Docker...")
cwd = os.getcwd()
env_file = os.path.join(cwd, ".env")
try:
mcp_process = subprocess.Popen(
[
"docker",
"run",
"-p",
f"{mcp_port}:8000",
"--rm",
"--env-file",
env_file,
"-e",
"TRANSPORT_MODE=sse",
"cognee/cognee-mcp:daulet-dev",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=os.setsid if hasattr(os, "setsid") else None,
)
_stream_process_output(mcp_process, "stdout", "[MCP]", "\033[34m") # Blue
_stream_process_output(mcp_process, "stderr", "[MCP]", "\033[34m") # Blue
pid_callback(mcp_process.pid)
logger.info(f"✓ Cognee MCP server starting on http://127.0.0.1:{mcp_port}/sse")
except Exception as e:
logger.error(f"Failed to start MCP server with Docker: {str(e)}")
# Start backend server if requested
if start_backend:
logger.info("Starting cognee backend API server...")
@ -389,16 +519,19 @@ def start_ui(
"uvicorn",
"cognee.api.client:app",
"--host",
backend_host,
"localhost",
"--port",
str(backend_port),
],
# Inherit stdout/stderr from parent process to show logs
stdout=None,
stderr=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=os.setsid if hasattr(os, "setsid") else None,
)
# Start threads to stream backend output with prefix
_stream_process_output(backend_process, "stdout", "[BACKEND]", "\033[32m") # Green
_stream_process_output(backend_process, "stderr", "[BACKEND]", "\033[32m") # Green
pid_callback(backend_process.pid)
# Give the backend a moment to start
@ -408,7 +541,7 @@ def start_ui(
logger.error("Backend server failed to start - process exited early")
return None
logger.info(f"✓ Backend API started at http://{backend_host}:{backend_port}")
logger.info(f"✓ Backend API started at http://localhost:{backend_port}")
except Exception as e:
logger.error(f"Failed to start backend server: {str(e)}")
@ -453,11 +586,11 @@ def start_ui(
# Prepare environment variables
env = os.environ.copy()
env["HOST"] = host
env["HOST"] = "localhost"
env["PORT"] = str(port)
# Start the development server
logger.info(f"Starting frontend server at http://{host}:{port}")
logger.info(f"Starting frontend server at http://localhost:{port}")
logger.info("This may take a moment to compile and start...")
try:
@ -468,10 +601,13 @@ def start_ui(
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
preexec_fn=os.setsid if hasattr(os, "setsid") else None,
)
# Start threads to stream frontend output with prefix
_stream_process_output(process, "stdout", "[FRONTEND]", "\033[33m") # Yellow
_stream_process_output(process, "stderr", "[FRONTEND]", "\033[33m") # Yellow
pid_callback(process.pid)
# Give it a moment to start up
@ -479,10 +615,7 @@ def start_ui(
# Check if process is still running
if process.poll() is not None:
stdout, stderr = process.communicate()
logger.error("Frontend server failed to start:")
logger.error(f"stdout: {stdout}")
logger.error(f"stderr: {stderr}")
logger.error("Frontend server failed to start - check the logs above for details")
return None
# Open browser if requested
@ -491,7 +624,7 @@ def start_ui(
def open_browser_delayed():
time.sleep(5) # Give Next.js time to fully start
try:
webbrowser.open(f"http://{host}:{port}") # TODO: use dashboard url?
webbrowser.open(f"http://localhost:{port}")
except Exception as e:
logger.warning(f"Could not open browser automatically: {e}")
@ -499,13 +632,9 @@ def start_ui(
browser_thread.start()
logger.info("✓ Cognee UI is starting up...")
logger.info(f"✓ Open your browser to: http://{host}:{port}")
logger.info(f"✓ Open your browser to: http://localhost:{port}")
logger.info("✓ The UI will be available once Next.js finishes compiling")
# Store backend process reference in the frontend process for cleanup
if backend_process:
process._cognee_backend_process = backend_process
return process
except Exception as e:
@ -523,102 +652,3 @@ def start_ui(
except (OSError, ProcessLookupError):
pass
return None
def stop_ui(process: subprocess.Popen) -> bool:
    """
    Stop a running UI server process and backend process (if started), along with all their children.

    Args:
        process: The subprocess.Popen object returned by start_ui()

    Returns:
        bool: True if stopped successfully, False otherwise
    """
    # NOTE(review): this function uses the `signal` module; confirm the file
    # still imports it (the module header appears to have been reworked).
    if not process:
        return False

    success = True

    try:
        # First, stop the backend process if it exists
        # (start_ui stashes it on the frontend Popen as a private attribute).
        backend_process = getattr(process, "_cognee_backend_process", None)
        if backend_process:
            logger.info("Stopping backend server...")
            try:
                backend_process.terminate()
                try:
                    # Give the backend 5 seconds to exit cleanly before killing.
                    backend_process.wait(timeout=5)
                    logger.info("Backend server stopped gracefully")
                except subprocess.TimeoutExpired:
                    logger.warning("Backend didn't terminate gracefully, forcing kill")
                    backend_process.kill()
                    backend_process.wait()
                logger.info("Backend server stopped")
            except Exception as e:
                # A backend failure is recorded but does not abort frontend shutdown.
                logger.error(f"Error stopping backend server: {str(e)}")
                success = False

        # Now stop the frontend process
        logger.info("Stopping frontend server...")

        # Try to terminate the process group (includes child processes like Next.js)
        # killpg is POSIX-only; the else-branch handles platforms without it (Windows).
        if hasattr(os, "killpg"):
            try:
                # Kill the entire process group
                os.killpg(os.getpgid(process.pid), signal.SIGTERM)
                logger.debug("Sent SIGTERM to process group")
            except (OSError, ProcessLookupError):
                # Fall back to terminating just the main process
                process.terminate()
                logger.debug("Terminated main process only")
        else:
            process.terminate()
            logger.debug("Terminated main process (Windows)")

        try:
            # The frontend gets a longer (10s) grace period than the backend.
            process.wait(timeout=10)
            logger.info("Frontend server stopped gracefully")
        except subprocess.TimeoutExpired:
            logger.warning("Frontend didn't terminate gracefully, forcing kill")
            # Force kill the process group
            if hasattr(os, "killpg"):
                try:
                    os.killpg(os.getpgid(process.pid), signal.SIGKILL)
                    logger.debug("Sent SIGKILL to process group")
                except (OSError, ProcessLookupError):
                    process.kill()
                    logger.debug("Force killed main process only")
            else:
                process.kill()
                logger.debug("Force killed main process (Windows)")
            # Reap the killed process to avoid a zombie.
            process.wait()

        if success:
            logger.info("UI servers stopped successfully")
        return success

    except Exception as e:
        logger.error(f"Error stopping UI servers: {str(e)}")
        return False
# Convenience function similar to DuckDB's approach
def ui() -> Optional[subprocess.Popen]:
    """
    Convenient alias for start_ui() with default parameters.
    Similar to how DuckDB provides simple ui() function.

    Returns:
        The frontend server process, or None if startup failed.
    """
    # start_ui's first parameter (pid_callback) is required; supply a no-op
    # so this zero-argument convenience wrapper does not raise TypeError.
    return start_ui(lambda _pid: None)
if __name__ == "__main__":
    # Manual smoke test: start the servers, block until Enter, then clean up.
    # NOTE(review): start_ui's first parameter (pid_callback) appears to be
    # required in the current signature — this bare start_ui() call would
    # raise TypeError; confirm and update if so.
    # Test the UI startup
    server = start_ui()
    if server:
        try:
            input("Press Enter to stop the server...")
        finally:
            # Ensure both frontend and backend are torn down on exit.
            stop_ui(server)

View file

@ -204,19 +204,27 @@ def main() -> int:
nonlocal spawned_pids
spawned_pids.append(pid)
frontend_port = 3000
start_backend, backend_port = True, 8000
start_mcp, mcp_port = True, 8001
server_process = start_ui(
host="localhost",
port=3000,
open_browser=True,
start_backend=True,
auto_download=True,
pid_callback=pid_callback,
port=frontend_port,
open_browser=True,
auto_download=True,
start_backend=start_backend,
backend_port=backend_port,
start_mcp=start_mcp,
mcp_port=mcp_port,
)
if server_process:
fmt.success("UI server started successfully!")
fmt.echo("The interface is available at: http://localhost:3000")
fmt.echo("The API backend is available at: http://localhost:8000")
fmt.echo(f"The interface is available at: http://localhost:{frontend_port}")
if start_backend:
fmt.echo(f"The API backend is available at: http://localhost:{backend_port}")
if start_mcp:
fmt.echo(f"The MCP server is available at: http://localhost:{mcp_port}")
fmt.note("Press Ctrl+C to stop the server...")
try:

View file

@ -1,6 +1,6 @@
import os
import json
import pandas as pd
import subprocess
import modal
import streamlit as st
@ -78,6 +78,14 @@ def main():
}
)
try:
import pandas as pd
except ImportError:
st.error(
"Pandas is required for the evaluation dashboard. Install with 'pip install cognee\"[evals]\"' to use this feature."
)
return
df = pd.DataFrame(records)
if df.empty:
st.warning("No JSON files found in the volume.")

View file

@ -1,48 +0,0 @@
from sklearn.feature_extraction.text import TfidfVectorizer
from cognee.infrastructure.data.exceptions.exceptions import KeywordExtractionError
from cognee.shared.utils import extract_pos_tags
def extract_keywords(text: str) -> list[str]:
    """
    Extract up to 15 noun keywords (each longer than 3 characters) from `text`.

    The text is POS-tagged, its nouns ("NN" tags) are collected, and TF-IDF
    over a single document built from those nouns ranks them by relevance.

    Parameters:
    -----------

        - text (str): The input text from which to extract keywords.

    Returns:
    --------

        - list[str]: Up to 15 nouns with more than 3 characters, most
          relevant first. Empty if the text contains no nouns.

    Raises:
    -------

        - KeywordExtractionError: If the input text is empty.
    """
    if not text:
        raise KeywordExtractionError()

    tags = extract_pos_tags(text)
    nouns = [word for (word, tag) in tags if tag == "NN"]

    if not nouns:
        # Nothing to rank; fit_transform on an empty corpus would raise an
        # unrelated ValueError, so return early instead.
        return []

    vectorizer = TfidfVectorizer()
    # Fit on ONE document containing all nouns. Fitting on the noun list
    # directly would make every noun its own document, and indexing row 0
    # below would then score only the first noun (all other scores 0),
    # producing an essentially arbitrary ranking.
    tfidf = vectorizer.fit_transform([" ".join(nouns)])

    top_nouns = sorted(
        vectorizer.vocabulary_, key=lambda x: tfidf[0, vectorizer.vocabulary_[x]], reverse=True
    )

    keywords = []
    for word in top_nouns:
        if len(word) > 3:
            keywords.append(word)
        if len(keywords) >= 15:
            break

    return keywords

View file

@ -39,8 +39,16 @@ def create_relational_engine(
connection_string = f"sqlite+aiosqlite:///{db_path}/{db_name}"
if db_provider == "postgres":
connection_string = (
f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
)
try:
# Test if asyncpg is available
import asyncpg
connection_string = (
f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
)
except ImportError:
raise ImportError(
"PostgreSQL dependencies are not installed. Please install with 'pip install cognee\"[postgres]\"' or 'pip install cognee\"[postgres-binary]\"' to use PostgreSQL functionality."
)
return SQLAlchemyAdapter(connection_string)

View file

@ -66,7 +66,12 @@ def create_vector_engine(
f"postgresql+asyncpg://{db_username}:{db_password}@{db_host}:{db_port}/{db_name}"
)
from .pgvector.PGVectorAdapter import PGVectorAdapter
try:
from .pgvector.PGVectorAdapter import PGVectorAdapter
except ImportError:
raise ImportError(
"PostgreSQL dependencies are not installed. Please install with 'pip install cognee\"[postgres]\"' or 'pip install cognee\"[postgres-binary]\"' to use PGVector functionality."
)
return PGVectorAdapter(
connection_string,

View file

@ -205,9 +205,12 @@ class LanceDBAdapter(VectorDBInterface):
collection = await self.get_collection(collection_name)
if len(data_point_ids) == 1:
results = await collection.query().where(f"id = '{data_point_ids[0]}'").to_pandas()
results = await collection.query().where(f"id = '{data_point_ids[0]}'")
else:
results = await collection.query().where(f"id IN {tuple(data_point_ids)}").to_pandas()
results = await collection.query().where(f"id IN {tuple(data_point_ids)}")
# Convert query results to list format
results_list = results.to_list() if hasattr(results, "to_list") else list(results)
return [
ScoredResult(
@ -215,7 +218,7 @@ class LanceDBAdapter(VectorDBInterface):
payload=result["payload"],
score=0,
)
for result in results.to_dict("index").values()
for result in results_list
]
async def search(
@ -242,9 +245,7 @@ class LanceDBAdapter(VectorDBInterface):
if limit == 0:
return []
results = await collection.vector_search(query_vector).limit(limit).to_pandas()
result_values = list(results.to_dict("index").values())
result_values = await collection.vector_search(query_vector).limit(limit).to_list()
if not result_values:
return []

View file

@ -9,7 +9,6 @@ from sqlalchemy.exc import ProgrammingError
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential
from asyncpg import DeadlockDetectedError, DuplicateTableError, UniqueViolationError
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.engine import DataPoint
from cognee.infrastructure.engine.utils import parse_id

View file

@ -1,6 +1,5 @@
import os
import s3fs
from typing import BinaryIO, Union
from typing import BinaryIO, Union, TYPE_CHECKING
from contextlib import asynccontextmanager
from cognee.infrastructure.files.storage.s3_config import get_s3_config
@ -8,23 +7,34 @@ from cognee.infrastructure.utils.run_async import run_async
from cognee.infrastructure.files.storage.FileBufferedReader import FileBufferedReader
from .storage import Storage
if TYPE_CHECKING:
import s3fs
class S3FileStorage(Storage):
"""
Manage local file storage operations such as storing, retrieving, and managing files on
the filesystem.
Manage S3 file storage operations such as storing, retrieving, and managing files on
S3-compatible storage.
"""
storage_path: str
s3: s3fs.S3FileSystem
s3: "s3fs.S3FileSystem"
def __init__(self, storage_path: str):
try:
import s3fs
except ImportError:
raise ImportError(
's3fs is required for S3FileStorage. Install it with: pip install cognee"[aws]"'
)
self.storage_path = storage_path
s3_config = get_s3_config()
if s3_config.aws_access_key_id is not None and s3_config.aws_secret_access_key is not None:
self.s3 = s3fs.S3FileSystem(
key=s3_config.aws_access_key_id,
secret=s3_config.aws_secret_access_key,
token=s3_config.aws_session_token,
anon=False,
endpoint_url=s3_config.aws_endpoint_url,
client_kwargs={"region_name": s3_config.aws_region},

View file

@ -8,6 +8,7 @@ class S3Config(BaseSettings):
aws_endpoint_url: Optional[str] = None
aws_access_key_id: Optional[str] = None
aws_secret_access_key: Optional[str] = None
aws_session_token: Optional[str] = None
model_config = SettingsConfigDict(env_file=".env", extra="allow")

View file

@ -4,7 +4,6 @@ from urllib.parse import urlparse
from contextlib import asynccontextmanager
from cognee.infrastructure.files.utils.get_data_file_path import get_data_file_path
from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
from cognee.infrastructure.files.storage.LocalFileStorage import LocalFileStorage
@ -23,23 +22,17 @@ async def open_data_file(file_path: str, mode: str = "rb", encoding: str = None,
yield file
elif file_path.startswith("s3://"):
try:
from cognee.infrastructure.files.storage.S3FileStorage import S3FileStorage
except ImportError:
raise ImportError(
"S3 dependencies are not installed. Please install with 'pip install cognee\"[aws]\"' to use S3 functionality."
)
normalized_url = get_data_file_path(file_path)
s3_dir_path = os.path.dirname(normalized_url)
s3_filename = os.path.basename(normalized_url)
# if "/" in s3_path:
# s3_dir = "/".join(s3_path.split("/")[:-1])
# s3_filename = s3_path.split("/")[-1]
# else:
# s3_dir = ""
# s3_filename = s3_path
# Extract filesystem path from S3 URL structure
# file_dir_path = (
# f"s3://{parsed_url.netloc}/{s3_dir}" if s3_dir else f"s3://{parsed_url.netloc}"
# )
# file_name = s3_filename
file_storage = S3FileStorage(s3_dir_path)
async with file_storage.open(s3_filename, mode=mode, **kwargs) as file:

View file

@ -1,9 +1,13 @@
import os
from typing import Optional, ClassVar
from typing import Optional, ClassVar, Any
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from pydantic import model_validator
from baml_py import ClientRegistry
try:
from baml_py import ClientRegistry
except ImportError:
ClientRegistry = None
class LLMConfig(BaseSettings):
@ -65,27 +69,36 @@ class LLMConfig(BaseSettings):
fallback_endpoint: str = ""
fallback_model: str = ""
baml_registry: ClassVar[ClientRegistry] = ClientRegistry()
baml_registry: Optional[Any] = None
model_config = SettingsConfigDict(env_file=".env", extra="allow")
def model_post_init(self, __context) -> None:
"""Initialize the BAML registry after the model is created."""
raw_options = {
"model": self.baml_llm_model,
"temperature": self.baml_llm_temperature,
"api_key": self.baml_llm_api_key,
"base_url": self.baml_llm_endpoint,
"api_version": self.baml_llm_api_version,
}
# Check if BAML is selected as structured output framework but not available
if self.structured_output_framework.lower() == "baml" and ClientRegistry is None:
raise ImportError(
"BAML is selected as structured output framework but not available. "
"Please install with 'pip install cognee\"[baml]\"' to use BAML extraction features."
)
elif self.structured_output_framework.lower() == "baml" and ClientRegistry is not None:
self.baml_registry = ClientRegistry()
# Note: keep the item only when the value is not None or an empty string (they would override baml default values)
options = {k: v for k, v in raw_options.items() if v not in (None, "")}
self.baml_registry.add_llm_client(
name=self.baml_llm_provider, provider=self.baml_llm_provider, options=options
)
# Sets the primary client
self.baml_registry.set_primary(self.baml_llm_provider)
raw_options = {
"model": self.baml_llm_model,
"temperature": self.baml_llm_temperature,
"api_key": self.baml_llm_api_key,
"base_url": self.baml_llm_endpoint,
"api_version": self.baml_llm_api_version,
}
# Note: keep the item only when the value is not None or an empty string (they would override baml default values)
options = {k: v for k, v in raw_options.items() if v not in (None, "")}
self.baml_registry.add_llm_client(
name=self.baml_llm_provider, provider=self.baml_llm_provider, options=options
)
# Sets the primary client
self.baml_registry.set_primary(self.baml_llm_provider)
@model_validator(mode="after")
def ensure_env_vars_for_ollama(self) -> "LLMConfig":

View file

@ -53,7 +53,8 @@ async def acreate_structured_output(
# Transform BAML response to proper pydantic reponse model
if response_model is str:
return str(result)
# Note: when a response model is set to string in python, result is stored in text property in the BAML response model
return str(result.text)
return response_model.model_validate(result.dict())

View file

@ -1,7 +1,6 @@
from typing import BinaryIO
from contextlib import asynccontextmanager
import hashlib
from cognee.infrastructure.data.utils.extract_keywords import extract_keywords
from .IngestionData import IngestionData

View file

@ -42,14 +42,12 @@ class TripletSearchContextProvider(BaseContextProvider):
self,
entities: List[DataPoint],
query: str,
user: User,
memory_fragment: CogneeGraph,
) -> List:
"""Creates search tasks for valid entities."""
tasks = [
brute_force_triplet_search(
query=f"{entity_text} {query}",
user=user,
top_k=self.top_k,
collections=self.collections,
properties_to_project=self.properties_to_project,
@ -84,9 +82,8 @@ class TripletSearchContextProvider(BaseContextProvider):
if not entities:
return "No entities provided for context search."
user = await get_default_user()
memory_fragment = await get_memory_fragment(self.properties_to_project)
search_tasks = self._get_search_tasks(entities, query, user, memory_fragment)
search_tasks = self._get_search_tasks(entities, query, memory_fragment)
if not search_tasks:
return "No valid entities found for context search."

View file

@ -93,11 +93,8 @@ class GraphCompletionRetriever(BaseGraphRetriever):
for field_name in index_fields:
vector_index_collections.append(f"{subclass.__name__}_{field_name}")
user = await get_default_user()
found_triplets = await brute_force_triplet_search(
query,
user=user,
top_k=self.top_k,
collections=vector_index_collections or None,
node_type=self.node_type,

View file

@ -89,7 +89,6 @@ async def get_memory_fragment(
async def brute_force_triplet_search(
query: str,
user: User,
top_k: int = 5,
collections: Optional[List[str]] = None,
properties_to_project: Optional[List[str]] = None,
@ -102,7 +101,6 @@ async def brute_force_triplet_search(
Args:
query (str): The search query.
user (User): The user performing the search.
top_k (int): The number of top results to retrieve.
collections (Optional[List[str]]): List of collections to query.
properties_to_project (Optional[List[str]]): List of properties to project.
@ -139,8 +137,6 @@ async def brute_force_triplet_search(
query_vector = (await vector_engine.embedding_engine.embed_text([query]))[0]
send_telemetry("cognee.brute_force_triplet_search EXECUTION STARTED", user.id)
async def search_in_collection(collection_name: str):
try:
return await vector_engine.search(
@ -176,20 +172,14 @@ async def brute_force_triplet_search(
results = await memory_fragment.calculate_top_triplet_importances(k=top_k)
send_telemetry("cognee.brute_force_triplet_search EXECUTION COMPLETED", user.id)
return results
except CollectionNotFoundError:
return []
except Exception as error:
logger.error(
"Error during brute force search for user: %s, query: %s. Error: %s",
user.id,
"Error during brute force search for query: %s. Error: %s",
query,
error,
)
send_telemetry(
"cognee.brute_force_triplet_search EXECUTION FAILED", user.id, {"error": str(error)}
)
raise error

View file

@ -35,7 +35,7 @@ async def no_access_control_search(
[get_completion, get_context] = search_tools
if only_context:
return await get_context(query_text)
return None, await get_context(query_text), []
context = await get_context(query_text)
result = await get_completion(query_text, context)

View file

@ -143,20 +143,35 @@ async def search(
context = prepared_search_results["context"]
datasets = prepared_search_results["datasets"]
return_value.append(
{
"search_result": [result] if result else None,
"dataset_id": datasets[0].id,
"dataset_name": datasets[0].name,
"graphs": graphs,
}
)
if only_context:
return_value.append(
{
"search_result": [context] if context else None,
"dataset_id": datasets[0].id,
"dataset_name": datasets[0].name,
"graphs": graphs,
}
)
else:
return_value.append(
{
"search_result": [result] if result else None,
"dataset_id": datasets[0].id,
"dataset_name": datasets[0].name,
"graphs": graphs,
}
)
return return_value
else:
return_value = []
for search_result in search_results:
result, context, datasets = search_result
return_value.append(result)
if only_context:
for search_result in search_results:
prepared_search_results = await prepare_search_result(search_result)
return_value.append(prepared_search_results["context"])
else:
for search_result in search_results:
result, context, datasets = search_result
return_value.append(result)
# For maintaining backwards compatibility
if len(return_value) == 1 and isinstance(return_value[0], list):
return return_value[0]

View file

@ -1,6 +1,5 @@
import os
import json
import networkx
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.files.storage.LocalFileStorage import LocalFileStorage
@ -9,6 +8,8 @@ logger = get_logger()
async def cognee_network_visualization(graph_data, destination_file_path: str = None):
import networkx
nodes_data, edges_data = graph_data
G = networkx.DiGraph()
@ -22,6 +23,9 @@ async def cognee_network_visualization(graph_data, destination_file_path: str =
"TableRow": "#f47710",
"TableType": "#6510f4",
"ColumnValue": "#13613a",
"SchemaTable": "#f47710",
"DatabaseSchema": "#6510f4",
"SchemaRelationship": "#13613a",
"default": "#D3D3D3",
}
@ -104,7 +108,7 @@ async def cognee_network_visualization(graph_data, destination_file_path: str =
.nodes circle { stroke: white; stroke-width: 0.5px; filter: drop-shadow(0 0 5px rgba(255,255,255,0.3)); }
.node-label { font-size: 5px; font-weight: bold; fill: white; text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
.edge-label { font-size: 3px; fill: rgba(255, 255, 255, 0.7); text-anchor: middle; dominant-baseline: middle; font-family: 'Inter', sans-serif; pointer-events: none; }
.tooltip {
position: absolute;
text-align: left;
@ -166,7 +170,7 @@ async def cognee_network_visualization(graph_data, destination_file_path: str =
// Create tooltip content for edge
var content = "<strong>Edge Information</strong><br/>";
content += "Relationship: " + d.relation + "<br/>";
// Show all weights
if (d.all_weights && Object.keys(d.all_weights).length > 0) {
content += "<strong>Weights:</strong><br/>";
@ -176,23 +180,23 @@ async def cognee_network_visualization(graph_data, destination_file_path: str =
} else if (d.weight !== null && d.weight !== undefined) {
content += "Weight: " + d.weight + "<br/>";
}
if (d.relationship_type) {
content += "Type: " + d.relationship_type + "<br/>";
}
// Add other edge properties
if (d.edge_info) {
Object.keys(d.edge_info).forEach(function(key) {
if (key !== 'weight' && key !== 'weights' && key !== 'relationship_type' &&
key !== 'source_node_id' && key !== 'target_node_id' &&
key !== 'relationship_name' && key !== 'updated_at' &&
if (key !== 'weight' && key !== 'weights' && key !== 'relationship_type' &&
key !== 'source_node_id' && key !== 'target_node_id' &&
key !== 'relationship_name' && key !== 'updated_at' &&
!key.startsWith('weight_')) {
content += key + ": " + d.edge_info[key] + "<br/>";
}
});
}
tooltip.html(content)
.style("left", (d3.event.pageX + 10) + "px")
.style("top", (d3.event.pageY - 10) + "px")

View file

@ -4,7 +4,6 @@ import os
import ssl
import requests
from datetime import datetime, timezone
import matplotlib.pyplot as plt
import http.server
import socketserver
from threading import Thread
@ -30,37 +29,6 @@ def create_secure_ssl_context() -> ssl.SSLContext:
return ssl.create_default_context()
def get_entities(tagged_tokens):
import nltk
nltk.download("maxent_ne_chunker", quiet=True)
from nltk.chunk import ne_chunk
return ne_chunk(tagged_tokens)
def extract_pos_tags(sentence):
"""Extract Part-of-Speech (POS) tags for words in a sentence."""
import nltk
# Ensure that the necessary NLTK resources are downloaded
nltk.download("words", quiet=True)
nltk.download("punkt", quiet=True)
nltk.download("averaged_perceptron_tagger", quiet=True)
from nltk.tag import pos_tag
from nltk.tokenize import word_tokenize
# Tokenize the sentence into words
tokens = word_tokenize(sentence)
# Tag each word with its corresponding POS tag
pos_tags = pos_tag(tokens)
return pos_tags
def get_anonymous_id():
"""Creates or reads a anonymous user id"""
tracking_id = os.getenv("TRACKING_ID", None)

View file

@ -1,309 +0,0 @@
# PROPOSED TO BE DEPRECATED
"""This module contains the OntologyEngine class which is responsible for adding graph ontology from a JSON or CSV file."""
import csv
import json
from cognee.shared.logging_utils import get_logger
from datetime import datetime, timezone
from fastapi import status
from typing import Any, Dict, List, Optional, Union, Type
import aiofiles
import pandas as pd
from pydantic import BaseModel
from cognee.modules.graph.exceptions import EntityNotFoundError
from cognee.modules.ingestion.exceptions import IngestionError
from cognee.infrastructure.data.chunking.config import get_chunk_config
from cognee.infrastructure.data.chunking.get_chunking_engine import get_chunk_engine
from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine
from cognee.infrastructure.files.utils.extract_text_from_file import extract_text_from_file
from cognee.infrastructure.files.utils.guess_file_type import guess_file_type, FileTypeException
from cognee.modules.data.methods.add_model_class_to_graph import (
add_model_class_to_graph,
)
from cognee.tasks.graph.models import NodeModel, GraphOntology
from cognee.shared.data_models import KnowledgeGraph
from cognee.modules.engine.utils import generate_node_id, generate_node_name
from cognee.infrastructure.llm.LLMGateway import LLMGateway
logger = get_logger("task:infer_data_ontology")
async def extract_ontology(content: str, response_model: Type[BaseModel]):
    """
    Extract a structured ontology from the given content via the configured LLM client.

    Reads the system prompt from a prompt file, then asks the LLM gateway to produce
    structured output conforming to the supplied response model.

    Parameters:
    -----------

        - content (str): The content from which to extract the ontology.
        - response_model (Type[BaseModel]): Pydantic model describing the structure of
          the expected ontology output.

    Returns:
    --------

        The structured ontology extracted from the content.
    """
    prompt = LLMGateway.read_query_prompt("extract_ontology.txt")
    return await LLMGateway.acreate_structured_output(content, prompt, response_model)
class OntologyEngine:
    """
    Manage ontology data and operations for graph structures, providing methods for data
    loading, flattening models, and adding ontological relationships to a graph database.

    Public methods:

    - flatten_model
    - recursive_flatten
    - load_data
    - add_graph_ontology
    """

    async def flatten_model(
        self, model: NodeModel, parent_id: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Flatten the model to a dictionary including optional parent ID and relationship details
        if available.

        Parameters:
        -----------

            - model (NodeModel): The NodeModel instance to flatten.
            - parent_id (Optional[str]): An optional ID of the parent node for hierarchical
              purposes. (default None)

        Returns:
        --------

            - Dict[str, Any]: A dictionary representation of the model with flattened
              attributes.
        """
        result = model.dict()
        result["parent_id"] = parent_id
        if model.default_relationship:
            # Lift the relationship sub-object into flat top-level keys.
            result.update(
                {
                    "relationship_type": model.default_relationship.type,
                    "relationship_source": model.default_relationship.source,
                    "relationship_target": model.default_relationship.target,
                }
            )
        return result

    async def recursive_flatten(
        self, items: Union[List[Dict[str, Any]], Dict[str, Any]], parent_id: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """
        Recursively flatten a hierarchical structure of models into a flat list of dictionaries.

        Parameters:
        -----------

            - items (Union[List[Dict[str, Any]], Dict[str, Any]]): A list or dictionary
              containing models to flatten.
            - parent_id (Optional[str]): An optional ID of the parent node to maintain hierarchy
              during flattening. (default None)

        Returns:
        --------

            - List[Dict[str, Any]]: A flat list of dictionaries representing the hierarchical
              model structure.
        """
        flat_list = []

        if isinstance(items, list):
            for item in items:
                flat_list.extend(await self.recursive_flatten(item, parent_id))
        elif isinstance(items, dict):
            model = NodeModel.model_validate(items)
            flat_list.append(await self.flatten_model(model, parent_id))
            # Children are flattened with the current node as their parent.
            for child in model.children:
                flat_list.extend(await self.recursive_flatten(child, model.node_id))

        return flat_list

    async def load_data(self, file_path: str) -> Union[List[Dict[str, Any]], Dict[str, Any]]:
        """
        Load data from a specified JSON or CSV file and return it in a structured format.

        Parameters:
        -----------

            - file_path (str): The path to the file to load data from.

        Returns:
        --------

            - Union[List[Dict[str, Any]], Dict[str, Any]]: Parsed data from the file as either a
              list of dictionaries or a single dictionary depending on content type.

        Raises:
        -------

            - IngestionError: On unsupported file formats or any read/parse failure.
        """
        try:
            if file_path.endswith(".json"):
                async with aiofiles.open(file_path, mode="r") as f:
                    data = await f.read()
                    return json.loads(data)
            elif file_path.endswith(".csv"):
                async with aiofiles.open(file_path, mode="r") as f:
                    content = await f.read()
                    reader = csv.DictReader(content.splitlines())
                    return list(reader)
            else:
                raise IngestionError(message="Unsupported file format")
        except Exception as e:
            raise IngestionError(
                message=f"Failed to load data from {file_path}: {e}",
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            )

    async def add_graph_ontology(self, file_path: str = None, documents: list = None):
        """
        Add graph ontology from a JSON or CSV file, or infer relationships from provided
        document content. Raise exceptions for invalid file types or missing entities.

        Parameters:
        -----------

            - file_path (str): Optional path to a file containing data to be loaded. (default
              None)
            - documents (list): Optional list of document objects for content extraction if no
              file path is provided. (default None)
        """
        if file_path is None:
            # No explicit ontology file: infer the ontology from document content via the LLM.
            initial_chunks_and_ids = []
            chunk_config = get_chunk_config()
            chunk_engine = get_chunk_engine()
            chunk_strategy = chunk_config.chunk_strategy

            for base_file in documents:
                with open(base_file.raw_data_location, "rb") as file:
                    try:
                        file_type = guess_file_type(file)
                        text = extract_text_from_file(file, file_type)
                        subchunks, chunks_with_ids = chunk_engine.chunk_data(
                            chunk_strategy,
                            text,
                            chunk_config.chunk_size,
                            chunk_config.chunk_overlap,
                        )
                        # Only keep documents whose first chunk carries index 1.
                        if chunks_with_ids[0][0] == 1:
                            initial_chunks_and_ids.append({base_file.id: chunks_with_ids})
                    except FileTypeException:
                        # BUGFIX: `file` is an open binary handle (not subscriptable);
                        # the document id lives on `base_file`.
                        logger.warning(
                            "File (%s) has an unknown file type. We are skipping it.",
                            base_file.id,
                        )

            ontology = await extract_ontology(str(initial_chunks_and_ids), GraphOntology)

            graph_client = await get_graph_engine()

            await graph_client.add_nodes(
                [
                    (
                        node.id,
                        dict(
                            uuid=generate_node_id(node.id),
                            name=generate_node_name(node.name),
                            type=generate_node_id(node.id),
                            description=node.description,
                            updated_at=datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"),
                        ),
                    )
                    for node in ontology.nodes
                ]
            )

            await graph_client.add_edges(
                (
                    generate_node_id(edge.source_id),
                    generate_node_id(edge.target_id),
                    edge.relationship_type,
                    dict(
                        source_node_id=generate_node_id(edge.source_id),
                        target_node_id=generate_node_id(edge.target_id),
                        relationship_name=edge.relationship_type,
                        updated_at=datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"),
                    ),
                )
                for edge in ontology.edges
            )
        else:
            dataset_level_information = documents[0][1]
            # Extract the list of valid IDs from the explanations
            valid_ids = {item["id"] for item in dataset_level_information}

            try:
                data = await self.load_data(file_path)
                flt_ontology = await self.recursive_flatten(data)
                df = pd.DataFrame(flt_ontology)
                graph_client = await get_graph_engine()

                for _, row in df.iterrows():
                    node_data = row.to_dict()
                    node_id = node_data.pop("node_id", None)

                    if node_id in valid_ids:
                        await graph_client.add_node(node_id, node_data)
                    else:
                        # Fail fast on unknown nodes instead of adding them to the graph.
                        raise EntityNotFoundError(
                            message=f"Node ID {node_id} not found in the dataset"
                        )

                    if pd.notna(row.get("relationship_source")) and pd.notna(
                        row.get("relationship_target")
                    ):
                        await graph_client.add_edge(
                            row["relationship_source"],
                            row["relationship_target"],
                            relationship_name=row["relationship_type"],
                            edge_properties={
                                "source_node_id": row["relationship_source"],
                                "target_node_id": row["relationship_target"],
                                "relationship_name": row["relationship_type"],
                                "updated_at": datetime.now(timezone.utc).strftime(
                                    "%Y-%m-%d %H:%M:%S"
                                ),
                            },
                        )
                return
            except Exception as e:
                raise RuntimeError(f"Failed to add graph ontology from {file_path}: {e}") from e
async def infer_data_ontology(documents, ontology_model=KnowledgeGraph, root_node_id=None):
    """
    Infer data ontology from provided documents and optionally add it to a graph.

    Parameters:
    -----------

        - documents: The documents from which to infer the ontology.
        - ontology_model: The ontology model to use for the inference, defaults to
          KnowledgeGraph. (default KnowledgeGraph)
        - root_node_id: An optional root node identifier for the ontology. (default None)
    """
    if ontology_model == KnowledgeGraph:
        # Default model: infer ontology directly from the document contents.
        root_node_id = await OntologyEngine().add_graph_ontology(documents=documents)
    else:
        # Custom model: mirror the model class structure into the graph instead.
        graph_engine = await get_graph_engine()
        await add_model_class_to_graph(ontology_model, graph_engine)

    yield (documents, root_node_id)

View file

@ -4,16 +4,20 @@ from sqlalchemy import text
from cognee.infrastructure.databases.relational.get_migration_relational_engine import (
get_migration_relational_engine,
)
from cognee.infrastructure.databases.relational.config import get_migration_config
from cognee.tasks.storage.index_data_points import index_data_points
from cognee.tasks.storage.index_graph_edges import index_graph_edges
from cognee.tasks.schema.ingest_database_schema import ingest_database_schema
from cognee.modules.engine.models import TableRow, TableType, ColumnValue
logger = logging.getLogger(__name__)
async def migrate_relational_database(graph_db, schema, migrate_column_data=True):
async def migrate_relational_database(
graph_db, schema, migrate_column_data=True, schema_only=False
):
"""
Migrates data from a relational database into a graph database.
@ -26,11 +30,133 @@ async def migrate_relational_database(graph_db, schema, migrate_column_data=True
Both TableType and TableRow inherit from DataPoint to maintain consistency with Cognee data model.
"""
# Create a mapping of node_id to node objects for referencing in edge creation
if schema_only:
node_mapping, edge_mapping = await schema_only_ingestion(schema)
else:
node_mapping, edge_mapping = await complete_database_ingestion(schema, migrate_column_data)
def _remove_duplicate_edges(edge_mapping):
seen = set()
unique_original_shape = []
for tup in edge_mapping:
# We go through all the tuples in the edge_mapping and we only add unique tuples to the list
# To eliminate duplicate edges.
source_id, target_id, rel_name, rel_dict = tup
# We need to convert the dictionary to a frozenset to be able to compare values for it
rel_dict_hashable = frozenset(sorted(rel_dict.items()))
hashable_tup = (source_id, target_id, rel_name, rel_dict_hashable)
# We use the seen set to keep track of unique edges
if hashable_tup not in seen:
# A list that has frozensets elements instead of dictionaries is needed to be able to compare values
seen.add(hashable_tup)
# append the original tuple shape (with the dictionary) if it's the first time we see it
unique_original_shape.append(tup)
return unique_original_shape
# Add all nodes and edges to the graph
# NOTE: Nodes and edges have to be added in batch for speed optimization, Especially for NetworkX.
# If we'd create nodes and add them to graph in real time the process would take too long.
# Every node and edge added to NetworkX is saved to file which is very slow when not done in batches.
await graph_db.add_nodes(list(node_mapping.values()))
await graph_db.add_edges(_remove_duplicate_edges(edge_mapping))
# In these steps we calculate the vector embeddings of our nodes and edges and save them to vector database
# Cognee uses this information to perform searches on the knowledge graph.
await index_data_points(list(node_mapping.values()))
await index_graph_edges()
logger.info("Data successfully migrated from relational database to desired graph database.")
return await graph_db.get_graph_data()
async def schema_only_ingestion(schema):
    """
    Build graph nodes and edges describing only the database schema (no row data).

    Converts the DataPoint models produced by ingest_database_schema into a node
    mapping (id -> DataPoint) and an edge list: each table is linked to the database
    node, and every foreign key is wired through a dedicated relationship node plus
    a direct table-to-table edge.

    Parameters:
    -----------

        - schema: Database schema mapping as produced by the migration engine.

    Returns:
    --------

        - (node_mapping, edge_mapping): dict of node id -> DataPoint, and a list of
          (source_id, target_id, relationship_name, properties) edge tuples.
    """
    node_mapping = {}
    edge_mapping = []

    # Calling the ingest_database_schema function to return DataPoint subclasses
    result = await ingest_database_schema(
        schema=schema,
        max_sample_rows=5,
    )

    database_schema = result["database_schema"]
    schema_tables = result["schema_tables"]
    schema_relationships = result["relationships"]

    database_node_id = database_schema.id
    node_mapping[database_node_id] = database_schema

    for table in schema_tables:
        table_node_id = table.id
        # Add SchemaTable DataPoint as a node.
        node_mapping[table_node_id] = table
        edge_mapping.append(
            (
                table_node_id,
                database_node_id,
                "is_part_of",
                dict(
                    source_node_id=table_node_id,
                    target_node_id=database_node_id,
                    relationship_name="is_part_of",
                ),
            )
        )

    table_name_to_id = {t.name: t.id for t in schema_tables}

    for rel in schema_relationships:
        source_table_id = table_name_to_id.get(rel.source_table)
        target_table_id = table_name_to_id.get(rel.target_table)

        if source_table_id is None or target_table_id is None:
            # BUGFIX: a foreign key may reference a table that is not part of the
            # extracted schema; skip it instead of emitting edges with None endpoints.
            logger.warning(
                "Skipping schema relationship %s: source or target table missing from schema.",
                rel.name,
            )
            continue

        relationship_id = rel.id
        # Add SchemaRelationship DataPoint as a node.
        node_mapping[relationship_id] = rel

        edge_mapping.append(
            (
                source_table_id,
                relationship_id,
                "has_relationship",
                dict(
                    source_node_id=source_table_id,
                    target_node_id=relationship_id,
                    relationship_name=rel.relationship_type,
                ),
            )
        )
        edge_mapping.append(
            (
                relationship_id,
                target_table_id,
                "has_relationship",
                dict(
                    source_node_id=relationship_id,
                    target_node_id=target_table_id,
                    relationship_name=rel.relationship_type,
                ),
            )
        )
        # Direct table-to-table edge mirroring the foreign key itself.
        edge_mapping.append(
            (
                source_table_id,
                target_table_id,
                rel.relationship_type,
                dict(
                    source_node_id=source_table_id,
                    target_node_id=target_table_id,
                    relationship_name=rel.relationship_type,
                ),
            )
        )

    return node_mapping, edge_mapping
async def complete_database_ingestion(schema, migrate_column_data):
engine = get_migration_relational_engine()
# Create a mapping of node_id to node objects for referencing in edge creation
node_mapping = {}
edge_mapping = []
async with engine.engine.begin() as cursor:
# First, create table type nodes for all tables
for table_name, details in schema.items():
@ -38,7 +164,7 @@ async def migrate_relational_database(graph_db, schema, migrate_column_data=True
table_node = TableType(
id=uuid5(NAMESPACE_OID, name=table_name),
name=table_name,
description=f"Table: {table_name}",
description=f'Relational database table with the following name: "{table_name}".',
)
# Add TableType node to mapping ( node will be added to the graph later based on this mapping )
@ -75,7 +201,7 @@ async def migrate_relational_database(graph_db, schema, migrate_column_data=True
name=node_id,
is_a=table_node,
properties=str(row_properties),
description=f"Row in {table_name} with {primary_key_col}={primary_key_value}",
description=f'Row in relational database table from the table with the name: "{table_name}" with the following row data {str(row_properties)} where the dictionary key value is the column name and the value is the column value. This row has the id of: {node_id}',
)
# Store the node object in our mapping
@ -113,7 +239,7 @@ async def migrate_relational_database(graph_db, schema, migrate_column_data=True
id=uuid5(NAMESPACE_OID, name=column_node_id),
name=column_node_id,
properties=f"{key} {value} {table_name}",
description=f"Column name={key} and value={value} from column from table={table_name}",
description=f"column from relational database table={table_name}. Column name={key} and value={value}. The value of the column is related to the following row with this id: {row_node.id}. This column has the following ID: {column_node_id}",
)
node_mapping[column_node_id] = column_node
@ -180,39 +306,4 @@ async def migrate_relational_database(graph_db, schema, migrate_column_data=True
),
)
)
def _remove_duplicate_edges(edge_mapping):
seen = set()
unique_original_shape = []
for tup in edge_mapping:
# We go through all the tuples in the edge_mapping and we only add unique tuples to the list
# To eliminate duplicate edges.
source_id, target_id, rel_name, rel_dict = tup
# We need to convert the dictionary to a frozenset to be able to compare values for it
rel_dict_hashable = frozenset(sorted(rel_dict.items()))
hashable_tup = (source_id, target_id, rel_name, rel_dict_hashable)
# We use the seen set to keep track of unique edges
if hashable_tup not in seen:
# A list that has frozensets elements instead of dictionaries is needed to be able to compare values
seen.add(hashable_tup)
# append the original tuple shape (with the dictionary) if it's the first time we see it
unique_original_shape.append(tup)
return unique_original_shape
# Add all nodes and edges to the graph
# NOTE: Nodes and edges have to be added in batch for speed optimization, Especially for NetworkX.
# If we'd create nodes and add them to graph in real time the process would take too long.
# Every node and edge added to NetworkX is saved to file which is very slow when not done in batches.
await graph_db.add_nodes(list(node_mapping.values()))
await graph_db.add_edges(_remove_duplicate_edges(edge_mapping))
# In these steps we calculate the vector embeddings of our nodes and edges and save them to vector database
# Cognee uses this information to perform searches on the knowledge graph.
await index_data_points(list(node_mapping.values()))
await index_graph_edges()
logger.info("Data successfully migrated from relational database to desired graph database.")
return await graph_db.get_graph_data()
return node_mapping, edge_mapping

View file

@ -32,7 +32,10 @@ async def resolve_data_directories(
import s3fs
fs = s3fs.S3FileSystem(
key=s3_config.aws_access_key_id, secret=s3_config.aws_secret_access_key, anon=False
key=s3_config.aws_access_key_id,
secret=s3_config.aws_secret_access_key,
token=s3_config.aws_session_token,
anon=False,
)
for item in data:

View file

@ -0,0 +1,134 @@
from typing import List, Dict
from uuid import uuid5, NAMESPACE_OID
from cognee.infrastructure.engine.models.DataPoint import DataPoint
from sqlalchemy import text
from cognee.tasks.schema.models import DatabaseSchema, SchemaTable, SchemaRelationship
from cognee.infrastructure.databases.relational.get_migration_relational_engine import (
get_migration_relational_engine,
)
from cognee.infrastructure.databases.relational.config import get_migration_config
from datetime import datetime, timezone
async def ingest_database_schema(
    schema,
    max_sample_rows: int = 0,
) -> Dict[str, List[DataPoint] | DataPoint]:
    """
    Extract database schema metadata (optionally with sample data) and return DataPoint models for graph construction.

    Args:
        schema: Database schema mapping table names to column/key details.
        max_sample_rows: Maximum sample rows per table (0 means no sampling)

    Returns:
        Dict with keys:
            "database_schema": DatabaseSchema
            "schema_tables": List[SchemaTable]
            "relationships": List[SchemaRelationship]
    """
    tables = {}
    sample_data = {}
    schema_tables = []
    schema_relationships = []

    migration_config = get_migration_config()
    engine = get_migration_relational_engine()
    # Dialect-aware identifier quoting; protects the interpolated table names below.
    qi = engine.engine.dialect.identifier_preparer.quote

    try:
        max_sample_rows = max(0, int(max_sample_rows))
    except (TypeError, ValueError):
        max_sample_rows = 0

    def qname(name: str):
        # Quote each dotted segment separately (handles "schema.table" names).
        split_name = name.split(".")
        return ".".join(qi(p) for p in split_name)

    async with engine.engine.begin() as cursor:
        for table_name, details in schema.items():
            tn = qname(table_name)

            if max_sample_rows > 0:
                rows_result = await cursor.execute(
                    text(f"SELECT * FROM {tn} LIMIT :limit;"),  # noqa: S608 - tn is fully quoted
                    {"limit": max_sample_rows},
                )
                rows = [dict(r) for r in rows_result.mappings().all()]
            else:
                rows = []

            if engine.engine.dialect.name == "postgresql":
                # Use the pg_class reltuples estimate to avoid a full COUNT(*) scan.
                if "." in table_name:
                    schema_part, table_part = table_name.split(".", 1)
                else:
                    schema_part, table_part = "public", table_name
                estimate = await cursor.execute(
                    text(
                        "SELECT reltuples::bigint AS estimate "
                        "FROM pg_class c "
                        "JOIN pg_namespace n ON n.oid = c.relnamespace "
                        "WHERE n.nspname = :schema AND c.relname = :table"
                    ),
                    {"schema": schema_part, "table": table_part},
                )
                row_count_estimate = estimate.scalar() or 0
            else:
                count_result = await cursor.execute(text(f"SELECT COUNT(*) FROM {tn};"))  # noqa: S608 - tn is fully quoted
                row_count_estimate = count_result.scalar()

            schema_table = SchemaTable(
                id=uuid5(NAMESPACE_OID, name=f"{table_name}"),
                name=table_name,
                columns=details["columns"],
                primary_key=details.get("primary_key"),
                foreign_keys=details.get("foreign_keys", []),
                sample_rows=rows,
                row_count_estimate=row_count_estimate,
                # BUGFIX: the concatenated f-strings were missing separating spaces,
                # producing run-together sentences like "rows.Here are".
                description=f"Relational database table '{table_name}' with {len(details['columns'])} columns and approx. {row_count_estimate} rows. "
                f"Here are the columns this table contains: {details['columns']}. "
                f"Here are a few sample_rows to show the contents of the table: {rows}. "
                f"Table is part of the database: {migration_config.migration_db_name}",
            )
            schema_tables.append(schema_table)

            tables[table_name] = details
            sample_data[table_name] = rows

            for fk in details.get("foreign_keys", []):
                ref_table_fq = fk["ref_table"]
                # Qualify unqualified references with the current table's schema prefix.
                if "." not in ref_table_fq and "." in table_name:
                    ref_table_fq = f"{table_name.split('.', 1)[0]}.{ref_table_fq}"
                relationship_name = (
                    f"{table_name}:{fk['column']}->{ref_table_fq}:{fk['ref_column']}"
                )
                relationship = SchemaRelationship(
                    id=uuid5(NAMESPACE_OID, name=relationship_name),
                    name=relationship_name,
                    source_table=table_name,
                    target_table=ref_table_fq,
                    relationship_type="foreign_key",
                    source_column=fk["column"],
                    target_column=fk["ref_column"],
                    # BUGFIX: missing "->" separator garbled the endpoints together,
                    # plus a "foreing" typo and a missing sentence break.
                    description=f"Relational database table foreign key relationship between: {table_name}.{fk['column']} -> {ref_table_fq}.{fk['ref_column']}. "
                    f"This foreign key relationship between table columns is a part of the following database: {migration_config.migration_db_name}",
                )
                schema_relationships.append(relationship)

    id_str = f"{migration_config.migration_db_provider}:{migration_config.migration_db_name}"
    database_schema = DatabaseSchema(
        id=uuid5(NAMESPACE_OID, name=id_str),
        name=migration_config.migration_db_name,
        database_type=migration_config.migration_db_provider,
        tables=tables,
        sample_data=sample_data,
        extraction_timestamp=datetime.now(timezone.utc),
        description=f"Database schema containing {len(schema_tables)} tables and {len(schema_relationships)} relationships. "
        f"The database type is {migration_config.migration_db_provider}. "
        f"The database contains the following tables: {tables}",
    )

    return {
        "database_schema": database_schema,
        "schema_tables": schema_tables,
        "relationships": schema_relationships,
    }

View file

@ -0,0 +1,41 @@
from cognee.infrastructure.engine.models.DataPoint import DataPoint
from typing import List, Dict, Optional
from datetime import datetime
class DatabaseSchema(DataPoint):
    """Represents a complete database schema with sample data.

    Acts as the top-level node for a migrated relational database; individual
    SchemaTable and SchemaRelationship data points reference it.
    """

    name: str  # database name (e.g. the configured migration_db_name)
    database_type: str  # provider identifier: sqlite, postgres, etc.
    tables: Dict[str, Dict]  # table name -> column/key details (reuses SqlAlchemyAdapter schema format)
    sample_data: Dict[str, List[Dict]]  # table name -> limited example rows
    extraction_timestamp: datetime  # when the schema was extracted (callers pass UTC)
    description: str  # human-readable summary of the schema
    # index_fields: which attributes are embedded/indexed for retrieval
    metadata: dict = {"index_fields": ["description", "name"]}
class SchemaTable(DataPoint):
    """Represents an individual table schema with relationships.

    One data point per table of the migrated database; descriptions are
    indexed so tables can be found via search.
    """

    name: str  # fully qualified table name (may include a schema prefix)
    columns: List[Dict]  # column definitions with their types
    primary_key: Optional[str]  # primary-key column name, if the table has one
    foreign_keys: List[Dict]  # foreign-key relationships (column, ref_table, ref_column)
    sample_rows: List[Dict]  # a few example rows (max 3-5) to illustrate contents
    row_count_estimate: Optional[int]  # approximate table size, when available
    description: str  # human-readable summary of the table
    # index_fields: which attributes are embedded/indexed for retrieval
    metadata: dict = {"index_fields": ["description", "name"]}
class SchemaRelationship(DataPoint):
    """Represents relationships between tables.

    Typically created from foreign keys discovered during schema extraction;
    the name encodes source/target table and column.
    """

    name: str  # e.g. "source_table:col->target_table:ref_col"
    source_table: str  # table owning the referencing column
    target_table: str  # table being referenced
    relationship_type: str  # "foreign_key", "one_to_many", etc.
    source_column: str  # referencing column on the source table
    target_column: str  # referenced column on the target table
    description: str  # human-readable summary of the relationship
    # index_fields: which attributes are embedded/indexed for retrieval
    metadata: dict = {"index_fields": ["description", "name"]}

View file

@ -197,6 +197,80 @@ async def relational_db_migration():
print(f"All checks passed for {graph_db_provider} provider with '{relationship_label}' edges!")
async def test_schema_only_migration():
    """End-to-end check of schema-only relational database migration.

    Extracts the schema from the test database, migrates only the schema
    (schema_only=True, no row data) into the configured graph store, then
    verifies the result two ways: a graph-completion search that should
    report 11 tables, and direct per-backend edge counts for the expected
    relationship types.
    """
    # 1. Set up the test DB and extract its schema
    migration_engine = await setup_test_db()
    schema = await migration_engine.extract_schema()
    # 2. Set up the graph engine
    graph_engine = await get_graph_engine()
    # 3. Migrate the schema only (no table contents)
    await migrate_relational_database(graph_engine, schema=schema, schema_only=True)
    # 4. Verify the number of tables through search
    search_results = await cognee.search(
        query_text="How many tables are there in this database",
        query_type=cognee.SearchType.GRAPH_COMPLETION,
        top_k=30,
    )
    assert any("11" in r for r in search_results), (
        "Number of tables in the database reported in search_results is either None or not equal to 11"
    )
    # 5. Count edges per relationship type, using each backend's query style
    graph_db_provider = os.getenv("GRAPH_DATABASE_PROVIDER", "networkx").lower()
    edge_counts = {
        "is_part_of": 0,
        "has_relationship": 0,
        "foreign_key": 0,
    }
    if graph_db_provider == "neo4j":
        # Neo4j: relationship name is the edge type, so match it directly
        for rel_type in edge_counts.keys():
            query_str = f"""
            MATCH ()-[r:{rel_type}]->()
            RETURN count(r) as c
            """
            rows = await graph_engine.query(query_str)
            edge_counts[rel_type] = rows[0]["c"]
    elif graph_db_provider == "kuzu":
        # Kuzu: all edges share the EDGE label; filter on relationship_name
        for rel_type in edge_counts.keys():
            query_str = f"""
            MATCH ()-[r:EDGE]->()
            WHERE r.relationship_name = '{rel_type}'
            RETURN count(r) as c
            """
            rows = await graph_engine.query(query_str)
            edge_counts[rel_type] = rows[0][0]
    elif graph_db_provider == "networkx":
        # NetworkX: iterate edge tuples; the edge key is the relationship name
        nodes, edges = await graph_engine.get_graph_data()
        for _, _, key, _ in edges:
            if key in edge_counts:
                edge_counts[key] += 1
    else:
        raise ValueError(f"Unsupported graph database provider: {graph_db_provider}")
    # 6. Assert counts match expected values for the 11-table test database
    expected_counts = {
        "is_part_of": 11,
        "has_relationship": 22,
        "foreign_key": 11,
    }
    for rel_type, expected in expected_counts.items():
        actual = edge_counts[rel_type]
        assert actual == expected, (
            f"Expected {expected} edges for relationship '{rel_type}', but found {actual}"
        )
    print("Schema-only migration edge counts validated successfully!")
    print(f"Edge counts: {edge_counts}")
async def test_migration_sqlite():
database_to_migrate_path = os.path.join(pathlib.Path(__file__).parent, "test_data/")
@ -209,6 +283,7 @@ async def test_migration_sqlite():
)
await relational_db_migration()
await test_schema_only_migration()
async def test_migration_postgres():
@ -224,6 +299,7 @@ async def test_migration_postgres():
}
)
await relational_db_migration()
await test_schema_only_migration()
async def main():

View file

@ -7,7 +7,6 @@ import cognee
from cognee.low_level import setup, DataPoint
from cognee.tasks.storage import add_data_points
from cognee.modules.graph.utils import resolve_edges_to_text
from cognee.infrastructure.databases.exceptions import DatabaseNotCreatedError
from cognee.modules.retrieval.graph_completion_context_extension_retriever import (
GraphCompletionContextExtensionRetriever,
)
@ -165,9 +164,6 @@ class TestGraphCompletionWithContextExtensionRetriever:
retriever = GraphCompletionContextExtensionRetriever()
with pytest.raises(DatabaseNotCreatedError):
await retriever.get_context("Who works at Figma?")
await setup()
context = await retriever.get_context("Who works at Figma?")

View file

@ -7,7 +7,6 @@ import cognee
from cognee.low_level import setup, DataPoint
from cognee.modules.graph.utils import resolve_edges_to_text
from cognee.tasks.storage import add_data_points
from cognee.infrastructure.databases.exceptions import DatabaseNotCreatedError
from cognee.modules.retrieval.graph_completion_cot_retriever import GraphCompletionCotRetriever
@ -158,9 +157,6 @@ class TestGraphCompletionCoTRetriever:
retriever = GraphCompletionCotRetriever()
with pytest.raises(DatabaseNotCreatedError):
await retriever.get_context("Who works at Figma?")
await setup()
context = await retriever.get_context("Who works at Figma?")

View file

@ -7,7 +7,6 @@ import cognee
from cognee.low_level import setup, DataPoint
from cognee.modules.graph.utils import resolve_edges_to_text
from cognee.tasks.storage import add_data_points
from cognee.infrastructure.databases.exceptions import DatabaseNotCreatedError
from cognee.modules.retrieval.graph_completion_retriever import GraphCompletionRetriever
@ -218,9 +217,6 @@ class TestGraphCompletionRetriever:
retriever = GraphCompletionRetriever()
with pytest.raises(DatabaseNotCreatedError):
await retriever.get_context("Who works at Figma?")
await setup()
context = await retriever.get_context("Who works at Figma?")

View file

@ -48,7 +48,6 @@ async def main():
query = "When was Kamala Harris in office?"
triplets = await brute_force_triplet_search(
query=query,
user=user,
top_k=3,
collections=["graphitinode_content", "graphitinode_name", "graphitinode_summary"],
)

View file

@ -1,16 +1,15 @@
from pathlib import Path
import asyncio
import cognee
import os
import cognee
from cognee.infrastructure.databases.relational.config import get_migration_config
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.api.v1.visualize.visualize import visualize_graph
from cognee.infrastructure.databases.relational import (
get_migration_relational_engine,
)
from cognee.modules.search.types import SearchType
from cognee.infrastructure.databases.relational import (
create_db_and_tables as create_relational_db_and_tables,
)
@ -32,16 +31,29 @@ from cognee.infrastructure.databases.vector.pgvector import (
async def main():
engine = get_migration_relational_engine()
# Clean all data stored in Cognee
await cognee.prune.prune_data()
await cognee.prune.prune_system(metadata=True)
# Needed to create appropriate tables only on the Cognee side
# Needed to create appropriate database tables only on the Cognee side
await create_relational_db_and_tables()
await create_vector_db_and_tables()
# In case environment variables are not set use the example database from the Cognee repo
migration_db_provider = os.environ.get("MIGRATION_DB_PROVIDER", "sqlite")
migration_db_path = os.environ.get(
"MIGRATION_DB_PATH",
os.path.join(Path(__file__).resolve().parent.parent.parent, "cognee/tests/test_data"),
)
migration_db_name = os.environ.get("MIGRATION_DB_NAME", "migration_database.sqlite")
migration_config = get_migration_config()
migration_config.migration_db_provider = migration_db_provider
migration_config.migration_db_path = migration_db_path
migration_config.migration_db_name = migration_db_name
engine = get_migration_relational_engine()
print("\nExtracting schema of database to migrate.")
schema = await engine.extract_schema()
print(f"Migrated database schema:\n{schema}")
@ -53,10 +65,6 @@ async def main():
await migrate_relational_database(graph, schema=schema)
print("Relational database migration complete.")
# Define location where to store html visualization of graph of the migrated database
home_dir = os.path.expanduser("~")
destination_file_path = os.path.join(home_dir, "graph_visualization.html")
# Make sure to set top_k to a high value for a broader search; the default value is only 10!
# top_k represents the number of graph triplets to supply to the LLM to answer your question
search_results = await cognee.search(
@ -69,13 +77,25 @@ async def main():
# Having a top_k value set to too high might overwhelm the LLM context when specific questions need to be answered.
# For this kind of question we've set the top_k to 30
search_results = await cognee.search(
query_type=SearchType.GRAPH_COMPLETION_COT,
query_type=SearchType.GRAPH_COMPLETION,
query_text="What invoices are related to Leonie Köhler?",
top_k=30,
)
print(f"Search results: {search_results}")
# test.html is a file with visualized data migration
search_results = await cognee.search(
query_type=SearchType.GRAPH_COMPLETION,
query_text="What invoices are related to Luís Gonçalves?",
top_k=30,
)
print(f"Search results: {search_results}")
# If you check the relational database for this example you can see that the search results successfully found all
# the invoices related to the two customers, without any hallucinations or additional information
# Define location where to store html visualization of graph of the migrated database
home_dir = os.path.expanduser("~")
destination_file_path = os.path.join(home_dir, "graph_visualization.html")
print("Adding html visualization of graph database after migration.")
await visualize_graph(destination_file_path)
print(f"Visualization can be found at: {destination_file_path}")

View file

@ -29,8 +29,11 @@ async def main():
print("=" * 60)
# Start the UI server
def dummy_callback(pid):
pass
server = cognee.start_ui(
host="localhost",
pid_callback=dummy_callback,
port=3000,
open_browser=True, # This will automatically open your browser
)

291
poetry.lock generated
View file

@ -4,9 +4,10 @@
name = "aiobotocore"
version = "2.24.2"
description = "Async client for aws services using botocore and aiohttp"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"aws\""
files = [
{file = "aiobotocore-2.24.2-py3-none-any.whl", hash = "sha256:808c63b2bd344b91e2f2acb874831118a9f53342d248acd16a68455a226e283a"},
{file = "aiobotocore-2.24.2.tar.gz", hash = "sha256:dfb21bdb2610e8de4d22f401e91a24d50f1330a302d03c62c485757becd439a9"},
@ -164,9 +165,10 @@ speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>
name = "aioitertools"
version = "0.12.0"
description = "itertools and builtins for AsyncIO and mixed iterables"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"aws\""
files = [
{file = "aioitertools-0.12.0-py3-none-any.whl", hash = "sha256:fc1f5fac3d737354de8831cbba3eb04f79dd649d8f3afb4c5b114925e662a796"},
{file = "aioitertools-0.12.0.tar.gz", hash = "sha256:c2a9055b4fbb7705f561b9d86053e8af5d10cc845d22c32008c43490b2d8dd6b"},
@ -576,9 +578,10 @@ dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)"
name = "backoff"
version = "2.2.1"
description = "Function decoration for backoff and retry"
optional = false
optional = true
python-versions = ">=3.7,<4.0"
groups = ["main"]
markers = "extra == \"deepeval\" or extra == \"posthog\" or extra == \"chromadb\" or extra == \"docs\" or extra == \"monitoring\""
files = [
{file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
@ -609,9 +612,10 @@ extras = ["regex"]
name = "baml-py"
version = "0.206.0"
description = "BAML python bindings (pyproject.toml)"
optional = false
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"baml\""
files = [
{file = "baml_py-0.206.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d9e95f0b481a18ae6936d720b8fc609baec4ea1eabbdde48f1536ffc94ebf39f"},
{file = "baml_py-0.206.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0f698127da030b728c7aa2641c3164a3ab19779594019234361dd48f0784f195"},
@ -759,9 +763,10 @@ css = ["tinycss2 (>=1.1.0,<1.5)"]
name = "boto3"
version = "1.40.18"
description = "The AWS SDK for Python"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"neptune\" or extra == \"aws\""
files = [
{file = "boto3-1.40.18-py3-none-any.whl", hash = "sha256:daa776ba1251a7458c9d6c7627873d0c2460c8e8272d35759065580e9193700a"},
{file = "boto3-1.40.18.tar.gz", hash = "sha256:64301d39adecc154e3e595eaf0d4f28998ef0a5551f1d033aeac51a9e1a688e5"},
@ -779,9 +784,10 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
name = "botocore"
version = "1.40.18"
description = "Low-level, data-driven core of boto 3."
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"neptune\" or extra == \"aws\""
files = [
{file = "botocore-1.40.18-py3-none-any.whl", hash = "sha256:57025c46ca00cf8cec25de07a759521bfbfb3036a0f69b272654a354615dc45f"},
{file = "botocore-1.40.18.tar.gz", hash = "sha256:afd69bdadd8c55cc89d69de0799829e555193a352d87867f746e19020271cc0f"},
@ -945,9 +951,10 @@ pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
@ -1150,7 +1157,7 @@ description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
markers = "(platform_system == \"Windows\" or sys_platform == \"win32\" or os_name == \"nt\" or extra == \"llama-index\" or extra == \"dev\") and (platform_system == \"Windows\" or sys_platform == \"win32\" or extra == \"llama-index\" or extra == \"dev\" or extra == \"chromadb\") and (platform_system == \"Windows\" or python_version <= \"3.12\" or extra == \"notebook\" or extra == \"dev\" or extra == \"llama-index\" or extra == \"deepeval\" or extra == \"chromadb\") and (platform_system == \"Windows\" or extra == \"notebook\" or extra == \"dev\" or extra == \"llama-index\" or extra == \"deepeval\" or extra == \"chromadb\" or extra == \"codegraph\")"
markers = "(platform_system == \"Windows\" or sys_platform == \"win32\" or extra == \"llama-index\" or extra == \"dev\" or os_name == \"nt\") and (platform_system == \"Windows\" or sys_platform == \"win32\" or extra == \"llama-index\" or extra == \"dev\" or extra == \"chromadb\")"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@ -1194,10 +1201,10 @@ test = ["pytest"]
name = "contourpy"
version = "1.3.2"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "python_version == \"3.10\""
markers = "python_version == \"3.10\" and extra == \"evals\""
files = [
{file = "contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934"},
{file = "contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989"},
@ -1272,10 +1279,10 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist"
name = "contourpy"
version = "1.3.3"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
optional = true
python-versions = ">=3.11"
groups = ["main"]
markers = "python_version >= \"3.11\""
markers = "python_version >= \"3.11\" and extra == \"evals\""
files = [
{file = "contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1"},
{file = "contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381"},
@ -1576,9 +1583,10 @@ files = [
name = "cycler"
version = "0.12.1"
description = "Composable style cycles"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
@ -1847,9 +1855,10 @@ files = [
name = "distlib"
version = "0.4.0"
description = "Distribution utilities"
optional = false
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16"},
{file = "distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d"},
@ -1871,9 +1880,10 @@ files = [
name = "dlt"
version = "1.16.0"
description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run."
optional = false
optional = true
python-versions = "<3.15,>=3.9.2"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "dlt-1.16.0-py3-none-any.whl", hash = "sha256:882ef281bbdc32eaba3b5ced984a8ed7014d8978fd7ab4a58b198023c8938c9f"},
{file = "dlt-1.16.0.tar.gz", hash = "sha256:113d17a3f27aa4f41c3438b0b032a68d30db195d8415a471ba43a9502e971a21"},
@ -2178,10 +2188,9 @@ sqlalchemy = {version = ">=2.0.0,<2.1.0", extras = ["asyncio"]}
name = "fastembed"
version = "0.6.0"
description = "Fast, light, accurate library built for retrieval embedding generation"
optional = true
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
files = [
{file = "fastembed-0.6.0-py3-none-any.whl", hash = "sha256:a08385e9388adea0529a586004f2d588c9787880a510e4e5d167127a11e75328"},
{file = "fastembed-0.6.0.tar.gz", hash = "sha256:5c9ead25f23449535b07243bbe1f370b820dcc77ec2931e61674e3fe7ff24733"},
@ -2194,8 +2203,12 @@ mmh3 = ">=4.1.0,<6.0.0"
numpy = [
{version = ">=1.21", markers = "python_version >= \"3.10\" and python_version < \"3.12\""},
{version = ">=1.26", markers = "python_version == \"3.12\""},
{version = ">=2.1.0", markers = "python_version >= \"3.13\""},
]
onnxruntime = [
{version = ">=1.17.0,<1.20.0 || >1.20.0", markers = "python_version >= \"3.10\" and python_version < \"3.13\""},
{version = ">1.20.0", markers = "python_version >= \"3.13\""},
]
onnxruntime = {version = ">=1.17.0,<1.20.0 || >1.20.0", markers = "python_version >= \"3.10\" and python_version < \"3.13\""}
pillow = ">=10.3.0,<12.0.0"
py-rust-stemmers = ">=0.1.0,<0.2.0"
requests = ">=2.31,<3.0"
@ -2241,6 +2254,7 @@ files = [
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b31dd488d0778c36f8279b306dc92a42f16904cba54acca71e107d65b60b0c"},
{file = "fastuuid-0.12.0-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:b19361ee649365eefc717ec08005972d3d1eb9ee39908022d98e3bfa9da59e37"},
{file = "fastuuid-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:8fc66b11423e6f3e1937385f655bedd67aebe56a3dcec0cb835351cfe7d358c9"},
{file = "fastuuid-0.12.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:2925f67b88d47cb16aa3eb1ab20fdcf21b94d74490e0818c91ea41434b987493"},
{file = "fastuuid-0.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7b15c54d300279ab20a9cc0579ada9c9f80d1bc92997fc61fb7bf3103d7cb26b"},
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:458f1bc3ebbd76fdb89ad83e6b81ccd3b2a99fa6707cd3650b27606745cfb170"},
{file = "fastuuid-0.12.0-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:a8f0f83fbba6dc44271a11b22e15838641b8c45612cdf541b4822a5930f6893c"},
@ -2292,9 +2306,10 @@ files = [
name = "fonttools"
version = "4.60.0"
description = "Tools to manipulate font files"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:151282a235c36024168c21c02193e939e8b28c73d5fa0b36ae1072671d8fa134"},
{file = "fonttools-4.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3f32cc42d485d9b1546463b9a7a92bdbde8aef90bac3602503e04c2ddb27e164"},
@ -2581,9 +2596,10 @@ dev = ["flake8", "markdown", "twine", "wheel"]
name = "gitdb"
version = "4.0.12"
description = "Git Object Database"
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"dlt\" or extra == \"dev\""
files = [
{file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"},
{file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"},
@ -2596,9 +2612,10 @@ smmap = ">=3.0.1,<6"
name = "gitpython"
version = "3.1.45"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"dlt\" or extra == \"dev\""
files = [
{file = "gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77"},
{file = "gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c"},
@ -2615,9 +2632,10 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.
name = "giturlparse"
version = "0.12.0"
description = "A Git URL parsing module (supports parsing and rewriting)"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "giturlparse-0.12.0-py2.py3-none-any.whl", hash = "sha256:412b74f2855f1da2fefa89fd8dde62df48476077a72fc19b62039554d27360eb"},
{file = "giturlparse-0.12.0.tar.gz", hash = "sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a"},
@ -3110,9 +3128,10 @@ hyperframe = ">=6.1,<7"
name = "hexbytes"
version = "1.3.1"
description = "hexbytes: Python `bytes` subclass that decodes hex, with a readable console output"
optional = false
optional = true
python-versions = "<4,>=3.8"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "hexbytes-1.3.1-py3-none-any.whl", hash = "sha256:da01ff24a1a9a2b1881c4b85f0e9f9b0f51b526b379ffa23832ae7899d29c2c7"},
{file = "hexbytes-1.3.1.tar.gz", hash = "sha256:a657eebebdfe27254336f98d8af6e2236f3f83aed164b87466b6cf6c5f5a4765"},
@ -3371,9 +3390,10 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve
name = "humanize"
version = "4.13.0"
description = "Python humanize utilities"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "humanize-4.13.0-py3-none-any.whl", hash = "sha256:b810820b31891813b1673e8fec7f1ed3312061eab2f26e3fa192c393d11ed25f"},
{file = "humanize-4.13.0.tar.gz", hash = "sha256:78f79e68f76f0b04d711c4e55d32bebef5be387148862cb1ef83d2b58e7935a0"},
@ -3399,9 +3419,10 @@ files = [
name = "identify"
version = "2.6.14"
description = "File identification library for Python"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e"},
{file = "identify-2.6.14.tar.gz", hash = "sha256:663494103b4f717cb26921c52f8751363dc89db64364cd836a9bf1535f53cd6a"},
@ -3834,9 +3855,10 @@ files = [
name = "jmespath"
version = "1.0.1"
description = "JSON Matching Expressions"
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"neptune\" or extra == \"aws\""
files = [
{file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
@ -3846,9 +3868,10 @@ files = [
name = "joblib"
version = "1.5.2"
description = "Lightweight pipelining with Python functions"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"llama-index\" or extra == \"docs\" or extra == \"evals\""
files = [
{file = "joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241"},
{file = "joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55"},
@ -3902,9 +3925,10 @@ jsonpointer = ">=1.9"
name = "jsonpath-ng"
version = "1.7.0"
description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming."
optional = false
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
]
@ -4212,9 +4236,10 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v
name = "kiwisolver"
version = "1.4.9"
description = "A fast implementation of the Cassowary constraint solver"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b"},
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f"},
@ -4555,9 +4580,10 @@ six = "*"
name = "langfuse"
version = "2.60.10"
description = "A client library for accessing langfuse"
optional = false
optional = true
python-versions = "<4.0,>=3.9"
groups = ["main"]
markers = "extra == \"monitoring\""
files = [
{file = "langfuse-2.60.10-py3-none-any.whl", hash = "sha256:815c6369194aa5b2a24f88eb9952f7c3fc863272c41e90642a71f3bc76f4a11f"},
{file = "langfuse-2.60.10.tar.gz", hash = "sha256:a26d0d927a28ee01b2d12bb5b862590b643cc4e60a28de6e2b0c2cfff5dbfc6a"},
@ -4776,10 +4802,9 @@ server = ["starlette (>=0.39.0)", "uvicorn (>=0.32.0)"]
name = "loguru"
version = "0.7.3"
description = "Python logging made (stupidly) simple"
optional = true
optional = false
python-versions = "<4.0,>=3.5"
groups = ["main"]
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
files = [
{file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"},
{file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"},
@ -5130,9 +5155,10 @@ tests = ["pytest", "simplejson"]
name = "matplotlib"
version = "3.10.6"
description = "Python plotting package"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "matplotlib-3.10.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bc7316c306d97463a9866b89d5cc217824e799fa0de346c8f68f4f3d27c8693d"},
{file = "matplotlib-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d00932b0d160ef03f59f9c0e16d1e3ac89646f7785165ce6ad40c842db16cc2e"},
@ -5491,10 +5517,9 @@ mkdocstrings = ">=0.26"
name = "mmh3"
version = "5.2.0"
description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions."
optional = true
optional = false
python-versions = ">=3.9"
groups = ["main"]
markers = "(extra == \"codegraph\" or extra == \"chromadb\") and (python_version <= \"3.12\" or extra == \"chromadb\")"
files = [
{file = "mmh3-5.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:81c504ad11c588c8629536b032940f2a359dda3b6cbfd4ad8f74cb24dcd1b0bc"},
{file = "mmh3-5.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b898cecff57442724a0f52bf42c2de42de63083a91008fb452887e372f9c328"},
@ -6092,9 +6117,10 @@ test-extras = ["pytest-mpl", "pytest-randomly"]
name = "nltk"
version = "3.9.1"
description = "Natural Language Toolkit"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"llama-index\" or extra == \"docs\""
files = [
{file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"},
{file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"},
@ -6118,9 +6144,10 @@ twitter = ["twython"]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
@ -6644,10 +6671,10 @@ test = ["pytest (>=8.3.0,<8.4.0)", "pytest-benchmark (>=5.1.0,<5.2.0)", "pytest-
name = "orjson"
version = "3.11.3"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "(sys_platform != \"emscripten\" or platform_python_implementation != \"PyPy\" or extra == \"chromadb\") and (sys_platform != \"emscripten\" or extra == \"neptune\" or extra == \"langchain\" or extra == \"chromadb\")"
markers = "(sys_platform != \"emscripten\" or platform_python_implementation != \"PyPy\" or extra == \"chromadb\") and (sys_platform != \"emscripten\" or extra == \"neptune\" or extra == \"langchain\" or extra == \"chromadb\") and (platform_python_implementation != \"PyPy\" or extra == \"chromadb\" or extra == \"dlt\") and (extra == \"neptune\" or extra == \"langchain\" or extra == \"chromadb\" or extra == \"dlt\")"
files = [
{file = "orjson-3.11.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:29cb1f1b008d936803e2da3d7cba726fc47232c45df531b29edf0b232dd737e7"},
{file = "orjson-3.11.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97dceed87ed9139884a55db8722428e27bd8452817fbf1869c58b49fecab1120"},
@ -6779,9 +6806,10 @@ lint = ["black"]
name = "pandas"
version = "2.3.2"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"docs\" or extra == \"evals\""
files = [
{file = "pandas-2.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52bc29a946304c360561974c6542d1dd628ddafa69134a7131fdfd6a5d7a1a35"},
{file = "pandas-2.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:220cc5c35ffaa764dd5bb17cf42df283b5cb7fdf49e10a7b053a06c9cb48ee2b"},
@ -6909,9 +6937,10 @@ files = [
name = "pathvalidate"
version = "3.3.1"
description = "pathvalidate is a Python library to sanitize/validate a string such as filenames/file-paths/etc."
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f"},
{file = "pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177"},
@ -6926,9 +6955,10 @@ test = ["Faker (>=1.0.8)", "allpairspy (>=2)", "click (>=6.2)", "pytest (>=6.0.1
name = "pendulum"
version = "3.1.0"
description = "Python datetimes made easy"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "pendulum-3.1.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:aa545a59e6517cf43597455a6fb44daa4a6e08473d67a7ad34e4fa951efb9620"},
{file = "pendulum-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:299df2da6c490ede86bb8d58c65e33d7a2a42479d21475a54b467b03ccb88531"},
@ -7206,9 +7236,10 @@ kaleido = ["kaleido (>=1.0.0)"]
name = "pluggy"
version = "1.6.0"
description = "plugin and hook calling mechanisms for python"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"deepeval\" or extra == \"dev\" or extra == \"dlt\""
files = [
{file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
{file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
@ -7222,9 +7253,10 @@ testing = ["coverage", "pytest", "pytest-benchmark"]
name = "ply"
version = "3.11"
description = "Python Lex & Yacc"
optional = false
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
{file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
@ -7297,9 +7329,10 @@ test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "lan
name = "pre-commit"
version = "4.3.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8"},
{file = "pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16"},
@ -7667,10 +7700,9 @@ bcrypt = ["bcrypt (>=4.1.2,<5)"]
name = "py-rust-stemmers"
version = "0.1.5"
description = "Fast and parallel snowball stemmer"
optional = true
optional = false
python-versions = "*"
groups = ["main"]
markers = "extra == \"codegraph\" and python_version <= \"3.12\""
files = [
{file = "py_rust_stemmers-0.1.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bfbd9034ae00419ff2154e33b8f5b4c4d99d1f9271f31ed059e5c7e9fa005844"},
{file = "py_rust_stemmers-0.1.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7162ae66df2bb0fc39b350c24a049f5f5151c03c046092ba095c2141ec223a2"},
@ -8268,66 +8300,6 @@ files = [
[package.extras]
dev = ["build", "flake8", "mypy", "pytest", "twine"]
[[package]]
name = "pyside6"
version = "6.9.2"
description = "Python bindings for the Qt cross-platform application and UI framework"
optional = true
python-versions = "<3.14,>=3.9"
groups = ["main"]
markers = "extra == \"gui\""
files = [
{file = "pyside6-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:71245c76bfbe5c41794ffd8546730ec7cc869d4bbe68535639e026e4ef8a7714"},
{file = "pyside6-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:64a9e2146e207d858e00226f68d7c1b4ab332954742a00dcabb721bb9e4aa0cd"},
{file = "pyside6-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:a78fad16241a1f2ed0fa0098cf3d621f591fc75b4badb7f3fa3959c9d861c806"},
{file = "pyside6-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:d1afbf48f9a5612b9ee2dc7c384c1a65c08b5830ba5e7d01f66d82678e5459df"},
{file = "pyside6-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:1499b1d7629ab92119118e2636b4ace836b25e457ddf01003fdca560560b8c0a"},
]
[package.dependencies]
PySide6_Addons = "6.9.2"
PySide6_Essentials = "6.9.2"
shiboken6 = "6.9.2"
[[package]]
name = "pyside6-addons"
version = "6.9.2"
description = "Python bindings for the Qt cross-platform application and UI framework (Addons)"
optional = true
python-versions = "<3.14,>=3.9"
groups = ["main"]
markers = "extra == \"gui\""
files = [
{file = "pyside6_addons-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:7019fdcc0059626eb1608b361371f4dc8cb7f2d02f066908fd460739ff5a07cd"},
{file = "pyside6_addons-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:24350e5415317f269e743d1f7b4933fe5f59d90894aa067676c9ce6bfe9e7988"},
{file = "pyside6_addons-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:af8dee517de8d336735a6543f7dd496eb580e852c14b4d2304b890e2a29de499"},
{file = "pyside6_addons-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:98d2413904ee4b2b754b077af7875fa6ec08468c01a6628a2c9c3d2cece4874f"},
{file = "pyside6_addons-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:b430cae782ff1a99fb95868043557f22c31b30c94afb9cf73278584e220a2ab6"},
]
[package.dependencies]
PySide6_Essentials = "6.9.2"
shiboken6 = "6.9.2"
[[package]]
name = "pyside6-essentials"
version = "6.9.2"
description = "Python bindings for the Qt cross-platform application and UI framework (Essentials)"
optional = true
python-versions = "<3.14,>=3.9"
groups = ["main"]
markers = "extra == \"gui\""
files = [
{file = "pyside6_essentials-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:713eb8dcbb016ff10e6fca129c1bf2a0fd8cfac979e689264e0be3b332f9398e"},
{file = "pyside6_essentials-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:84b8ca4fa56506e2848bdb4c7a0851a5e7adcb916bef9bce25ce2eeb6c7002cc"},
{file = "pyside6_essentials-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:d0f701503974bd51b408966539aa6956f3d8536e547ea8002fbfb3d77796bbc3"},
{file = "pyside6_essentials-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:b2f746f795138ac63eb173f9850a6db293461a1b6ce22cf6dafac7d194a38951"},
{file = "pyside6_essentials-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:ecd7b5cd9e271f397fb89a6357f4ec301d8163e50869c6c557f9ccc6bed42789"},
]
[package.dependencies]
shiboken6 = "6.9.2"
[[package]]
name = "pysocks"
version = "1.7.1"
@ -8621,9 +8593,10 @@ XlsxWriter = ">=0.5.7"
name = "pytz"
version = "2025.2"
description = "World timezone definitions, modern and historical"
optional = false
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"neo4j\" or extra == \"graphiti\" or extra == \"docs\" or extra == \"evals\" or extra == \"dlt\""
files = [
{file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"},
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
@ -8636,7 +8609,7 @@ description = "Python for Window Extensions"
optional = false
python-versions = "*"
groups = ["main"]
markers = "sys_platform == \"win32\" or platform_system == \"Windows\""
markers = "(sys_platform == \"win32\" or platform_system == \"Windows\") and (platform_python_implementation != \"PyPy\" or platform_system == \"Windows\" or extra == \"dlt\")"
files = [
{file = "pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3"},
{file = "pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b"},
@ -8863,19 +8836,6 @@ files = [
[package.dependencies]
cffi = {version = "*", markers = "implementation_name == \"pypy\""}
[[package]]
name = "qasync"
version = "0.27.1"
description = "Python library for using asyncio in Qt-based applications"
optional = true
python-versions = ">=3.8,<4.0"
groups = ["main"]
markers = "extra == \"gui\""
files = [
{file = "qasync-0.27.1-py3-none-any.whl", hash = "sha256:5d57335723bc7d9b328dadd8cb2ed7978640e4bf2da184889ce50ee3ad2602c7"},
{file = "qasync-0.27.1.tar.gz", hash = "sha256:8dc768fd1ee5de1044c7c305eccf2d39d24d87803ea71189d4024fb475f4985f"},
]
[[package]]
name = "rapidfuzz"
version = "3.14.1"
@ -9231,9 +9191,10 @@ requests = ">=2.0.1,<3.0.0"
name = "requirements-parser"
version = "0.13.0"
description = "This is a small Python module for parsing Pip requirement files."
optional = false
optional = true
python-versions = "<4.0,>=3.8"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "requirements_parser-0.13.0-py3-none-any.whl", hash = "sha256:2b3173faecf19ec5501971b7222d38f04cb45bb9d87d0ad629ca71e2e62ded14"},
{file = "requirements_parser-0.13.0.tar.gz", hash = "sha256:0843119ca2cb2331de4eb31b10d70462e39ace698fd660a915c247d2301a4418"},
@ -9313,9 +9274,10 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]
name = "rich-argparse"
version = "1.7.1"
description = "Rich help formatters for argparse and optparse"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "rich_argparse-1.7.1-py3-none-any.whl", hash = "sha256:a8650b42e4a4ff72127837632fba6b7da40784842f08d7395eb67a9cbd7b4bf9"},
{file = "rich_argparse-1.7.1.tar.gz", hash = "sha256:d7a493cde94043e41ea68fb43a74405fa178de981bf7b800f7a3bd02ac5c27be"},
@ -9539,9 +9501,10 @@ files = [
name = "s3fs"
version = "2025.3.2"
description = "Convenient Filesystem interface over S3"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"aws\""
files = [
{file = "s3fs-2025.3.2-py3-none-any.whl", hash = "sha256:81eae3f37b4b04bcc08845d7bcc607c6ca45878813ef7e6a28d77b2688417130"},
{file = "s3fs-2025.3.2.tar.gz", hash = "sha256:6798f896ec76dd3bfd8beb89f0bb7c5263cb2760e038bae0978505cd172a307c"},
@ -9563,9 +9526,10 @@ boto3 = ["aiobotocore[boto3] (>=2.5.4,<3.0.0)"]
name = "s3transfer"
version = "0.13.1"
description = "An Amazon S3 Transfer Manager"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"neptune\" or extra == \"aws\""
files = [
{file = "s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724"},
{file = "s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf"},
@ -9621,9 +9585,10 @@ torch = ["safetensors[numpy]", "torch (>=1.10)"]
name = "scikit-learn"
version = "1.7.2"
description = "A set of python modules for machine learning and data mining"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "scikit_learn-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b33579c10a3081d076ab403df4a4190da4f4432d443521674637677dc91e61f"},
{file = "scikit_learn-1.7.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:36749fb62b3d961b1ce4fedf08fa57a1986cd409eff2d783bca5d4b9b5fce51c"},
@ -9677,10 +9642,10 @@ tests = ["matplotlib (>=3.5.0)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas
name = "scipy"
version = "1.15.3"
description = "Fundamental algorithms for scientific computing in Python"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "python_version == \"3.10\""
markers = "python_version == \"3.10\" and extra == \"evals\""
files = [
{file = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"},
{file = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"},
@ -9742,10 +9707,10 @@ test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis
name = "scipy"
version = "1.16.2"
description = "Fundamental algorithms for scientific computing in Python"
optional = false
optional = true
python-versions = ">=3.11"
groups = ["main"]
markers = "python_version >= \"3.11\""
markers = "python_version >= \"3.11\" and extra == \"evals\""
files = [
{file = "scipy-1.16.2-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:6ab88ea43a57da1af33292ebd04b417e8e2eaf9d5aa05700be8d6e1b6501cd92"},
{file = "scipy-1.16.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c95e96c7305c96ede73a7389f46ccd6c659c4da5ef1b2789466baeaed3622b6e"},
@ -9822,9 +9787,10 @@ test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6
name = "semver"
version = "3.0.4"
description = "Python helper for Semantic Versioning (https://semver.org)"
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"},
{file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"},
@ -9852,9 +9818,10 @@ win32 = ["pywin32 ; sys_platform == \"win32\""]
name = "sentry-sdk"
version = "2.38.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
optional = true
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"deepeval\" or extra == \"monitoring\""
files = [
{file = "sentry_sdk-2.38.0-py2.py3-none-any.whl", hash = "sha256:2324aea8573a3fa1576df7fb4d65c4eb8d9929c8fa5939647397a07179eef8d0"},
{file = "sentry_sdk-2.38.0.tar.gz", hash = "sha256:792d2af45e167e2f8a3347143f525b9b6bac6f058fb2014720b40b84ccbeb985"},
@ -9911,9 +9878,10 @@ unleash = ["UnleashClient (>=6.0.1)"]
name = "setuptools"
version = "80.9.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"notebook\" or extra == \"dev\" or extra == \"llama-index\" or extra == \"deepeval\" or extra == \"dlt\""
files = [
{file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"},
{file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"},
@ -9940,22 +9908,6 @@ files = [
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
]
[[package]]
name = "shiboken6"
version = "6.9.2"
description = "Python/C++ bindings helper module"
optional = true
python-versions = "<3.14,>=3.9"
groups = ["main"]
markers = "extra == \"gui\""
files = [
{file = "shiboken6-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:8bb1c4326330e53adeac98bfd9dcf57f5173a50318a180938dcc4825d9ca38da"},
{file = "shiboken6-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3b54c0a12ea1b03b9dc5dcfb603c366e957dc75341bf7cb1cc436d0d848308ee"},
{file = "shiboken6-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:a5f5985938f5acb604c23536a0ff2efb3cccb77d23da91fbaff8fd8ded3dceb4"},
{file = "shiboken6-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:68c33d565cd4732be762d19ff67dfc53763256bac413d392aa8598b524980bc4"},
{file = "shiboken6-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:c5b827797b3d89d9b9a3753371ff533fcd4afc4531ca51a7c696952132098054"},
]
[[package]]
name = "sigtools"
version = "4.0.1"
@ -9980,9 +9932,10 @@ tests = ["coverage", "mock", "repeated-test (>=2.2.1)", "sphinx"]
name = "simplejson"
version = "3.20.1"
description = "Simple, fast, extensible JSON encoder/decoder for Python"
optional = false
optional = true
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "simplejson-3.20.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f5272b5866b259fe6c33c4a8c5073bf8b359c3c97b70c298a2f09a69b52c7c41"},
{file = "simplejson-3.20.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5c0de368f3052a59a1acf21f8b2dd28686a9e4eba2da7efae7ed9554cb31e7bc"},
@ -10112,9 +10065,10 @@ files = [
name = "smmap"
version = "5.0.2"
description = "A pure Python implementation of a sliding window memory map manager"
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"dlt\" or extra == \"dev\""
files = [
{file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"},
{file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"},
@ -10245,9 +10199,10 @@ sqlcipher = ["sqlcipher3_binary"]
name = "sqlglot"
version = "27.16.3"
description = "An easily customizable SQL parser and transpiler"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"dlt\""
files = [
{file = "sqlglot-27.16.3-py3-none-any.whl", hash = "sha256:3765ef1da6c9a04dd9e9ab4bcf24ca54daae72d86d693954aed84dbbbff2ff3b"},
{file = "sqlglot-27.16.3.tar.gz", hash = "sha256:bf5cc3b7c90c3682365353a318089e69e859939943d7882562ba39be650a6202"},
@ -10406,9 +10361,10 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"]
name = "threadpoolctl"
version = "3.6.0"
description = "threadpoolctl"
optional = false
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"evals\""
files = [
{file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"},
{file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"},
@ -10575,9 +10531,10 @@ files = [
name = "tomlkit"
version = "0.13.3"
description = "Style preserving TOML library"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"dlt\" or extra == \"dev\""
files = [
{file = "tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0"},
{file = "tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1"},
@ -10914,9 +10871,10 @@ typing-extensions = ">=4.12.0"
name = "tzdata"
version = "2025.2"
description = "Provider of IANA time zone data"
optional = false
optional = true
python-versions = ">=2"
groups = ["main"]
markers = "extra == \"docs\" or extra == \"evals\" or extra == \"dlt\""
files = [
{file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"},
{file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"},
@ -11157,9 +11115,10 @@ test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil",
name = "virtualenv"
version = "20.34.0"
description = "Virtual Python Environment builder"
optional = false
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"dev\""
files = [
{file = "virtualenv-20.34.0-py3-none-any.whl", hash = "sha256:341f5afa7eee943e4984a9207c025feedd768baff6753cd660c857ceb3e36026"},
{file = "virtualenv-20.34.0.tar.gz", hash = "sha256:44815b2c9dee7ed86e387b842a84f20b93f7f417f95886ca1996a72a4138eb1a"},
@ -11495,10 +11454,10 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
name = "win-precise-time"
version = "1.4.2"
description = ""
optional = false
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "os_name == \"nt\" and python_version <= \"3.12\""
markers = "extra == \"dlt\" and os_name == \"nt\" and python_version <= \"3.12\""
files = [
{file = "win-precise-time-1.4.2.tar.gz", hash = "sha256:89274785cbc5f2997e01675206da3203835a442c60fd97798415c6b3c179c0b9"},
{file = "win_precise_time-1.4.2-cp310-cp310-win32.whl", hash = "sha256:7fa13a2247c2ef41cd5e9b930f40716eacc7fc1f079ea72853bd5613fe087a1a"},
@ -11519,10 +11478,10 @@ files = [
name = "win32-setctime"
version = "1.2.0"
description = "A small Python utility to set file creation time on Windows"
optional = true
optional = false
python-versions = ">=3.5"
groups = ["main"]
markers = "sys_platform == \"win32\" and extra == \"codegraph\" and python_version <= \"3.12\""
markers = "sys_platform == \"win32\""
files = [
{file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"},
{file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"},
@ -11909,23 +11868,25 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt
anthropic = ["anthropic"]
api = []
aws = ["s3fs"]
baml = ["baml-py"]
chromadb = ["chromadb", "pypika"]
codegraph = ["fastembed", "transformers", "tree-sitter", "tree-sitter-python"]
debug = ["debugpy"]
deepeval = ["deepeval"]
dev = ["coverage", "deptry", "gitpython", "mkdocs-material", "mkdocs-minify-plugin", "mkdocstrings", "mypy", "notebook", "pylint", "pytest", "pytest-asyncio", "pytest-cov", "ruff", "tweepy"]
dev = ["coverage", "deptry", "gitpython", "mkdocs-material", "mkdocs-minify-plugin", "mkdocstrings", "mypy", "notebook", "pre-commit", "pylint", "pytest", "pytest-asyncio", "pytest-cov", "ruff", "tweepy"]
distributed = ["modal"]
dlt = ["dlt"]
docs = ["unstructured"]
evals = ["gdown", "plotly"]
evals = ["gdown", "matplotlib", "pandas", "plotly", "scikit-learn"]
falkordb = ["falkordb"]
gemini = ["google-generativeai"]
graphiti = ["graphiti-core"]
groq = ["groq"]
gui = ["pyside6", "qasync"]
huggingface = ["transformers"]
langchain = ["langchain_text_splitters", "langsmith"]
llama-index = ["llama-index-core"]
mistral = ["mistral-common"]
monitoring = ["langfuse", "sentry-sdk"]
neo4j = ["neo4j"]
neptune = ["langchain_aws"]
notebook = ["notebook"]
@ -11937,4 +11898,4 @@ posthog = ["posthog"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<=3.13"
content-hash = "fc32c4e05914cf364d4feb0c3baec9771d149eb65626e5337fa2eb55e7130499"
content-hash = "49d2d5207803b6b56e0bbf2be871941f4fc4535d0bc823815e0a7cbe61e12574"

View file

@ -20,49 +20,38 @@ classifiers = [
"Operating System :: Microsoft :: Windows",
]
dependencies = [
"openai>=1.80.1,<2.0.0",
"openai>=1.80.1",
"python-dotenv>=1.0.1,<2.0.0",
"pydantic>=2.10.5,<3.0.0",
"pydantic-settings>=2.2.1,<3",
"typing_extensions>=4.12.2,<5.0.0",
"nltk>=3.9.1,<4.0.0",
"numpy>=1.26.4, <=4.0.0",
"pandas>=2.2.2,<3.0.0",
# Note: New s3fs and boto3 versions don't work well together
# Always use compatible fixed versions of these two dependencies
"s3fs[boto3]==2025.3.2",
"sqlalchemy>=2.0.39,<3.0.0",
"aiosqlite>=0.20.0,<1.0.0",
"tiktoken>=0.8.0,<1.0.0",
"litellm>=1.71.0, <2.0.0",
"litellm>=1.76.0",
"instructor>=1.9.1,<2.0.0",
"langfuse>=2.32.0,<3",
"filetype>=1.2.0,<2.0.0",
"aiohttp>=3.11.14,<4.0.0",
"aiofiles>=23.2.1,<24.0.0",
"rdflib>=7.1.4,<7.2.0",
"pypdf>=4.1.0,<7.0.0",
"jinja2>=3.1.3,<4",
"matplotlib>=3.8.3,<4",
"networkx>=3.4.2,<4",
"lancedb>=0.24.0,<1.0.0",
"nbformat>=5.7.0,<6.0.0",
"alembic>=1.13.3,<2",
"pre-commit>=4.0.1,<5",
"scikit-learn>=1.6.1,<2",
"limits>=4.4.1,<5",
"fastapi>=0.115.7,<1.0.0",
"python-multipart>=0.0.20,<1.0.0",
"fastapi-users[sqlalchemy]>=14.0.1,<15.0.0",
"dlt[sqlalchemy]>=1.9.0,<2",
"sentry-sdk[fastapi]>=2.9.0,<3",
"structlog>=25.2.0,<26",
"baml-py (==0.206.0)",
"pympler>=1.1,<2.0.0",
"onnxruntime>=1.0.0,<2.0.0",
"pylance>=0.22.0,<1.0.0",
"onnxruntime<=1.22.1",
"pylance>=0.22.0,<=0.36.0",
"kuzu (==0.11.0)",
"python-magic-bin<0.5 ; platform_system == 'Windows'", # Only needed for Windows
"fastembed<=0.6.0",
"networkx>=3.4.2,<4",
"uvicorn>=0.34.0,<1.0.0",
"gunicorn>=20.1.0,<24",
"websockets>=15.0.1,<16.0.0",
@ -71,6 +60,7 @@ dependencies = [
[project.optional-dependencies]
api=[]
distributed = [
"modal>=1.0.5,<2.0.0",
]
@ -116,25 +106,28 @@ codegraph = [
evals = [
"plotly>=6.0.0,<7",
"gdown>=5.2.0,<6",
"pandas>=2.2.2,<3.0.0",
"matplotlib>=3.8.3,<4",
"scikit-learn>=1.6.1,<2",
]
gui = [
"pyside6>=6.8.3,<7",
"qasync>=0.27.1,<0.28",
]
graphiti = ["graphiti-core>=0.7.0,<0.8"]
# Note: New s3fs and boto3 versions don't work well together
# Always use compatible fixed versions of these two dependencies
aws = ["s3fs[boto3]==2025.3.2"]
dlt = ["dlt[sqlalchemy]>=1.9.0,<2"]
baml = ["baml-py (==0.206.0)"]
dev = [
"pytest>=7.4.0,<8",
"pytest-cov>=6.1.1,<7.0.0",
"pytest-asyncio>=0.21.1,<0.22",
"coverage>=7.3.2,<8",
"mypy>=1.7.1,<2",
"pre-commit>=4.0.1,<5",
"notebook>=7.1.0,<8",
"deptry>=0.20.0,<0.21",
"pylint>=3.0.3,<4",
"ruff>=0.9.2,<1.0.0",
"ruff>=0.9.2,<=0.13.1",
"tweepy>=4.14.0,<5.0.0",
"gitpython>=3.1.43,<4",
"mkdocs-material>=9.5.42,<10",
@ -143,6 +136,8 @@ dev = [
]
debug = ["debugpy>=1.8.9,<2.0.0"]
monitoring = ["sentry-sdk[fastapi]>=2.9.0,<3", "langfuse>=2.32.0,<3"]
[project.urls]
Homepage = "https://www.cognee.ai"
Repository = "https://github.com/topoteretes/cognee"

303
uv.lock generated
View file

@ -2,16 +2,16 @@ version = 1
requires-python = ">=3.10, <=3.13"
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and sys_platform == 'emscripten'",
]
@ -221,7 +221,7 @@ wheels = [
[[package]]
name = "anyio"
version = "4.10.0"
version = "4.11.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
@ -229,9 +229,9 @@ dependencies = [
{ name = "sniffio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252 }
sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213 },
{ url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
]
[[package]]
@ -296,14 +296,14 @@ wheels = [
[[package]]
name = "asgiref"
version = "3.9.1"
version = "3.9.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870 }
sdist = { url = "https://files.pythonhosted.org/packages/7f/bf/0f3ecda32f1cb3bf1dca480aca08a7a8a3bdc4bed2343a103f30731565c9/asgiref-3.9.2.tar.gz", hash = "sha256:a0249afacb66688ef258ffe503528360443e2b9a8d8c4581b6ebefa58c841ef1", size = 36894, upload-time = "2025-09-23T15:00:55.136Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790 },
{ url = "https://files.pythonhosted.org/packages/c7/d1/69d02ce34caddb0a7ae088b84c356a625a93cd4ff57b2f97644c03fad905/asgiref-3.9.2-py3-none-any.whl", hash = "sha256:0b61526596219d70396548fc003635056856dba5d0d086f86476f10b33c75960", size = 23788, upload-time = "2025-09-23T15:00:53.627Z" },
]
[[package]]
@ -821,31 +821,24 @@ dependencies = [
{ name = "aiohttp" },
{ name = "aiosqlite" },
{ name = "alembic" },
{ name = "baml-py" },
{ name = "dlt", extra = ["sqlalchemy"] },
{ name = "fastapi" },
{ name = "fastapi-users", extra = ["sqlalchemy"] },
{ name = "fastembed" },
{ name = "filetype" },
{ name = "gunicorn" },
{ name = "instructor" },
{ name = "jinja2" },
{ name = "kuzu" },
{ name = "lancedb" },
{ name = "langfuse" },
{ name = "limits" },
{ name = "litellm" },
{ name = "matplotlib" },
{ name = "mistralai" },
{ name = "nbformat" },
{ name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
{ name = "networkx", version = "3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
{ name = "nltk" },
{ name = "numpy", version = "1.26.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
{ name = "numpy", version = "2.3.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
{ name = "onnxruntime" },
{ name = "openai" },
{ name = "pandas" },
{ name = "pre-commit" },
{ name = "pydantic" },
{ name = "pydantic-settings" },
{ name = "pylance" },
@ -855,9 +848,6 @@ dependencies = [
{ name = "python-magic-bin", marker = "sys_platform == 'win32'" },
{ name = "python-multipart" },
{ name = "rdflib" },
{ name = "s3fs", extra = ["boto3"] },
{ name = "scikit-learn" },
{ name = "sentry-sdk", extra = ["fastapi"] },
{ name = "sqlalchemy" },
{ name = "structlog" },
{ name = "tiktoken" },
@ -873,6 +863,9 @@ anthropic = [
aws = [
{ name = "s3fs", extra = ["boto3"] },
]
baml = [
{ name = "baml-py" },
]
chromadb = [
{ name = "chromadb" },
{ name = "pypika" },
@ -898,6 +891,7 @@ dev = [
{ name = "mkdocstrings", extra = ["python"] },
{ name = "mypy" },
{ name = "notebook" },
{ name = "pre-commit" },
{ name = "pylint" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
@ -908,12 +902,18 @@ dev = [
distributed = [
{ name = "modal" },
]
dlt = [
{ name = "dlt", extra = ["sqlalchemy"] },
]
docs = [
{ name = "unstructured", extra = ["csv", "doc", "docx", "epub", "md", "odt", "org", "ppt", "pptx", "rst", "rtf", "tsv", "xlsx"] },
]
evals = [
{ name = "gdown" },
{ name = "matplotlib" },
{ name = "pandas" },
{ name = "plotly" },
{ name = "scikit-learn" },
]
falkordb = [
{ name = "falkordb" },
@ -927,10 +927,6 @@ graphiti = [
groq = [
{ name = "groq" },
]
gui = [
{ name = "pyside6" },
{ name = "qasync" },
]
huggingface = [
{ name = "transformers" },
]
@ -944,6 +940,10 @@ llama-index = [
mistral = [
{ name = "mistral-common" },
]
monitoring = [
{ name = "langfuse" },
{ name = "sentry-sdk", extra = ["fastapi"] },
]
neo4j = [
{ name = "neo4j" },
]
@ -979,16 +979,17 @@ requires-dist = [
{ name = "anthropic", marker = "extra == 'anthropic'", specifier = ">=0.27" },
{ name = "asyncpg", marker = "extra == 'postgres'", specifier = ">=0.30.0,<1.0.0" },
{ name = "asyncpg", marker = "extra == 'postgres-binary'", specifier = ">=0.30.0,<1.0.0" },
{ name = "baml-py", specifier = "==0.206.0" },
{ name = "baml-py", marker = "extra == 'baml'", specifier = "==0.206.0" },
{ name = "chromadb", marker = "extra == 'chromadb'", specifier = ">=0.6,<0.7" },
{ name = "coverage", marker = "extra == 'dev'", specifier = ">=7.3.2,<8" },
{ name = "debugpy", marker = "extra == 'debug'", specifier = ">=1.8.9,<2.0.0" },
{ name = "deepeval", marker = "extra == 'deepeval'", specifier = ">=3.0.1,<4" },
{ name = "deptry", marker = "extra == 'dev'", specifier = ">=0.20.0,<0.21" },
{ name = "dlt", extras = ["sqlalchemy"], specifier = ">=1.9.0,<2" },
{ name = "dlt", extras = ["sqlalchemy"], marker = "extra == 'dlt'", specifier = ">=1.9.0,<2" },
{ name = "falkordb", marker = "extra == 'falkordb'", specifier = ">=1.0.9,<2.0.0" },
{ name = "fastapi", specifier = ">=0.115.7,<1.0.0" },
{ name = "fastapi-users", extras = ["sqlalchemy"], specifier = ">=14.0.1,<15.0.0" },
{ name = "fastembed", specifier = "<=0.6.0" },
{ name = "fastembed", marker = "python_full_version < '3.13' and extra == 'codegraph'", specifier = "<=0.6.0" },
{ name = "filetype", specifier = ">=1.2.0,<2.0.0" },
{ name = "gdown", marker = "extra == 'evals'", specifier = ">=5.2.0,<6" },
@ -1003,12 +1004,12 @@ requires-dist = [
{ name = "lancedb", specifier = ">=0.24.0,<1.0.0" },
{ name = "langchain-aws", marker = "extra == 'neptune'", specifier = ">=0.2.22" },
{ name = "langchain-text-splitters", marker = "extra == 'langchain'", specifier = ">=0.3.2,<1.0.0" },
{ name = "langfuse", specifier = ">=2.32.0,<3" },
{ name = "langfuse", marker = "extra == 'monitoring'", specifier = ">=2.32.0,<3" },
{ name = "langsmith", marker = "extra == 'langchain'", specifier = ">=0.2.3,<1.0.0" },
{ name = "limits", specifier = ">=4.4.1,<5" },
{ name = "litellm", specifier = ">=1.71.0,<2.0.0" },
{ name = "litellm", specifier = ">=1.76.0" },
{ name = "llama-index-core", marker = "extra == 'llama-index'", specifier = ">=0.12.11,<0.13" },
{ name = "matplotlib", specifier = ">=3.8.3,<4" },
{ name = "matplotlib", marker = "extra == 'evals'", specifier = ">=3.8.3,<4" },
{ name = "mistral-common", marker = "extra == 'mistral'", specifier = ">=1.5.2,<2" },
{ name = "mistralai", specifier = ">=1.9.10" },
{ name = "mkdocs-material", marker = "extra == 'dev'", specifier = ">=9.5.42,<10" },
@ -1019,41 +1020,37 @@ requires-dist = [
{ name = "nbformat", specifier = ">=5.7.0,<6.0.0" },
{ name = "neo4j", marker = "extra == 'neo4j'", specifier = ">=5.28.0,<6" },
{ name = "networkx", specifier = ">=3.4.2,<4" },
{ name = "nltk", specifier = ">=3.9.1,<4.0.0" },
{ name = "notebook", marker = "extra == 'dev'", specifier = ">=7.1.0,<8" },
{ name = "notebook", marker = "extra == 'notebook'", specifier = ">=7.1.0,<8" },
{ name = "numpy", specifier = ">=1.26.4,<=4.0.0" },
{ name = "onnxruntime", specifier = ">=1.0.0,<2.0.0" },
{ name = "openai", specifier = ">=1.80.1,<2.0.0" },
{ name = "pandas", specifier = ">=2.2.2,<3.0.0" },
{ name = "onnxruntime", specifier = "<=1.22.1" },
{ name = "openai", specifier = ">=1.80.1" },
{ name = "pandas", marker = "extra == 'evals'", specifier = ">=2.2.2,<3.0.0" },
{ name = "pgvector", marker = "extra == 'postgres'", specifier = ">=0.3.5,<0.4" },
{ name = "pgvector", marker = "extra == 'postgres-binary'", specifier = ">=0.3.5,<0.4" },
{ name = "plotly", marker = "extra == 'evals'", specifier = ">=6.0.0,<7" },
{ name = "posthog", marker = "extra == 'posthog'", specifier = ">=3.5.0,<4" },
{ name = "pre-commit", specifier = ">=4.0.1,<5" },
{ name = "pre-commit", marker = "extra == 'dev'", specifier = ">=4.0.1,<5" },
{ name = "psycopg2", marker = "extra == 'postgres'", specifier = ">=2.9.10,<3" },
{ name = "psycopg2-binary", marker = "extra == 'postgres-binary'", specifier = ">=2.9.10,<3.0.0" },
{ name = "pydantic", specifier = ">=2.10.5,<3.0.0" },
{ name = "pydantic-settings", specifier = ">=2.2.1,<3" },
{ name = "pylance", specifier = ">=0.22.0,<1.0.0" },
{ name = "pylance", specifier = ">=0.22.0,<=0.36.0" },
{ name = "pylint", marker = "extra == 'dev'", specifier = ">=3.0.3,<4" },
{ name = "pympler", specifier = ">=1.1,<2.0.0" },
{ name = "pypdf", specifier = ">=4.1.0,<7.0.0" },
{ name = "pypika", marker = "extra == 'chromadb'", specifier = "==0.48.9" },
{ name = "pyside6", marker = "extra == 'gui'", specifier = ">=6.8.3,<7" },
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.0,<8" },
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.1,<0.22" },
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=6.1.1,<7.0.0" },
{ name = "python-dotenv", specifier = ">=1.0.1,<2.0.0" },
{ name = "python-magic-bin", marker = "sys_platform == 'win32'", specifier = "<0.5" },
{ name = "python-multipart", specifier = ">=0.0.20,<1.0.0" },
{ name = "qasync", marker = "extra == 'gui'", specifier = ">=0.27.1,<0.28" },
{ name = "rdflib", specifier = ">=7.1.4,<7.2.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.2,<1.0.0" },
{ name = "s3fs", extras = ["boto3"], specifier = "==2025.3.2" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.9.2,<=0.13.1" },
{ name = "s3fs", extras = ["boto3"], marker = "extra == 'aws'", specifier = "==2025.3.2" },
{ name = "scikit-learn", specifier = ">=1.6.1,<2" },
{ name = "sentry-sdk", extras = ["fastapi"], specifier = ">=2.9.0,<3" },
{ name = "scikit-learn", marker = "extra == 'evals'", specifier = ">=1.6.1,<2" },
{ name = "sentry-sdk", extras = ["fastapi"], marker = "extra == 'monitoring'", specifier = ">=2.9.0,<3" },
{ name = "sqlalchemy", specifier = ">=2.0.39,<3.0.0" },
{ name = "structlog", specifier = ">=25.2.0,<26" },
{ name = "tiktoken", specifier = ">=0.8.0,<1.0.0" },
@ -1068,6 +1065,7 @@ requires-dist = [
{ name = "uvicorn", specifier = ">=0.34.0,<1.0.0" },
{ name = "websockets", specifier = ">=15.0.1,<16.0.0" },
]
provides-extras = ["api", "distributed", "neo4j", "neptune", "postgres", "postgres-binary", "notebook", "langchain", "llama-index", "gemini", "huggingface", "ollama", "mistral", "anthropic", "deepeval", "posthog", "falkordb", "groq", "chromadb", "docs", "codegraph", "evals", "graphiti", "aws", "dlt", "baml", "dev", "debug", "monitoring"]
[[package]]
name = "colorama"
@ -1177,13 +1175,13 @@ version = "1.3.3"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
]
dependencies = [
@ -1588,7 +1586,7 @@ wheels = [
[[package]]
name = "dlt"
version = "1.16.0"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@ -1619,9 +1617,9 @@ dependencies = [
{ name = "tzdata" },
{ name = "win-precise-time", marker = "python_full_version < '3.13' and os_name == 'nt'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/47/45/8f32b8cc4c709c79edc54763ab0e5f62df55a17bfaf8c31e2d2538422e34/dlt-1.16.0.tar.gz", hash = "sha256:113d17a3f27aa4f41c3438b0b032a68d30db195d8415a471ba43a9502e971a21", size = 809187 }
sdist = { url = "https://files.pythonhosted.org/packages/af/2e/959f7ba35a710c4128bd7ff329038af35791c77346beeeb16c6d3e2bd87c/dlt-1.17.0.tar.gz", hash = "sha256:7a41a360f0cfeb155da1da3cdd51edb3749a4378d99bb6d1d0a26c7ef471198d", size = 823873, upload-time = "2025-09-24T10:54:00.95Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/1c/0a96ced9fb52e859b44624cc86ace5f59324ca899ac7e5a5cfeb1f1c797c/dlt-1.16.0-py3-none-any.whl", hash = "sha256:882ef281bbdc32eaba3b5ced984a8ed7014d8978fd7ab4a58b198023c8938c9f", size = 1029963 },
{ url = "https://files.pythonhosted.org/packages/e1/ed/86aa307dd764ed56c2ab32fa7842a9b5a3a1b3a3b4709307756fd9ac756b/dlt-1.17.0-py3-none-any.whl", hash = "sha256:e5da62695cfbd37f2569b16af3e50582dc2ede2af5c1c014ded7bb7c089de5b1", size = 1048344, upload-time = "2025-09-24T10:53:58.642Z" },
]
[package.optional-dependencies]
@ -1864,11 +1862,11 @@ wheels = [
[[package]]
name = "flatbuffers"
version = "25.2.10"
version = "25.9.23"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/30/eb5dce7994fc71a2f685d98ec33cc660c0a5887db5610137e60d8cbc4489/flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e", size = 22170 }
sdist = { url = "https://files.pythonhosted.org/packages/9d/1f/3ee70b0a55137442038f2a33469cc5fddd7e0ad2abf83d7497c18a2b6923/flatbuffers-25.9.23.tar.gz", hash = "sha256:676f9fa62750bb50cf531b42a0a2a118ad8f7f797a511eda12881c016f093b12", size = 22067, upload-time = "2025-09-24T05:25:30.106Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/25/155f9f080d5e4bc0082edfda032ea2bc2b8fab3f4d25d46c1e9dd22a1a89/flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051", size = 30953 },
{ url = "https://files.pythonhosted.org/packages/ee/1b/00a78aa2e8fbd63f9af08c9c19e6deb3d5d66b4dda677a0f61654680ee89/flatbuffers-25.9.23-py2.py3-none-any.whl", hash = "sha256:255538574d6cb6d0a79a17ec8bc0d30985913b87513a01cce8bcdb6b4c44d0e2", size = 30869, upload-time = "2025-09-24T05:25:28.912Z" },
]
[[package]]
@ -2123,7 +2121,7 @@ grpc = [
[[package]]
name = "google-api-python-client"
version = "2.182.0"
version = "2.183.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2132,9 +2130,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6f/cb/b85b1d7d7fd520739fb70c4878f1f414043c3c34434bc90ba9d4f93366ed/google_api_python_client-2.182.0.tar.gz", hash = "sha256:cb2aa127e33c3a31e89a06f39cf9de982db90a98dee020911b21013afafad35f", size = 13599318 }
sdist = { url = "https://files.pythonhosted.org/packages/fa/1f/49a2c83fc6dcd8b127cc9efbecf7d5fc36109c2028ba22ed6cb4d072fca4/google_api_python_client-2.183.0.tar.gz", hash = "sha256:abae37e04fecf719388e5c02f707ed9cdf952f10b217c79a3e76c636762e3ea9", size = 13645623, upload-time = "2025-09-23T22:27:00.854Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/29/76dabe97ebb710ca9a308f0415b2206e37d149983ec2becbf66525c52322/google_api_python_client-2.182.0-py3-none-any.whl", hash = "sha256:a9b071036d41a17991d8fbf27bedb61f2888a39ae5696cb5a326bf999b2d5209", size = 14168745 },
{ url = "https://files.pythonhosted.org/packages/ab/06/1974f937172854bc7622eff5c2390f33542ceb843f305922922c8f5f7f17/google_api_python_client-2.183.0-py3-none-any.whl", hash = "sha256:2005b6e86c27be1db1a43f43e047a0f8e004159f3cceddecb08cf1624bddba31", size = 14214837, upload-time = "2025-09-23T22:26:57.758Z" },
]
[[package]]
@ -2549,7 +2547,7 @@ wheels = [
[[package]]
name = "huggingface-hub"
version = "0.35.0"
version = "0.35.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock" },
@ -2561,9 +2559,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/37/79/d71d40efa058e8c4a075158f8855bc2998037b5ff1c84f249f34435c1df7/huggingface_hub-0.35.0.tar.gz", hash = "sha256:ccadd2a78eef75effff184ad89401413629fabc52cefd76f6bbacb9b1c0676ac", size = 461486 }
sdist = { url = "https://files.pythonhosted.org/packages/f6/42/0e7be334a6851cd7d51cc11717cb95e89333ebf0064431c0255c56957526/huggingface_hub-0.35.1.tar.gz", hash = "sha256:3585b88c5169c64b7e4214d0e88163d4a709de6d1a502e0cd0459e9ee2c9c572", size = 461374, upload-time = "2025-09-23T13:43:47.074Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/85/a18508becfa01f1e4351b5e18651b06d210dbd96debccd48a452acccb901/huggingface_hub-0.35.0-py3-none-any.whl", hash = "sha256:f2e2f693bca9a26530b1c0b9bcd4c1495644dad698e6a0060f90e22e772c31e9", size = 563436 },
{ url = "https://files.pythonhosted.org/packages/f1/60/4acf0c8a3925d9ff491dc08fe84d37e09cfca9c3b885e0db3d4dedb98cea/huggingface_hub-0.35.1-py3-none-any.whl", hash = "sha256:2f0e2709c711e3040e31d3e0418341f7092910f1462dd00350c4e97af47280a8", size = 563340, upload-time = "2025-09-23T13:43:45.343Z" },
]
[[package]]
@ -2734,13 +2732,13 @@ version = "9.5.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
]
dependencies = [
@ -3308,7 +3306,7 @@ wheels = [
[[package]]
name = "lance-namespace-urllib3-client"
version = "0.0.14"
version = "0.0.15"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
@ -3316,14 +3314,14 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/43/09/727f5749da387a16ffd342339d859073e950ae451f66554bfba8e8adac71/lance_namespace_urllib3_client-0.0.14.tar.gz", hash = "sha256:911c6a3b5c2c98f4239b6d96609cf840e740c3af5482f5fb22096afb9db1dc1c", size = 134488 }
sdist = { url = "https://files.pythonhosted.org/packages/a8/14/023f12f2d1e624965a361b535b94cc65dfd949d7325e85372f3eb1c75a95/lance_namespace_urllib3_client-0.0.15.tar.gz", hash = "sha256:27a7bf3add1c03ed5e9ccbf83632b2d5468c4d0e1d2fd7a7fe612d9e70934113", size = 134497, upload-time = "2025-09-24T05:46:10.2Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ca/90/ceb58b9a9f3aca0af1c294d71115ee9d44d6d82e0c9dc57d6743574d6358/lance_namespace_urllib3_client-0.0.14-py3-none-any.whl", hash = "sha256:40277cfcf7c9084419c2784e7924b3e316f6fe5b8057f4dc62a49f3b40c2d80c", size = 229639 },
{ url = "https://files.pythonhosted.org/packages/e7/7d/76f92398313658be01b982f29fb2407bf2ed0f920b49d00628b97618ee96/lance_namespace_urllib3_client-0.0.15-py3-none-any.whl", hash = "sha256:ea931c557489002bff212a21f3929827c8ad9cb7c626747714e120a47698ffdd", size = 229640, upload-time = "2025-09-24T05:46:08.795Z" },
]
[[package]]
name = "lancedb"
version = "0.25.0"
version = "0.25.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecation" },
@ -3337,13 +3335,13 @@ dependencies = [
{ name = "tqdm" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/e7/10953deea89b06ae5bc568169d5ae888ff6df314decb92b9b3e453f53f0b/lancedb-0.25.0-cp39-abi3-macosx_10_15_x86_64.whl", hash = "sha256:ae2e80b7b3be3fa4d92fc8d500f47549dd1f8d28ca5092f1c898b92d0cfd4393", size = 34171227 },
{ url = "https://files.pythonhosted.org/packages/55/7f/2874a3709f1b8c487e707e171c9004a9240af3af0fd7a247b9187bb6e0f7/lancedb-0.25.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:a9d67ea9edffa596c6f190151fdd535da8e355a4fd1979c1dc19d540a5665916", size = 31552856 },
{ url = "https://files.pythonhosted.org/packages/e3/e9/faab70ad918576ed3bb7cb936474137ac265ac3026d3e16e30cd4d3daac2/lancedb-0.25.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8fe20079ed86b1ab75c65dcfc920a9646c835e9c40ef825cadd148c11b0001e", size = 32487962 },
{ url = "https://files.pythonhosted.org/packages/ce/40/5471bc8115f287040b5afdf9d7a20c4685ec16cddb4a7da79e7c1f63914e/lancedb-0.25.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b37bc402d85c83e454d9f2e79480b31acc5904bb159a4fc715032c7560494157", size = 35726794 },
{ url = "https://files.pythonhosted.org/packages/47/5e/aa3d9d2c7a834a9aa539b2b1c731ab860f7e32e2c87b9086ad233ecb13cd/lancedb-0.25.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f9bbc20bd1e64be359ca11c90428c00b0062d26b0291bddf32ab5471a3525c76", size = 32492508 },
{ url = "https://files.pythonhosted.org/packages/fa/37/75f4e3ed7fa00a2cd5d321e8bf13441cdb61a83fbbcd0fa0f1a7241affe1/lancedb-0.25.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1306be9c08e208a5bcb5188275f47f962c2eda96369fad5949a3ddaf592afc6d", size = 35776383 },
{ url = "https://files.pythonhosted.org/packages/b5/af/eb217ea1daab5c28ce4c764d2f672f4e3a5bcd3d4faf7921a8ee28c6cb5b/lancedb-0.25.0-cp39-abi3-win_amd64.whl", hash = "sha256:f66283e5d63c99c2bfbd4eaa134d9a5c5b0145eb26a972648214f8ba87777e24", size = 37826272 },
{ url = "https://files.pythonhosted.org/packages/ad/2b/ed9870288506d8ca61cddf7b1dbb03c68f95b8797feb49467b33ef185477/lancedb-0.25.1-cp39-abi3-macosx_10_15_x86_64.whl", hash = "sha256:ec0a1cab435a5307054b84ffb798a4d828253f23698848788bfe31930e343c6c", size = 34985432, upload-time = "2025-09-23T23:15:56.558Z" },
{ url = "https://files.pythonhosted.org/packages/58/75/320f9142918b646b4b6d0277676c2466d2e0ce2a22aca320d0113b3ef035/lancedb-0.25.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:69e1f8343f6a4ff6985ea13f5c5cdf6d07435d04f8279c4fc6e623a34ceadda0", size = 31993179, upload-time = "2025-09-23T22:20:23.039Z" },
{ url = "https://files.pythonhosted.org/packages/fd/44/d223cb64c9feb78dfa3857690d743e961f76e065935c8c4304cb64659882/lancedb-0.25.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9432134155474e73907fc5e1f8a4310433b9234a0c5f964c21b4c39aca50dde6", size = 32872519, upload-time = "2025-09-23T22:29:03.5Z" },
{ url = "https://files.pythonhosted.org/packages/61/a6/e6d88d8076fa8c40b7b6f96a37f21c75ce3518ccbf64a351d26ae983461a/lancedb-0.25.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955c6e1aa4e249be7456ea7f7c42ba119be5a5c2c51f4d78efeb6c4f3cc2dbdf", size = 36325984, upload-time = "2025-09-23T22:31:46.118Z" },
{ url = "https://files.pythonhosted.org/packages/97/84/14d4f0c3a98a324fcb401161e25fb1699c69ba1cd2928983fb283bd8b04f/lancedb-0.25.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d584bdfb96372c03a209bb8f010eb7358135e4adddb903ae1385450af39e1187", size = 32883704, upload-time = "2025-09-23T22:27:41.393Z" },
{ url = "https://files.pythonhosted.org/packages/68/10/3e8ae8bf9880b2fed10122cef5e535bd67f0df0a874cc3122220d47ca255/lancedb-0.25.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c495da53d3dfa105364f202710d0bb2f031fe54a077b9c2ac9d098d02bd20bb2", size = 36369514, upload-time = "2025-09-23T22:30:53.605Z" },
{ url = "https://files.pythonhosted.org/packages/0d/fb/dce4757f257cb4e11e13b71ce502dc5d1caf51f1e5cccfdae85bf23960a0/lancedb-0.25.1-cp39-abi3-win_amd64.whl", hash = "sha256:2c6effc10c8263ea84261f49d5ff1957c18814ed7e3eaa5094d71b1aa0573871", size = 38390878, upload-time = "2025-09-23T22:55:24.687Z" },
]
[[package]]
@ -3422,7 +3420,7 @@ wheels = [
[[package]]
name = "langsmith"
version = "0.4.30"
version = "0.4.31"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
@ -3433,18 +3431,18 @@ dependencies = [
{ name = "requests-toolbelt" },
{ name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fe/d5/4cc88f246ce615a518a715cd2bf40001d1678ad6805a3706a90570adca8f/langsmith-0.4.30.tar.gz", hash = "sha256:388fe1060aca6507be41f417c7d4168a92dffe27f28bb6ef8a1bfee4a59f3681", size = 958857 }
sdist = { url = "https://files.pythonhosted.org/packages/55/f5/edbdf89a162ee025348b3b2080fb3b88f4a1040a5a186f32d34aca913994/langsmith-0.4.31.tar.gz", hash = "sha256:5fb3729e22bd9a225391936cb9d1080322e6c375bb776514af06b56d6c46ed3e", size = 959698, upload-time = "2025-09-25T04:18:19.55Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ae/d1/b2b2ea7b443c6b028aca209d2e653256912906900cc146e64c65201211b7/langsmith-0.4.30-py3-none-any.whl", hash = "sha256:110767eb83e6da2cc99cfc61958631b5c36624758b52e7af35ec5550ad846cb3", size = 386300 },
{ url = "https://files.pythonhosted.org/packages/3e/8e/e7a43d907a147e1f87eebdd6737483f9feba52a5d4b20f69d0bd6f2fa22f/langsmith-0.4.31-py3-none-any.whl", hash = "sha256:64f340bdead21defe5f4a6ca330c11073e35444989169f669508edf45a19025f", size = 386347, upload-time = "2025-09-25T04:18:16.69Z" },
]
[[package]]
name = "lark"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/37/a13baf0135f348af608c667633cbe5d13aa2c5c15a56ae9ad3e6cba45ae3/lark-1.3.0.tar.gz", hash = "sha256:9a3839d0ca5e1faf7cfa3460e420e859b66bcbde05b634e73c369c8244c5fa48", size = 259551 }
sdist = { url = "https://files.pythonhosted.org/packages/1d/37/a13baf0135f348af608c667633cbe5d13aa2c5c15a56ae9ad3e6cba45ae3/lark-1.3.0.tar.gz", hash = "sha256:9a3839d0ca5e1faf7cfa3460e420e859b66bcbde05b634e73c369c8244c5fa48", size = 259551, upload-time = "2025-09-22T13:45:05.072Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/3e/1c6b43277de64fc3c0333b0e72ab7b52ddaaea205210d60d9b9f83c3d0c7/lark-1.3.0-py3-none-any.whl", hash = "sha256:80661f261fb2584a9828a097a2432efd575af27d20be0fd35d17f0fe37253831", size = 113002 },
{ url = "https://files.pythonhosted.org/packages/a8/3e/1c6b43277de64fc3c0333b0e72ab7b52ddaaea205210d60d9b9f83c3d0c7/lark-1.3.0-py3-none-any.whl", hash = "sha256:80661f261fb2584a9828a097a2432efd575af27d20be0fd35d17f0fe37253831", size = 113002, upload-time = "2025-09-22T13:45:03.747Z" },
]
[[package]]
@ -3463,7 +3461,7 @@ wheels = [
[[package]]
name = "litellm"
version = "1.77.3"
version = "1.77.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
@ -3480,9 +3478,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "tokenizers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/92/86/8bfd372d3d437b773b4b81d6da35674a569c10a9b805409257790e3af271/litellm-1.77.3.tar.gz", hash = "sha256:d8f9d674ef4e7673b1af02428fde27de5a8e84ca7268f003902340586aac7d96", size = 10314535 }
sdist = { url = "https://files.pythonhosted.org/packages/ab/b7/0d3c6dbcff3064238d123f90ae96764a85352f3f5caab6695a55007fd019/litellm-1.77.4.tar.gz", hash = "sha256:ce652e10ecf5b36767bfdf58e53b2802e22c3de383b03554e6ee1a4a66fa743d", size = 10330773, upload-time = "2025-09-24T17:52:44.876Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/b2/122602255b582fdcf630f8e44b5c9175391abe10be5e2f4db6a7d4173df1/litellm-1.77.3-py3-none-any.whl", hash = "sha256:f0c8c6bcfa2c9cd9e9fa0304f9a94894d252e7c74f118c37a8f2e4e525b2592b", size = 9118886 },
{ url = "https://files.pythonhosted.org/packages/3c/32/90f8587818d146d604ed6eec95f96378363fda06b14817399cc68853383e/litellm-1.77.4-py3-none-any.whl", hash = "sha256:66c2bb776f1e19ceddfa977a2bbf7f05e6f26c4b1fec8b2093bd171d842701b8", size = 9138493, upload-time = "2025-09-24T17:52:40.764Z" },
]
[[package]]
@ -4445,13 +4443,13 @@ version = "3.5"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
]
sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065 }
@ -4517,10 +4515,10 @@ version = "1.26.4"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version < '3.11' and sys_platform == 'emscripten'",
]
sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 }
@ -4557,10 +4555,10 @@ version = "2.3.3"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
]
sdist = { url = "https://files.pythonhosted.org/packages/d0/19/95b3d357407220ed24c139018d2518fab0a61a948e68286a25f1a4d049ff/numpy-2.3.3.tar.gz", hash = "sha256:ddc7c39727ba62b80dfdbedf400d1c10ddfa8eefbd7ec8dcb118be8b56d31029", size = 20576648 }
@ -4638,15 +4636,15 @@ wheels = [
[[package]]
name = "ollama"
version = "0.5.4"
version = "0.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "pydantic" },
]
sdist = { url = "https://files.pythonhosted.org/packages/72/62/a36be4555e4218d6c8b35e72e0dfe0823845400097275cd81c9aec4ddf39/ollama-0.5.4.tar.gz", hash = "sha256:75857505a5d42e5e58114a1b78cc8c24596d8866863359d8a2329946a9b6d6f3", size = 45233 }
sdist = { url = "https://files.pythonhosted.org/packages/d6/47/f9ee32467fe92744474a8c72e138113f3b529fc266eea76abfdec9a33f3b/ollama-0.6.0.tar.gz", hash = "sha256:da2b2d846b5944cfbcee1ca1e6ee0585f6c9d45a2fe9467cbcd096a37383da2f", size = 50811, upload-time = "2025-09-24T22:46:02.417Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1b/af/d0a23c8fdec4c8ddb771191d9b36a57fbce6741835a78f1b18ab6d15ae7d/ollama-0.5.4-py3-none-any.whl", hash = "sha256:6374c9bb4f2a371b3583c09786112ba85b006516745689c172a7e28af4d4d1a2", size = 13548 },
{ url = "https://files.pythonhosted.org/packages/b5/c1/edc9f41b425ca40b26b7c104c5f6841a4537bb2552bfa6ca66e81405bb95/ollama-0.6.0-py3-none-any.whl", hash = "sha256:534511b3ccea2dff419ae06c3b58d7f217c55be7897c8ce5868dfb6b219cf7a0", size = 14130, upload-time = "2025-09-24T22:46:01.19Z" },
]
[[package]]
@ -4685,7 +4683,7 @@ wheels = [
[[package]]
name = "openai"
version = "1.108.2"
version = "1.109.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@ -4697,9 +4695,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8c/ff/1dc7bec988cfbab80c2b0ee61d5178915117a4f02f68777d528219e84866/openai-1.108.2.tar.gz", hash = "sha256:e6ce793e0ef8a52d343850a5411edbd5b21214f64a8d29cdca0fcb929e8e3155", size = 563898 }
sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/2d/1cb19289feef689206cd8995d7be6b5d76b7e8d64fd51438acecc11ee395/openai-1.108.2-py3-none-any.whl", hash = "sha256:4c7caa23845a49aebd8f65e01b35739b5b2df5e621d4666cbe55f0973fae81b7", size = 948390 },
{ url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" },
]
[[package]]
@ -5813,16 +5811,16 @@ pycountry = [
[[package]]
name = "pydantic-settings"
version = "2.10.1"
version = "2.11.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/85/1ea668bbab3c50071ca613c6ab30047fb36ab0da1b92fa8f17bbc38fd36c/pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee", size = 172583 }
sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235 },
{ url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" },
]
[[package]]
@ -5974,54 +5972,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5a/dc/491b7661614ab97483abf2056be1deee4dc2490ecbf7bff9ab5cdbac86e1/pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6", size = 83178 },
]
[[package]]
name = "pyside6"
version = "6.9.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyside6-addons" },
{ name = "pyside6-essentials" },
{ name = "shiboken6" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/43/42/43577413bd5ab26f5f21e7a43c9396aac158a5d01900c87e4609c0e96278/pyside6-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:71245c76bfbe5c41794ffd8546730ec7cc869d4bbe68535639e026e4ef8a7714", size = 558102 },
{ url = "https://files.pythonhosted.org/packages/12/df/cb84f802df3dcc1d196d2f9f37dbb8227761826f936987c9386b8ae1ffcc/pyside6-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:64a9e2146e207d858e00226f68d7c1b4ab332954742a00dcabb721bb9e4aa0cd", size = 558243 },
{ url = "https://files.pythonhosted.org/packages/94/2d/715db9da437b4632d06e2c4718aee9937760b84cf36c23d5441989e581b0/pyside6-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:a78fad16241a1f2ed0fa0098cf3d621f591fc75b4badb7f3fa3959c9d861c806", size = 558245 },
{ url = "https://files.pythonhosted.org/packages/59/90/2e75cbff0e17f16b83d2b7e8434ae9175cae8d6ff816c9b56d307cf53c86/pyside6-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:d1afbf48f9a5612b9ee2dc7c384c1a65c08b5830ba5e7d01f66d82678e5459df", size = 564604 },
{ url = "https://files.pythonhosted.org/packages/dc/34/e3dd4e046673efcbcfbe0aa2760df06b2877739b8f4da60f0229379adebd/pyside6-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:1499b1d7629ab92119118e2636b4ace836b25e457ddf01003fdca560560b8c0a", size = 401833 },
]
[[package]]
name = "pyside6-addons"
version = "6.9.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyside6-essentials" },
{ name = "shiboken6" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/39/a8f4a55001b6a0aaee042e706de2447f21c6dc2a610f3d3debb7d04db821/pyside6_addons-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:7019fdcc0059626eb1608b361371f4dc8cb7f2d02f066908fd460739ff5a07cd", size = 316693692 },
{ url = "https://files.pythonhosted.org/packages/14/48/0b16e9dabd4cafe02d59531832bc30b6f0e14c92076e90dd02379d365cb2/pyside6_addons-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:24350e5415317f269e743d1f7b4933fe5f59d90894aa067676c9ce6bfe9e7988", size = 166984613 },
{ url = "https://files.pythonhosted.org/packages/f4/55/dc42a73387379bae82f921b7659cd2006ec0e80f7052f83ddc07e9eb9cca/pyside6_addons-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:af8dee517de8d336735a6543f7dd496eb580e852c14b4d2304b890e2a29de499", size = 162908466 },
{ url = "https://files.pythonhosted.org/packages/14/fa/396a2e86230c493b565e2dc89dc64e4b1c63582ac69afe77b693c3817a53/pyside6_addons-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:98d2413904ee4b2b754b077af7875fa6ec08468c01a6628a2c9c3d2cece4874f", size = 160216647 },
{ url = "https://files.pythonhosted.org/packages/a7/fe/25f61259f1d5ec4648c9f6d2abd8e2cba2188f10735a57abafda719958e5/pyside6_addons-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:b430cae782ff1a99fb95868043557f22c31b30c94afb9cf73278584e220a2ab6", size = 27126649 },
]
[[package]]
name = "pyside6-essentials"
version = "6.9.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "shiboken6" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/08/21/41960c03721a99e7be99a96ebb8570bdfd6f76f512b5d09074365e27ce28/pyside6_essentials-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:713eb8dcbb016ff10e6fca129c1bf2a0fd8cfac979e689264e0be3b332f9398e", size = 133092348 },
{ url = "https://files.pythonhosted.org/packages/3e/02/e38ff18f3d2d8d3071aa6823031aad6089267aa4668181db65ce9948bfc0/pyside6_essentials-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:84b8ca4fa56506e2848bdb4c7a0851a5e7adcb916bef9bce25ce2eeb6c7002cc", size = 96569791 },
{ url = "https://files.pythonhosted.org/packages/9a/a1/1203d4db6919b42a937d9ac5ddb84b20ea42eb119f7c1ddeb77cb8fdb00c/pyside6_essentials-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:d0f701503974bd51b408966539aa6956f3d8536e547ea8002fbfb3d77796bbc3", size = 94311809 },
{ url = "https://files.pythonhosted.org/packages/a8/e3/3b3e869d3e332b6db93f6f64fac3b12f5c48b84f03f2aa50ee5c044ec0de/pyside6_essentials-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:b2f746f795138ac63eb173f9850a6db293461a1b6ce22cf6dafac7d194a38951", size = 72624566 },
{ url = "https://files.pythonhosted.org/packages/91/70/db78afc8b60b2e53f99145bde2f644cca43924a4dd869ffe664e0792730a/pyside6_essentials-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:ecd7b5cd9e271f397fb89a6357f4ec301d8163e50869c6c557f9ccc6bed42789", size = 49561720 },
]
[[package]]
name = "pysocks"
version = "1.7.1"
@ -6380,15 +6330,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/01/1b/5dbe84eefc86f48473947e2f41711aded97eecef1231f4558f1f02713c12/pyzmq-27.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c9f7f6e13dff2e44a6afeaf2cf54cee5929ad64afaf4d40b50f93c58fc687355", size = 544862 },
]
[[package]]
name = "qasync"
version = "0.27.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1c/e0/7c7c973f52e1765d6ddfc41e9272294f65d5d52b8f5f5eae92adf411ad46/qasync-0.27.1.tar.gz", hash = "sha256:8dc768fd1ee5de1044c7c305eccf2d39d24d87803ea71189d4024fb475f4985f", size = 14287 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/51/06/bc628aa2981bcfd452a08ee435b812fd3eee4ada8acb8a76c4a09d1a5a77/qasync-0.27.1-py3-none-any.whl", hash = "sha256:5d57335723bc7d9b328dadd8cb2ed7978640e4bf2da184889ce50ee3ad2602c7", size = 14866 },
]
[[package]]
name = "rapidfuzz"
version = "3.14.1"
@ -7007,13 +6948,13 @@ version = "1.16.2"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"python_full_version >= '3.13' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation != 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and platform_python_implementation == 'PyPy' and sys_platform != 'emscripten'",
"python_full_version == '3.11.*' and sys_platform == 'emscripten'",
]
dependencies = [
@ -7084,15 +7025,15 @@ wheels = [
[[package]]
name = "sentry-sdk"
version = "2.38.0"
version = "2.39.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b2/22/60fd703b34d94d216b2387e048ac82de3e86b63bc28869fb076f8bb0204a/sentry_sdk-2.38.0.tar.gz", hash = "sha256:792d2af45e167e2f8a3347143f525b9b6bac6f058fb2014720b40b84ccbeb985", size = 348116 }
sdist = { url = "https://files.pythonhosted.org/packages/4c/72/43294fa4bdd75c51610b5104a3ff834459ba653abb415150aa7826a249dd/sentry_sdk-2.39.0.tar.gz", hash = "sha256:8c185854d111f47f329ab6bc35993f28f7a6b7114db64aa426b326998cfa14e9", size = 348556, upload-time = "2025-09-25T09:15:39.064Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7a/84/bde4c4bbb269b71bc09316af8eb00da91f67814d40337cc12ef9c8742541/sentry_sdk-2.38.0-py2.py3-none-any.whl", hash = "sha256:2324aea8573a3fa1576df7fb4d65c4eb8d9929c8fa5939647397a07179eef8d0", size = 370346 },
{ url = "https://files.pythonhosted.org/packages/dd/44/4356cc64246ba7b2b920f7c97a85c3c52748e213e250b512ee8152eb559d/sentry_sdk-2.39.0-py2.py3-none-any.whl", hash = "sha256:ba655ca5e57b41569b18e2a5552cb3375209760a5d332cdd87c6c3f28f729602", size = 370851, upload-time = "2025-09-25T09:15:36.35Z" },
]
[package.optional-dependencies]
@ -7118,18 +7059,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
]
[[package]]
name = "shiboken6"
version = "6.9.2"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1a/1e/62a8757aa0aa8d5dbf876f6cb6f652a60be9852e7911b59269dd983a7fb5/shiboken6-6.9.2-cp39-abi3-macosx_12_0_universal2.whl", hash = "sha256:8bb1c4326330e53adeac98bfd9dcf57f5173a50318a180938dcc4825d9ca38da", size = 406337 },
{ url = "https://files.pythonhosted.org/packages/3b/bb/72a8ed0f0542d9ea935f385b396ee6a4bbd94749c817cbf2be34e80a16d3/shiboken6-6.9.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3b54c0a12ea1b03b9dc5dcfb603c366e957dc75341bf7cb1cc436d0d848308ee", size = 206733 },
{ url = "https://files.pythonhosted.org/packages/52/c4/09e902f5612a509cef2c8712c516e4fe44f3a1ae9fcd8921baddb5e6bae4/shiboken6-6.9.2-cp39-abi3-manylinux_2_39_aarch64.whl", hash = "sha256:a5f5985938f5acb604c23536a0ff2efb3cccb77d23da91fbaff8fd8ded3dceb4", size = 202784 },
{ url = "https://files.pythonhosted.org/packages/a4/ea/a56b094a4bf6facf89f52f58e83684e168b1be08c14feb8b99969f3d4189/shiboken6-6.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:68c33d565cd4732be762d19ff67dfc53763256bac413d392aa8598b524980bc4", size = 1152089 },
{ url = "https://files.pythonhosted.org/packages/48/64/562a527fc55fbf41fa70dae735929988215505cb5ec0809fb0aef921d4a0/shiboken6-6.9.2-cp39-abi3-win_arm64.whl", hash = "sha256:c5b827797b3d89d9b9a3753371ff533fcd4afc4531ca51a7c696952132098054", size = 1708948 },
]
[[package]]
name = "sigtools"
version = "4.0.1"
@ -7291,11 +7220,11 @@ asyncio = [
[[package]]
name = "sqlglot"
version = "27.16.3"
version = "27.18.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8f/a9/c50f262c5b1a4ca110a110b3ea2312e9a2febee19c21f849389501d08c5c/sqlglot-27.16.3.tar.gz", hash = "sha256:bf5cc3b7c90c3682365353a318089e69e859939943d7882562ba39be650a6202", size = 5471626 }
sdist = { url = "https://files.pythonhosted.org/packages/01/56/ef3c705c750b262d4cca787982e3b640d46e8b40ba06d1d5577fb1bb4d76/sqlglot-27.18.0.tar.gz", hash = "sha256:6901eb1805226f38852e53c473d67c8e13bacf11ffe5d90170874f3d7b199acf", size = 5478250, upload-time = "2025-09-25T10:31:47.423Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/48/16/3ef8601ebef21b0971179242589d9811e68c9032c22f1910e3a68fe698ff/sqlglot-27.16.3-py3-none-any.whl", hash = "sha256:3765ef1da6c9a04dd9e9ab4bcf24ca54daae72d86d693954aed84dbbbff2ff3b", size = 518006 },
{ url = "https://files.pythonhosted.org/packages/ca/cc/110103f57c249bed40e85bd069ec3d2c29250ea228c89ed9d161924bb44d/sqlglot-27.18.0-py3-none-any.whl", hash = "sha256:71c122ddc99128347e1cd9ad6ed08b378cdf233e27eb78abce7542f47c723edd", size = 520293, upload-time = "2025-09-25T10:31:44.927Z" },
]
[[package]]
@ -7662,7 +7591,7 @@ wheels = [
[[package]]
name = "typer"
version = "0.19.1"
version = "0.19.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@ -7670,9 +7599,9 @@ dependencies = [
{ name = "shellingham" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/03/ea/9cc57c3c627fd7a6a0907ea371019fe74c3ec00e3cf209a6864140a602ad/typer-0.19.1.tar.gz", hash = "sha256:cb881433a4b15dacc875bb0583d1a61e78497806741f9aba792abcab390c03e6", size = 104802 }
sdist = { url = "https://files.pythonhosted.org/packages/21/ca/950278884e2ca20547ff3eb109478c6baf6b8cf219318e6bc4f666fad8e8/typer-0.19.2.tar.gz", hash = "sha256:9ad824308ded0ad06cc716434705f691d4ee0bfd0fb081839d2e426860e7fdca", size = 104755, upload-time = "2025-09-23T09:47:48.256Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/fa/6473c00b5eb26a2ba427813107699d3e6f4e1a4afad3f7494b17bdef3422/typer-0.19.1-py3-none-any.whl", hash = "sha256:914b2b39a1da4bafca5f30637ca26fa622a5bf9f515e5fdc772439f306d5682a", size = 46876 },
{ url = "https://files.pythonhosted.org/packages/00/22/35617eee79080a5d071d0f14ad698d325ee6b3bf824fc0467c03b30e7fa8/typer-0.19.2-py3-none-any.whl", hash = "sha256:755e7e19670ffad8283db353267cb81ef252f595aa6834a0d1ca9312d9326cb9", size = 46748, upload-time = "2025-09-23T09:47:46.777Z" },
]
[[package]]
@ -7885,16 +7814,16 @@ wheels = [
[[package]]
name = "uvicorn"
version = "0.36.0"
version = "0.37.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ef/5e/f0cd46063a02fd8515f0e880c37d2657845b7306c16ce6c4ffc44afd9036/uvicorn-0.36.0.tar.gz", hash = "sha256:527dc68d77819919d90a6b267be55f0e76704dca829d34aea9480be831a9b9d9", size = 80032 }
sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/06/5cc0542b47c0338c1cb676b348e24a1c29acabc81000bced518231dded6f/uvicorn-0.36.0-py3-none-any.whl", hash = "sha256:6bb4ba67f16024883af8adf13aba3a9919e415358604ce46780d3f9bdc36d731", size = 67675 },
{ url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" },
]
[package.optional-dependencies]
@ -8071,9 +8000,9 @@ wheels = [
name = "wcwidth"
version = "0.2.14"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293 }
sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286 },
{ url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
]
[[package]]