Compare commits

...
Sign in to create a new pull request.

1 commit

Author SHA1 Message Date
vasilije
a276630272 remove networkx 2025-07-13 22:32:03 +02:00
14 changed files with 167 additions and 122 deletions

View file

@ -130,7 +130,7 @@ async def add(
- DEFAULT_USER_EMAIL: Custom default user email
- DEFAULT_USER_PASSWORD: Custom default user password
- VECTOR_DB_PROVIDER: "lancedb" (default), "chromadb", "qdrant", "weaviate"
- GRAPH_DATABASE_PROVIDER: "kuzu" (default), "neo4j", "networkx"
- GRAPH_DATABASE_PROVIDER: "kuzu" (default), "neo4j"
Raises:
FileNotFoundError: If specified file paths don't exist

View file

@ -21,10 +21,6 @@ async def get_graph_engine() -> GraphDBInterface:
if hasattr(graph_client, "initialize"):
await graph_client.initialize()
# Handle loading of graph for NetworkX
if config["graph_database_provider"].lower() == "networkx" and graph_client.graph is None:
await graph_client.load_graph_from_file()
return graph_client
@ -135,8 +131,4 @@ def create_graph_engine(
graph_database_password=graph_database_password or None,
)
from .networkx.adapter import NetworkXAdapter
graph_client = NetworkXAdapter(filename=graph_file_path)
return graph_client
raise EnvironmentError(f"Unsupported graph database provider: {graph_database_provider}")

View file

@ -1,6 +1,5 @@
from typing import Any, Optional
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter
from cognee.modules.retrieval.base_retriever import BaseRetriever
from cognee.modules.retrieval.utils.completion import generate_completion
from cognee.modules.retrieval.exceptions import SearchTypeNotSupported, CypherSearchError
@ -31,8 +30,7 @@ class CypherSearchRetriever(BaseRetriever):
"""
Retrieves relevant context using a cypher query.
If the graph engine is an instance of NetworkXAdapter, raises SearchTypeNotSupported. If
any error occurs during execution, logs the error and raises CypherSearchError.
If any error occurs during execution, logs the error and raises CypherSearchError.
Parameters:
-----------
@ -47,11 +45,6 @@ class CypherSearchRetriever(BaseRetriever):
try:
graph_engine = await get_graph_engine()
if isinstance(graph_engine, NetworkXAdapter):
raise SearchTypeNotSupported(
"CYPHER search type not supported for NetworkXAdapter."
)
result = await graph_engine.query(query)
except Exception as e:
logger.error("Failed to execute cypher search retrieval: %s", str(e))

View file

@ -1,7 +1,6 @@
from typing import Any, Optional
import logging
from cognee.infrastructure.databases.graph import get_graph_engine
from cognee.infrastructure.databases.graph.networkx.adapter import NetworkXAdapter
from cognee.infrastructure.llm.get_llm_client import get_llm_client
from cognee.infrastructure.llm.prompts import render_prompt
from cognee.modules.retrieval.base_retriever import BaseRetriever
@ -108,9 +107,7 @@ class NaturalLanguageRetriever(BaseRetriever):
"""
Retrieves relevant context using a natural language query converted to Cypher.
This method raises a SearchTypeNotSupported exception if the graph engine does not
support natural language search. It also logs errors if the execution of the retrieval
fails.
This method logs errors if the execution of the retrieval fails.
Parameters:
-----------
@ -126,9 +123,6 @@ class NaturalLanguageRetriever(BaseRetriever):
try:
graph_engine = await get_graph_engine()
if isinstance(graph_engine, (NetworkXAdapter)):
raise SearchTypeNotSupported("Natural language search type not supported.")
return await self._execute_cypher_query(query, graph_engine)
except Exception as e:
logger.error("Failed to execute natural language search retrieval: %s", str(e))

View file

@ -1,6 +1,5 @@
import os
import json
import networkx
from cognee.shared.logging_utils import get_logger
from cognee.infrastructure.files.storage import LocalStorage
@ -11,8 +10,6 @@ logger = get_logger()
async def cognee_network_visualization(graph_data, destination_file_path: str = None):
nodes_data, edges_data = graph_data
G = networkx.DiGraph()
nodes_list = []
color_map = {
"Entity": "#f47710",
@ -44,15 +41,11 @@ async def cognee_network_visualization(graph_data, destination_file_path: str =
pass
nodes_list.append(node_info)
G.add_node(node_id, **node_info)
edge_labels = {}
links_list = []
for source, target, relation, edge_info in edges_data:
source = str(source)
target = str(target)
G.add_edge(source, target)
edge_labels[(source, target)] = relation
links_list.append({"source": source, "target": target, "relation": relation})
html_template = """

View file

@ -284,7 +284,6 @@ class SummarizedCode(BaseModel):
class GraphDBType(Enum):
NETWORKX = auto()
NEO4J = auto()
FALKORDB = auto()
KUZU = auto()

View file

@ -7,7 +7,6 @@ import requests
import hashlib
from datetime import datetime, timezone
import graphistry
import networkx as nx
import pandas as pd
import matplotlib.pyplot as plt
import http.server
@ -146,31 +145,68 @@ async def register_graphistry():
def prepare_edges(graph, source, target, edge_key):
edge_list = [
{
source: str(edge[0]),
target: str(edge[1]),
edge_key: str(edge[2]),
}
for edge in graph.edges(keys=True, data=True)
]
"""
Prepare edges data for visualization.
:param graph: Either a networkx graph or a tuple of (nodes, edges)
:param source: Column name for source node
:param target: Column name for target node
:param edge_key: Column name for edge key/relationship
:return: DataFrame with edge data
"""
edge_list = []
# Handle networkx graph format (for backward compatibility)
if hasattr(graph, "edges"):
edge_list = [
{
source: str(edge[0]),
target: str(edge[1]),
edge_key: str(edge[2]),
}
for edge in graph.edges(keys=True, data=True)
]
# Handle tuple format (nodes, edges)
elif isinstance(graph, tuple) and len(graph) == 2:
nodes, edges = graph
edge_list = [
{
source: str(edge[0]),
target: str(edge[1]),
edge_key: str(edge[2]) if len(edge) > 2 else "relationship",
}
for edge in edges
]
# Handle empty or invalid graph
else:
edge_list = []
return pd.DataFrame(edge_list)
def prepare_nodes(graph, include_size=False):
"""
Prepare nodes data for visualization.
:param graph: Either a networkx graph or a tuple of (nodes, edges)
:param include_size: Whether to include size information
:return: DataFrame with node data
"""
nodes_data = []
for node in graph.nodes:
node_info = graph.nodes[node]
if not node_info:
continue
# Handle networkx graph format (for backward compatibility)
if hasattr(graph, "nodes"):
for node in graph.nodes:
node_info = graph.nodes[node]
node_data = {
**node_info,
"id": str(node),
"name": node_info["name"] if "name" in node_info else str(node),
}
if not node_info:
continue
node_data = {
**node_info,
"id": str(node),
"name": node_info["name"] if "name" in node_info else str(node),
}
if include_size:
default_size = 10 # Default node size
@ -183,6 +219,32 @@ def prepare_nodes(graph, include_size=False):
nodes_data.append(node_data)
# Handle tuple format (nodes, edges)
elif isinstance(graph, tuple) and len(graph) == 2:
nodes, edges = graph
for node_id, node_info in nodes:
if not node_info:
continue
node_data = {
**node_info,
"id": str(node_id),
"name": node_info.get("name", str(node_id)),
}
if include_size:
default_size = 10 # Default node size
larger_size = 20 # Size for nodes with specific keywords in their ID
keywords = ["DOCUMENT", "User"]
node_size = (
larger_size
if any(keyword in str(node_id) for keyword in keywords)
else default_size
)
node_data["size"] = node_size
nodes_data.append(node_data)
return pd.DataFrame(nodes_data)
@ -191,24 +253,18 @@ async def render_graph(
):
await register_graphistry()
if not isinstance(graph, nx.MultiDiGraph):
graph_engine = await get_graph_engine()
networkx_graph = nx.MultiDiGraph()
# Get graph data from the graph engine
graph_engine = await get_graph_engine()
(nodes, edges) = await graph_engine.get_graph_data()
(nodes, edges) = await graph_engine.get_graph_data()
networkx_graph.add_nodes_from(nodes)
networkx_graph.add_edges_from(edges)
graph = networkx_graph
edges = prepare_edges(graph, "source_node", "target_node", "relationship_name")
plotter = graphistry.edges(edges, "source_node", "target_node")
# Convert to dataframes for graphistry
edges_df = prepare_edges(graph, "source_node", "target_node", "relationship_name")
plotter = graphistry.edges(edges_df, "source_node", "target_node")
plotter = plotter.bind(edge_label="relationship_name")
if include_nodes:
nodes = prepare_nodes(graph, include_size=include_size)
plotter = plotter.nodes(nodes, "id")
nodes_df = prepare_nodes(graph, include_size=include_size)
plotter = plotter.nodes(nodes_df, "id")
if include_size:
plotter = plotter.bind(point_size="size")
@ -241,41 +297,19 @@ async def convert_to_serializable_graph(G):
(nodes, edges) = G
networkx_graph = nx.MultiDiGraph()
networkx_graph.add_nodes_from(nodes)
networkx_graph.add_edges_from(edges)
# Create a new graph to store the serializable version
new_G = nx.MultiDiGraph()
# Serialize nodes
for node, data in networkx_graph.nodes(data=True):
# Convert nodes to serializable format
serializable_nodes = []
for node_id, data in nodes:
serializable_data = {k: str(v) for k, v in data.items()}
new_G.add_node(str(node), **serializable_data)
serializable_nodes.append((str(node_id), serializable_data))
# Serialize edges
for u, v, data in networkx_graph.edges(data=True):
# Convert edges to serializable format
serializable_edges = []
for u, v, data in edges:
serializable_data = {k: str(v) for k, v in data.items()}
new_G.add_edge(str(u), str(v), **serializable_data)
serializable_edges.append((str(u), str(v), serializable_data))
return new_G
def generate_layout_positions(G, layout_func, layout_scale):
"""
Generate layout positions for the graph using the specified layout function.
"""
positions = layout_func(G)
return {str(node): (x * layout_scale, y * layout_scale) for node, (x, y) in positions.items()}
def assign_node_colors(G, node_attribute, palette):
"""
Assign colors to nodes based on a specified attribute and a given palette.
"""
unique_attrs = set(G.nodes[node].get(node_attribute, "Unknown") for node in G.nodes)
color_map = {attr: palette[i % len(palette)] for i, attr in enumerate(unique_attrs)}
return [color_map[G.nodes[node].get(node_attribute, "Unknown")] for node in G.nodes], color_map
return (serializable_nodes, serializable_edges)
def embed_logo(p, layout_scale, logo_alpha, position):
@ -309,14 +343,16 @@ def embed_logo(p, layout_scale, logo_alpha, position):
def graph_to_tuple(graph):
"""
Converts a networkx graph to a tuple of (nodes, edges).
Converts a graph to a tuple of (nodes, edges).
:param graph: A networkx graph.
:param graph: A graph represented as a tuple of (nodes, edges).
:return: A tuple (nodes, edges).
"""
nodes = list(graph.nodes(data=True)) # Get nodes with attributes
edges = list(graph.edges(data=True)) # Get edges with attributes
return (nodes, edges)
if isinstance(graph, tuple) and len(graph) == 2:
return graph
# If it's some other format, return empty tuple
return ([], [])
def start_visualization_server(

View file

@ -4,16 +4,18 @@ import asyncio
async def check_graph_metrics_consistency_across_adapters(include_optional=False):
# NetworkX has been moved to the community package
# This test now only uses neo4j and kuzu for consistency checks
neo4j_metrics = await get_metrics(provider="neo4j", include_optional=include_optional)
networkx_metrics = await get_metrics(provider="networkx", include_optional=include_optional)
kuzu_metrics = await get_metrics(provider="kuzu", include_optional=include_optional)
diff_keys = set(neo4j_metrics.keys()).symmetric_difference(set(networkx_metrics.keys()))
diff_keys = set(neo4j_metrics.keys()).symmetric_difference(set(kuzu_metrics.keys()))
if diff_keys:
raise AssertionError(f"Metrics dictionaries have different keys: {diff_keys}")
for key, neo4j_value in neo4j_metrics.items():
assert networkx_metrics[key] == neo4j_value, (
f"Difference in '{key}': got {neo4j_value} with neo4j and {networkx_metrics[key]} with networkx"
assert kuzu_metrics[key] == neo4j_value, (
f"Difference in '{key}': got {neo4j_value} with neo4j and {kuzu_metrics[key]} with kuzu"
)

View file

@ -3,5 +3,10 @@ import asyncio
if __name__ == "__main__":
asyncio.run(assert_metrics(provider="networkx", include_optional=False))
asyncio.run(assert_metrics(provider="networkx", include_optional=True))
# NetworkX has been moved to the community package
# To run these tests, install the community networkx adapter first
# and update the imports accordingly
print("NetworkX tests have been moved to the community package")
print("Please install the community networkx adapter to run these tests")
# asyncio.run(assert_metrics(provider="networkx", include_optional=False))
# asyncio.run(assert_metrics(provider="networkx", include_optional=True))

View file

@ -99,7 +99,7 @@ async def main():
from cognee.infrastructure.databases.graph import get_graph_config
graph_config = get_graph_config()
assert not os.path.exists(graph_config.graph_file_path), "Networkx graph database is not empty"
assert not os.path.exists(graph_config.graph_file_path), "Graph database is not empty"
if __name__ == "__main__":

View file

@ -60,7 +60,7 @@ async def relational_db_migration():
relationship_label = "ReportsTo"
# 3. Directly verify the 'reports to' hierarchy
graph_db_provider = os.getenv("GRAPH_DATABASE_PROVIDER", "networkx").lower()
graph_db_provider = os.getenv("GRAPH_DATABASE_PROVIDER", "kuzu").lower()
distinct_node_names = set()
found_edges = set()

View file

@ -1,6 +1,5 @@
import os
import pytest
import networkx as nx
import pandas as pd
from unittest.mock import patch, mock_open
from io import BytesIO
@ -58,18 +57,27 @@ def test_get_file_content_hash_stream():
def test_prepare_edges():
graph = nx.MultiDiGraph()
graph.add_edge("A", "B", key="AB", weight=1)
# Test with tuple format (nodes, edges)
nodes = [("A", {"name": "Node A"}), ("B", {"name": "Node B"})]
edges = [("A", "B", "AB", {"weight": 1})]
graph = (nodes, edges)
edges_df = prepare_edges(graph, "source", "target", "key")
assert isinstance(edges_df, pd.DataFrame)
assert len(edges_df) == 1
assert edges_df.iloc[0]["source"] == "A"
assert edges_df.iloc[0]["target"] == "B"
assert edges_df.iloc[0]["key"] == "AB"
def test_prepare_nodes():
graph = nx.Graph()
graph.add_node(1, name="Node1")
# Test with tuple format (nodes, edges)
nodes = [(1, {"name": "Node1"}), (2, {"name": "Node2"})]
edges = []
graph = (nodes, edges)
nodes_df = prepare_nodes(graph)
assert isinstance(nodes_df, pd.DataFrame)
assert len(nodes_df) == 1
assert len(nodes_df) == 2
assert nodes_df.iloc[0]["name"] == "Node1"
assert nodes_df.iloc[1]["name"] == "Node2"

36
poetry.lock generated
View file

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
[[package]]
name = "aiobotocore"
@ -1406,6 +1406,27 @@ files = [
{file = "coverage-7.9.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0b3496922cb5f4215bf5caaef4cf12364a26b0be82e9ed6d050f3352cf2d7ef0"},
{file = "coverage-7.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9565c3ab1c93310569ec0d86b017f128f027cab0b622b7af288696d7ed43a16d"},
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2241ad5dbf79ae1d9c08fe52b36d03ca122fb9ac6bca0f34439e99f8327ac89f"},
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb5838701ca68b10ebc0937dbd0eb81974bac54447c55cd58dea5bca8451029"},
{file = "coverage-7.9.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a25f814591a8c0c5372c11ac8967f669b97444c47fd794926e175c4047ece"},
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2d04b16a6062516df97969f1ae7efd0de9c31eb6ebdceaa0d213b21c0ca1a683"},
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7931b9e249edefb07cd6ae10c702788546341d5fe44db5b6108a25da4dca513f"},
{file = "coverage-7.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52e92b01041151bf607ee858e5a56c62d4b70f4dac85b8c8cb7fb8a351ab2c10"},
{file = "coverage-7.9.1-cp313-cp313t-win32.whl", hash = "sha256:684e2110ed84fd1ca5f40e89aa44adf1729dc85444004111aa01866507adf363"},
{file = "coverage-7.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:437c576979e4db840539674e68c84b3cda82bc824dd138d56bead1435f1cb5d7"},
{file = "coverage-7.9.1-cp313-cp313t-win_arm64.whl", hash = "sha256:18a0912944d70aaf5f399e350445738a1a20b50fbea788f640751c2ed9208b6c"},
{file = "coverage-7.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f424507f57878e424d9a95dc4ead3fbdd72fd201e404e861e465f28ea469951"},
{file = "coverage-7.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:535fde4001b2783ac80865d90e7cc7798b6b126f4cd8a8c54acfe76804e54e58"},
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02532fd3290bb8fa6bec876520842428e2a6ed6c27014eca81b031c2d30e3f71"},
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56f5eb308b17bca3bbff810f55ee26d51926d9f89ba92707ee41d3c061257e55"},
{file = "coverage-7.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfa447506c1a52271f1b0de3f42ea0fa14676052549095e378d5bff1c505ff7b"},
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9ca8e220006966b4a7b68e8984a6aee645a0384b0769e829ba60281fe61ec4f7"},
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49f1d0788ba5b7ba65933f3a18864117c6506619f5ca80326b478f72acf3f385"},
{file = "coverage-7.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:68cd53aec6f45b8e4724c0950ce86eacb775c6be01ce6e3669fe4f3a21e768ed"},
{file = "coverage-7.9.1-cp39-cp39-win32.whl", hash = "sha256:95335095b6c7b1cc14c3f3f17d5452ce677e8490d101698562b2ffcacc304c8d"},
{file = "coverage-7.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:e1b5191d1648acc439b24721caab2fd0c86679d8549ed2c84d5a7ec1bedcc244"},
{file = "coverage-7.9.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:db0f04118d1db74db6c9e1cb1898532c7dcc220f1d2718f058601f7c3f499514"},
{file = "coverage-7.9.1-py3-none-any.whl", hash = "sha256:66b974b145aa189516b6bf2d8423e888b742517d37872f6ee4c5be0073bd9a3c"},
{file = "coverage-7.9.1.tar.gz", hash = "sha256:6cf43c78c4282708a28e466316935ec7489a9c487518a77fa68f716c67909cec"},
]
[package.dependencies]
@ -3988,6 +4009,8 @@ python-versions = "*"
groups = ["main"]
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]
[package.dependencies]
@ -6231,10 +6254,10 @@ files = [
name = "networkx"
version = "3.4.2"
description = "Python package for creating and manipulating graphs and networks"
optional = false
optional = true
python-versions = ">=3.10"
groups = ["main"]
markers = "python_version == \"3.10\""
markers = "python_version == \"3.10\" and (extra == \"llama-index\" or extra == \"deepeval\" or extra == \"docs\")"
files = [
{file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"},
{file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"},
@ -6252,10 +6275,10 @@ test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
name = "networkx"
version = "3.5"
description = "Python package for creating and manipulating graphs and networks"
optional = false
optional = true
python-versions = ">=3.11"
groups = ["main"]
markers = "python_version >= \"3.11\" and python_version < \"3.13\" or python_full_version == \"3.13.0\""
markers = "python_version >= \"3.11\" and (extra == \"llama-index\" or extra == \"deepeval\" or extra == \"docs\")"
files = [
{file = "networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec"},
{file = "networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037"},
@ -7665,6 +7688,7 @@ files = [
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
@ -12215,4 +12239,4 @@ weaviate = ["weaviate-client"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<=3.13"
content-hash = "ca2a3e8260092933419793efe202d50ae7b1c6ce738750876fd5f64a31718790"
content-hash = "817482806193782afc9c92e1644a12f10452d6e5920ed8ba9a27c3215b95fa5a"

View file

@ -44,7 +44,6 @@ dependencies = [
"pypdf>=4.1.0,<6.0.0",
"jinja2>=3.1.3,<4",
"matplotlib>=3.8.3,<4",
"networkx>=3.4.2,<4",
"lancedb==0.21.0",
"alembic>=1.13.3,<2",
"pre-commit>=4.0.1,<5",