From 7ec5cffd8e0df5ce067d35e2e544906aa48e3101 Mon Sep 17 00:00:00 2001 From: lxobr <122801072+lxobr@users.noreply.github.com> Date: Sat, 23 Nov 2024 14:02:21 +0100 Subject: [PATCH 01/52] feat: Cog-693 expand dependency graph Expand each file node into a subgraph containing high-level code parts - Implemented `extract_code_parts` to parse and extract high-level components (classes, functions, imports, and top-level code) from Python source files using `parso`. - Developed `expand_dependency_graph` to expand Python file nodes into their components. - Included a checker script --- .../code/enrich_dependency_graph_checker.py | 27 ++++++++ .../code/expand_dependency_graph_checker.py | 29 +++++++++ .../code/get_repo_dependency_graph_checker.py | 27 ++++++++ .../repo_processor/enrich_dependency_graph.py | 56 +++++++++++++++++ .../repo_processor/expand_dependency_graph.py | 49 +++++++++++++++ .../repo_processor/extract_code_parts.py | 59 ++++++++++++++++++ .../get_repo_dependency_graph.py | 61 +++++++++++++++++++ 7 files changed, 308 insertions(+) create mode 100644 cognee/tasks/code/enrich_dependency_graph_checker.py create mode 100644 cognee/tasks/code/expand_dependency_graph_checker.py create mode 100644 cognee/tasks/code/get_repo_dependency_graph_checker.py create mode 100644 cognee/tasks/repo_processor/enrich_dependency_graph.py create mode 100644 cognee/tasks/repo_processor/expand_dependency_graph.py create mode 100644 cognee/tasks/repo_processor/extract_code_parts.py create mode 100644 cognee/tasks/repo_processor/get_repo_dependency_graph.py diff --git a/cognee/tasks/code/enrich_dependency_graph_checker.py b/cognee/tasks/code/enrich_dependency_graph_checker.py new file mode 100644 index 000000000..3e56fab59 --- /dev/null +++ b/cognee/tasks/code/enrich_dependency_graph_checker.py @@ -0,0 +1,27 @@ +import os +import asyncio +import argparse +from cognee.tasks.repo_processor.get_repo_dependency_graph import get_repo_dependency_graph +from 
cognee.tasks.repo_processor.enrich_dependency_graph import enrich_dependency_graph + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("repo_path", help="Path to the repository") + args = parser.parse_args() + + repo_path = args.repo_path + if not os.path.exists(repo_path): + print(f"Error: The provided repository path does not exist: {repo_path}") + return + + graph = asyncio.run(get_repo_dependency_graph(repo_path)) + graph = asyncio.run(enrich_dependency_graph(graph)) + for node in graph.nodes: + print(f"Node: {node}") + for _, target, data in graph.out_edges(node, data=True): + print(f" Edge to {target}, data: {data}") + + +if __name__ == "__main__": + main() diff --git a/cognee/tasks/code/expand_dependency_graph_checker.py b/cognee/tasks/code/expand_dependency_graph_checker.py new file mode 100644 index 000000000..1b3ce3246 --- /dev/null +++ b/cognee/tasks/code/expand_dependency_graph_checker.py @@ -0,0 +1,29 @@ +import os +import asyncio +import argparse +from cognee.tasks.repo_processor.get_repo_dependency_graph import get_repo_dependency_graph +from cognee.tasks.repo_processor.enrich_dependency_graph import enrich_dependency_graph +from cognee.tasks.repo_processor.expand_dependency_graph import expand_dependency_graph + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("repo_path", help="Path to the repository") + args = parser.parse_args() + + repo_path = args.repo_path + if not os.path.exists(repo_path): + print(f"Error: The provided repository path does not exist: {repo_path}") + return + + graph = asyncio.run(get_repo_dependency_graph(repo_path)) + graph = asyncio.run(enrich_dependency_graph(graph)) + graph = expand_dependency_graph(graph) + for node in graph.nodes: + print(f"Node: {node}") + for _, target, data in graph.out_edges(node, data=True): + print(f" Edge to {target}, data: {data}") + + +if __name__ == "__main__": + main() diff --git a/cognee/tasks/code/get_repo_dependency_graph_checker.py 
b/cognee/tasks/code/get_repo_dependency_graph_checker.py new file mode 100644 index 000000000..57afc7274 --- /dev/null +++ b/cognee/tasks/code/get_repo_dependency_graph_checker.py @@ -0,0 +1,27 @@ +import os +import asyncio +import argparse +from cognee.tasks.repo_processor.get_repo_dependency_graph import get_repo_dependency_graph + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("repo_path", help="Path to the repository") + args = parser.parse_args() + + repo_path = args.repo_path + if not os.path.exists(repo_path): + print(f"Error: The provided repository path does not exist: {repo_path}") + return + + graph = asyncio.run(get_repo_dependency_graph(repo_path)) + + for node in graph.nodes: + print(f"Node: {node}") + edges = graph.edges(node, data=True) + for _, target, data in edges: + print(f" Edge to {target}, Relation: {data.get('relation')}") + + +if __name__ == "__main__": + main() diff --git a/cognee/tasks/repo_processor/enrich_dependency_graph.py b/cognee/tasks/repo_processor/enrich_dependency_graph.py new file mode 100644 index 000000000..c3aa9fd74 --- /dev/null +++ b/cognee/tasks/repo_processor/enrich_dependency_graph.py @@ -0,0 +1,56 @@ +import networkx as nx +from typing import Dict, List + + +def topologically_sort_subgraph(subgraph_node_to_indegree: Dict[str, int], graph: nx.DiGraph) -> List[str]: + """Performs a topological sort on a subgraph based on node indegrees.""" + results = [] + remaining_nodes = subgraph_node_to_indegree.copy() + while remaining_nodes: + next_node = min(remaining_nodes, key=remaining_nodes.get) + results.append(next_node) + for successor in graph.successors(next_node): + if successor in remaining_nodes: + remaining_nodes[successor] -= 1 + remaining_nodes.pop(next_node) + return results + + +def topologically_sort(graph: nx.DiGraph) -> List[str]: + """Performs a topological sort on the entire graph.""" + subgraphs = (graph.subgraph(c).copy() for c in nx.weakly_connected_components(graph)) + 
topological_order = [] + for subgraph in subgraphs: + node_to_indegree = { + node: len(list(subgraph.successors(node))) + for node in subgraph.nodes + } + topological_order.extend( + topologically_sort_subgraph(node_to_indegree, subgraph) + ) + return topological_order + + +def node_enrich_and_connect(graph: nx.MultiDiGraph, topological_order: List[str], node: str) -> None: + """Adds 'depends_on' edges to the graph based on topological order.""" + topological_rank = topological_order.index(node) + graph.nodes[node]['topological_rank'] = topological_rank + node_descendants = nx.descendants(graph, node) + if graph.has_edge(node,node): + node_descendants.add(node) + for desc in node_descendants: + if desc not in topological_order[:topological_rank+1]: + continue + graph.add_edge(node, desc, relation='depends_on') + + +async def enrich_dependency_graph(graph: nx.DiGraph) -> nx.MultiDiGraph: + """Enriches the graph with topological ranks and 'depends_on' edges.""" + graph = nx.MultiDiGraph(graph) + topological_order = topologically_sort(graph) + node_rank_map = {node: idx for idx, node in enumerate(topological_order)} + for node in graph.nodes: + if node not in node_rank_map: + continue + node_enrich_and_connect(graph, topological_order, node) + return graph diff --git a/cognee/tasks/repo_processor/expand_dependency_graph.py b/cognee/tasks/repo_processor/expand_dependency_graph.py new file mode 100644 index 000000000..a6b724302 --- /dev/null +++ b/cognee/tasks/repo_processor/expand_dependency_graph.py @@ -0,0 +1,49 @@ +import networkx as nx + +from cognee.tasks.repo_processor.extract_code_parts import extract_code_parts +from cognee.tasks.repo_processor import logger + + +def _add_code_parts_nodes_and_edges(graph, parent_node_id, part_type, code_parts): + """Add code part nodes and edges for a specific part type.""" + if not code_parts: + logger.debug(f"No code parts to add for parent_node_id {parent_node_id} and part_type {part_type}.") + return + + for idx, code_part 
in enumerate(code_parts): + if not code_part.strip(): + logger.warning(f"Empty code part in parent_node_id {parent_node_id} and part_type {part_type}.") + continue + part_node_id = f"{parent_node_id}_{part_type}_{idx}" + graph.add_node(part_node_id, source_code=code_part, node_type=part_type) + graph.add_edge(parent_node_id, part_node_id, relation="contains") + + +def _process_single_node(graph, node_id, node_data): + """Process a single Python file node.""" + graph.nodes[node_id]["node_type"] = "python_file" + source_code = node_data.get("source_code", "") + + if not source_code.strip(): + logger.warning(f"Node {node_id} has no or empty 'source_code'. Skipping.") + return + + try: + code_parts_dict = extract_code_parts(source_code) + except Exception as e: + logger.error(f"Error processing node {node_id}: {e}") + return + + for part_type, code_parts in code_parts_dict.items(): + _add_code_parts_nodes_and_edges(graph, node_id, part_type, code_parts) + + +def expand_dependency_graph(graph: nx.MultiDiGraph) -> nx.MultiDiGraph: + """Process Python file nodes, adding code part nodes and edges.""" + expanded_graph = graph.copy() + for node_id, node_data in graph.nodes(data=True): + if not node_data: # Check if node_data is empty + logger.warning(f"Node {node_id} has no data. 
Skipping.") + continue + _process_single_node(expanded_graph, node_id, node_data) + return expanded_graph diff --git a/cognee/tasks/repo_processor/extract_code_parts.py b/cognee/tasks/repo_processor/extract_code_parts.py new file mode 100644 index 000000000..d772c73f5 --- /dev/null +++ b/cognee/tasks/repo_processor/extract_code_parts.py @@ -0,0 +1,59 @@ +from typing import Dict, List +import parso + +from cognee.tasks.repo_processor import logger + + +def _extract_parts_from_module(module, parts_dict: Dict[str, List[str]]) -> Dict[str, List[str]]: + """Extract code parts from a parsed module.""" + + current_top_level_code = [] + child_to_code_type = { + 'classdef': "classes", + 'funcdef': "functions", + 'import_name': "imports", + 'import_from': "imports", + } + + for child in module.children: + if child.type == 'simple_stmt': + current_top_level_code.append(child.get_code()) + continue + + if current_top_level_code: + parts_dict["top_level_code"].append('\n'.join(current_top_level_code)) + current_top_level_code = [] + + if child.type in child_to_code_type: + code_type = child_to_code_type[child.type] + parts_dict[code_type].append(child.get_code()) + + if current_top_level_code: + parts_dict["top_level_code"].append('\n'.join(current_top_level_code)) + + if parts_dict["imports"]: + parts_dict["imports"] = ['\n'.join(parts_dict["imports"])] + + return parts_dict + + +def extract_code_parts(source_code: str) -> Dict[str, List[str]]: + """Extract high-level parts of the source code.""" + + parts_dict = {"classes": [], "functions": [], "imports": [], "top_level_code": []} + + if not source_code.strip(): + logger.warning("Empty source_code provided.") + return parts_dict + + try: + module = parso.parse(source_code) + except Exception as e: + logger.error(f"Error parsing source code: {e}") + return parts_dict + + if not module.children: + logger.warning("Parsed module has no children (empty or invalid source code).") + return parts_dict + + return 
_extract_parts_from_module(module, parts_dict) diff --git a/cognee/tasks/repo_processor/get_repo_dependency_graph.py b/cognee/tasks/repo_processor/get_repo_dependency_graph.py new file mode 100644 index 000000000..6a8184cd3 --- /dev/null +++ b/cognee/tasks/repo_processor/get_repo_dependency_graph.py @@ -0,0 +1,61 @@ +import os +import aiofiles + +import networkx as nx +from typing import Dict, List + +from cognee.tasks.repo_processor.get_local_dependencies import get_local_script_dependencies + + +async def get_py_path_and_source(file_path, repo_path): + relative_path = os.path.relpath(file_path, repo_path) + try: + async with aiofiles.open(file_path, "r", encoding="utf-8") as f: + source_code = await f.read() + return relative_path, source_code + except Exception as e: + print(f"Error reading file {file_path}: {e}") + return relative_path, None + + +async def get_py_files_dict(repo_path): + """Get .py files and their source code""" + if not os.path.exists(repo_path): + return {} + + py_files_paths = ( + os.path.join(root, file) + for root, _, files in os.walk(repo_path) for file in files if file.endswith(".py") + ) + + py_files_dict = {} + for file_path in py_files_paths: + relative_path, source_code = await get_py_path_and_source(file_path, repo_path) + py_files_dict[relative_path] = {"source_code": source_code} + + return py_files_dict + +def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bool = True) -> tuple: + if relative_paths: + file_path = os.path.relpath(file_path, repo_path) + dependency = os.path.relpath(dependency, repo_path) + return (file_path, dependency, {"relation": "depends_directly_on"}) + + +async def get_repo_dependency_graph(repo_path: str) -> nx.DiGraph: + """Generate a dependency graph for Python files in the given repository path.""" + py_files_dict = await get_py_files_dict(repo_path) + + dependency_graph = nx.DiGraph() + + dependency_graph.add_nodes_from(py_files_dict.items()) + + for file_path, metadata in 
py_files_dict.items(): + source_code = metadata.get("source_code") + if source_code is None: + continue + + dependencies = await get_local_script_dependencies(file_path, repo_path) + dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] + dependency_graph.add_edges_from(dependency_edges) + return dependency_graph From 70bdaea8f7405d2cd87c7889ccda4d2a7ff56138 Mon Sep 17 00:00:00 2001 From: lxobr <122801072+lxobr@users.noreply.github.com> Date: Sat, 23 Nov 2024 14:04:45 +0100 Subject: [PATCH 02/52] feat: COG-546 create dependency graph Add `get_repo_dependency_graph` cognee task: - Extracts Python file paths and their source code from a repository. - Builds a directed dependency graph of Python files. - Identifies direct dependencies using `get_local_script_dependencies`. - Represents file relationships as nodes and edges in a `networkx.DiGraph`. - Includes a checker script for generating and displaying the graph. From 80b06c3acb93294ed091de5e432c00a85acdbcfe Mon Sep 17 00:00:00 2001 From: 0xideas Date: Sun, 24 Nov 2024 19:24:47 +0100 Subject: [PATCH 03/52] test: Test for code graph enrichment task Co-authored-by: lxobr --- .github/workflows/test_python_3_10.yml | 6 + .github/workflows/test_python_3_11.yml | 6 + .github/workflows/test_python_3_9.yml | 6 + .gitignore | 2 + cognee/modules/engine/models/Entity.py | 8 +- cognee/modules/engine/models/EntityType.py | 6 +- cognee/modules/graph/utils/__init__.py | 2 + .../utils/expand_with_nodes_and_edges.py | 83 ++++++++++++ .../graph/utils/retrieve_existing_edges.py | 55 ++++++++ cognee/shared/CodeGraphEntities.py | 23 ++++ .../graph/convert_graph_from_code_graph.py | 105 +++++++++++++++ cognee/tasks/graph/extract_graph_from_data.py | 125 +++--------------- .../get_repo_dependency_graph.py | 2 +- cognee/tasks/summarization/__init__.py | 3 +- cognee/tasks/summarization/models.py | 11 ++ cognee/tasks/summarization/summarize_code.py | 35 +++++ 
.../graph/code_graph_test_data_generation.py | 51 +++++++ .../convert_graph_from_code_graph_test.py | 27 ++++ .../summarization/summarize_code_test.py | 15 +++ examples/python/code_graph_pipeline.py | 64 +++++++++ 20 files changed, 527 insertions(+), 108 deletions(-) create mode 100644 cognee/modules/graph/utils/expand_with_nodes_and_edges.py create mode 100644 cognee/modules/graph/utils/retrieve_existing_edges.py create mode 100644 cognee/shared/CodeGraphEntities.py create mode 100644 cognee/tasks/graph/convert_graph_from_code_graph.py create mode 100644 cognee/tasks/summarization/summarize_code.py create mode 100644 cognee/tests/tasks/graph/code_graph_test_data_generation.py create mode 100644 cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py create mode 100644 cognee/tests/tasks/summarization/summarize_code_test.py create mode 100644 examples/python/code_graph_pipeline.py diff --git a/.github/workflows/test_python_3_10.yml b/.github/workflows/test_python_3_10.yml index 7f762d778..a7ea005ef 100644 --- a/.github/workflows/test_python_3_10.yml +++ b/.github/workflows/test_python_3_10.yml @@ -56,6 +56,12 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ + - name: Run summarize_code test + run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + env: + ENV: 'dev' + LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} + - name: Run default basic pipeline env: ENV: 'dev' diff --git a/.github/workflows/test_python_3_11.yml b/.github/workflows/test_python_3_11.yml index b05d901dc..18b04cd94 100644 --- a/.github/workflows/test_python_3_11.yml +++ b/.github/workflows/test_python_3_11.yml @@ -56,6 +56,12 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ + - name: Run summarize_code test + run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + env: + ENV: 'dev' + LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} + - name: Run default basic pipeline env: 
ENV: 'dev' diff --git a/.github/workflows/test_python_3_9.yml b/.github/workflows/test_python_3_9.yml index 47c5ddc41..54194bd19 100644 --- a/.github/workflows/test_python_3_9.yml +++ b/.github/workflows/test_python_3_9.yml @@ -56,6 +56,12 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ + - name: Run summarize_code test + run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + env: + ENV: 'dev' + LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} + - name: Run default basic pipeline env: ENV: 'dev' diff --git a/.gitignore b/.gitignore index d256013d2..47fa54130 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,8 @@ .prod.env cognee/.data/ +code_pipeline_output*/ + *.lance/ .DS_Store # Byte-compiled / optimized / DLL files diff --git a/cognee/modules/engine/models/Entity.py b/cognee/modules/engine/models/Entity.py index cf946ceb6..c5579a610 100644 --- a/cognee/modules/engine/models/Entity.py +++ b/cognee/modules/engine/models/Entity.py @@ -1,13 +1,17 @@ +from typing import Union + from cognee.infrastructure.engine import DataPoint from cognee.modules.chunking.models.DocumentChunk import DocumentChunk -from .EntityType import EntityType +from cognee.modules.engine.models.EntityType import EntityType +from cognee.shared.CodeGraphEntities import Repository + class Entity(DataPoint): __tablename__ = "entity" name: str is_a: EntityType description: str - mentioned_in: DocumentChunk + mentioned_in: Union[DocumentChunk, Repository] _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/engine/models/EntityType.py b/cognee/modules/engine/models/EntityType.py index 56092f261..685958935 100644 --- a/cognee/modules/engine/models/EntityType.py +++ b/cognee/modules/engine/models/EntityType.py @@ -1,12 +1,16 @@ +from typing import Union + from cognee.infrastructure.engine import DataPoint from cognee.modules.chunking.models.DocumentChunk import DocumentChunk +from cognee.shared.CodeGraphEntities 
import Repository + class EntityType(DataPoint): __tablename__ = "entity_type" name: str type: str description: str - exists_in: DocumentChunk + exists_in: Union[DocumentChunk, Repository] _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/graph/utils/__init__.py b/cognee/modules/graph/utils/__init__.py index 18e7ac29c..6fbe2ee99 100644 --- a/cognee/modules/graph/utils/__init__.py +++ b/cognee/modules/graph/utils/__init__.py @@ -1,2 +1,4 @@ +from .expand_with_nodes_and_edges import expand_with_nodes_and_edges from .get_graph_from_model import get_graph_from_model from .get_model_instance_from_graph import get_model_instance_from_graph +from .retrieve_existing_edges import retrieve_existing_edges diff --git a/cognee/modules/graph/utils/expand_with_nodes_and_edges.py b/cognee/modules/graph/utils/expand_with_nodes_and_edges.py new file mode 100644 index 000000000..5b443d12a --- /dev/null +++ b/cognee/modules/graph/utils/expand_with_nodes_and_edges.py @@ -0,0 +1,83 @@ +from typing import Optional + +from cognee.infrastructure.engine import DataPoint +from cognee.modules.engine.models import Entity, EntityType +from cognee.modules.engine.utils import ( + generate_edge_name, + generate_node_id, + generate_node_name, +) +from cognee.shared.data_models import KnowledgeGraph + + +def expand_with_nodes_and_edges( + graph_node_index: list[tuple[DataPoint, KnowledgeGraph]], + existing_edges_map: Optional[dict[str, bool]] = None, +): + if existing_edges_map is None: + existing_edges_map = {} + + added_nodes_map = {} + relationships = [] + data_points = [] + + for graph_source, graph in graph_node_index: + if graph is None: + continue + + for node in graph.nodes: + node_id = generate_node_id(node.id) + node_name = generate_node_name(node.name) + + type_node_id = generate_node_id(node.type) + type_node_name = generate_node_name(node.type) + + if f"{str(type_node_id)}_type" not in added_nodes_map: + type_node = EntityType( + id=type_node_id, + 
name=type_node_name, + type=type_node_name, + description=type_node_name, + exists_in=graph_source, + ) + added_nodes_map[f"{str(type_node_id)}_type"] = type_node + else: + type_node = added_nodes_map[f"{str(type_node_id)}_type"] + + if f"{str(node_id)}_entity" not in added_nodes_map: + entity_node = Entity( + id=node_id, + name=node_name, + is_a=type_node, + description=node.description, + mentioned_in=graph_source, + ) + data_points.append(entity_node) + added_nodes_map[f"{str(node_id)}_entity"] = entity_node + + # Add relationship that came from graphs. + for edge in graph.edges: + source_node_id = generate_node_id(edge.source_node_id) + target_node_id = generate_node_id(edge.target_node_id) + relationship_name = generate_edge_name(edge.relationship_name) + + edge_key = str(source_node_id) + str(target_node_id) + relationship_name + + if edge_key not in existing_edges_map: + relationships.append( + ( + source_node_id, + target_node_id, + edge.relationship_name, + dict( + relationship_name=generate_edge_name( + edge.relationship_name + ), + source_node_id=source_node_id, + target_node_id=target_node_id, + ), + ) + ) + existing_edges_map[edge_key] = True + + return (data_points, relationships) diff --git a/cognee/modules/graph/utils/retrieve_existing_edges.py b/cognee/modules/graph/utils/retrieve_existing_edges.py new file mode 100644 index 000000000..6f237061a --- /dev/null +++ b/cognee/modules/graph/utils/retrieve_existing_edges.py @@ -0,0 +1,55 @@ +from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface +from cognee.infrastructure.engine import DataPoint +from cognee.modules.engine.utils import generate_node_id +from cognee.shared.data_models import KnowledgeGraph + + +async def retrieve_existing_edges( + graph_node_index: list[tuple[DataPoint, KnowledgeGraph]], + graph_engine: GraphDBInterface, +) -> dict[str, bool]: + processed_nodes = {} + type_node_edges = [] + entity_node_edges = [] + type_entity_edges = [] + + for 
graph_source, graph in graph_node_index: + for node in graph.nodes: + type_node_id = generate_node_id(node.type) + entity_node_id = generate_node_id(node.id) + + if str(type_node_id) not in processed_nodes: + type_node_edges.append( + (str(graph_source), str(type_node_id), "exists_in") + ) + processed_nodes[str(type_node_id)] = True + + if str(entity_node_id) not in processed_nodes: + entity_node_edges.append( + (str(graph_source), entity_node_id, "mentioned_in") + ) + type_entity_edges.append( + (str(entity_node_id), str(type_node_id), "is_a") + ) + processed_nodes[str(entity_node_id)] = True + + graph_node_edges = [ + (edge.target_node_id, edge.source_node_id, edge.relationship_name) + for edge in graph.edges + ] + + existing_edges = await graph_engine.has_edges( + [ + *type_node_edges, + *entity_node_edges, + *type_entity_edges, + *graph_node_edges, + ] + ) + + existing_edges_map = {} + + for edge in existing_edges: + existing_edges_map[edge[0] + edge[1] + edge[2]] = True + + return existing_edges_map diff --git a/cognee/shared/CodeGraphEntities.py b/cognee/shared/CodeGraphEntities.py new file mode 100644 index 000000000..f0061406e --- /dev/null +++ b/cognee/shared/CodeGraphEntities.py @@ -0,0 +1,23 @@ +from typing import Any, List, Literal, Optional, Union + +from cognee.infrastructure.engine import DataPoint + + +class Repository(DataPoint): + path: str + + +class CodeFile(DataPoint): + extracted_id: str # actually file path + type: str + source_code: str + + _metadata: dict = { + "index_fields": ["source_code"] + } + +class CodeRelationship(DataPoint): + source_id: str + target_id: str + type: str # between files + relation: str # depends on or depends directly diff --git a/cognee/tasks/graph/convert_graph_from_code_graph.py b/cognee/tasks/graph/convert_graph_from_code_graph.py new file mode 100644 index 000000000..91eaf660a --- /dev/null +++ b/cognee/tasks/graph/convert_graph_from_code_graph.py @@ -0,0 +1,105 @@ +from uuid import UUID, uuid4 +import os 
+import networkx as nx + +from cognee.infrastructure.databases.graph import get_graph_engine +from cognee.modules.graph.utils import ( + expand_with_nodes_and_edges, + retrieve_existing_edges, +) +from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship, Repository +from cognee.shared.data_models import Edge, KnowledgeGraph, Node +from cognee.tasks.storage import add_data_points + + +async def convert_graph_from_code_graph( + graph: nx.DiGraph, repo_path: str +) -> tuple[str, list[CodeFile], list[CodeRelationship]]: + + repo, nodes, edges = code_objects_from_di_graph(graph, repo_path) + + graph_engine = await get_graph_engine() + + code_knowledge_graph = build_code_knowledge_graph(nodes, edges) + repo_and_knowledge_graph = [(repo, code_knowledge_graph)] + + existing_edges_map = await retrieve_existing_edges( + repo_and_knowledge_graph, graph_engine + ) + + graph_nodes, graph_edges = expand_with_nodes_and_edges( + repo_and_knowledge_graph, existing_edges_map + ) + + if len(graph_nodes) > 0: + await add_data_points(graph_nodes) + + if len(graph_edges) > 0: + await graph_engine.add_edges(graph_edges) + + return nodes + + +def convert_node(node: CodeFile) -> Node: + return Node( + id=str(node.id), + name=node.extracted_id, + type=node.type, + description=f"{node.source_code = }", + properties={}, + ) + + +def convert_edge(edge: CodeRelationship, extracted_ids_to_ids: dict[str, UUID]) -> Edge: + return Edge( + source_node_id=str(extracted_ids_to_ids[edge.source_id]), + target_node_id=str(extracted_ids_to_ids[edge.target_id]), + relationship_name=f"{edge.type}_{edge.relation}", + ) + + +def build_code_knowledge_graph(nodes: list[CodeFile], edges: list[CodeRelationship]): + extracted_ids_to_ids = {node.extracted_id: node.id for node in nodes} + graph_nodes = [convert_node(node) for node in nodes] + graph_edges = [convert_edge(edge, extracted_ids_to_ids) for edge in edges] + return KnowledgeGraph(nodes=graph_nodes, edges=graph_edges) + + +def 
create_code_file(path, type): + abspath = os.path.abspath(path) + print(f"{path = } - {abspath = }") + with open(abspath, "r") as f: + source_code = f.read() + code_file = CodeFile(extracted_id=abspath, type=type, source_code=source_code) + return (code_file, abspath) + + +def create_code_relationship( + source_path: str, target_path: str, type: str, relation: str +): + return CodeRelationship( + source_id=source_path, target_id=target_path, type=type, relation=relation + ) + + +def code_objects_from_di_graph( + graph: nx.DiGraph, repo_path: str +) -> tuple[Repository, list[CodeFile], list[CodeRelationship]]: + repo = Repository(path=repo_path) + + code_files = [ + create_code_file(os.path.join(repo_path, path), "python_file")[0] + for path in graph.nodes + ] + + code_relationships = [ + create_code_relationship( + os.path.join(repo_path, source), + os.path.join(repo_path, target), + "python_file", + graph.get_edge_data(source, target, v)["relation"], + ) + for source, target, v in graph.edges + ] + + return (repo, code_files, code_relationships) diff --git a/cognee/tasks/graph/extract_graph_from_data.py b/cognee/tasks/graph/extract_graph_from_data.py index 9e6edcabd..ad6ae19d2 100644 --- a/cognee/tasks/graph/extract_graph_from_data.py +++ b/cognee/tasks/graph/extract_graph_from_data.py @@ -1,119 +1,38 @@ import asyncio from typing import Type + from pydantic import BaseModel + from cognee.infrastructure.databases.graph import get_graph_engine -from cognee.modules.data.extraction.knowledge_graph import extract_content_graph from cognee.modules.chunking.models.DocumentChunk import DocumentChunk -from cognee.modules.engine.models import EntityType, Entity -from cognee.modules.engine.utils import generate_edge_name, generate_node_id, generate_node_name +from cognee.modules.data.extraction.knowledge_graph import extract_content_graph +from cognee.modules.graph.utils import ( + expand_with_nodes_and_edges, + retrieve_existing_edges, +) from cognee.tasks.storage import 
add_data_points -async def extract_graph_from_data(data_chunks: list[DocumentChunk], graph_model: Type[BaseModel]): + +async def extract_graph_from_data( + data_chunks: list[DocumentChunk], graph_model: Type[BaseModel] +): chunk_graphs = await asyncio.gather( *[extract_content_graph(chunk.text, graph_model) for chunk in data_chunks] ) - - processed_nodes = {} - type_node_edges = [] - entity_node_edges = [] - type_entity_edges = [] - - for (chunk_index, chunk) in enumerate(data_chunks): - chunk_graph = chunk_graphs[chunk_index] - for node in chunk_graph.nodes: - type_node_id = generate_node_id(node.type) - entity_node_id = generate_node_id(node.id) - - if str(type_node_id) not in processed_nodes: - type_node_edges.append((str(chunk.id), str(type_node_id), "exists_in")) - processed_nodes[str(type_node_id)] = True - - if str(entity_node_id) not in processed_nodes: - entity_node_edges.append((str(chunk.id), entity_node_id, "mentioned_in")) - type_entity_edges.append((str(entity_node_id), str(type_node_id), "is_a")) - processed_nodes[str(entity_node_id)] = True - - graph_node_edges = [ - (edge.target_node_id, edge.source_node_id, edge.relationship_name) \ - for edge in chunk_graph.edges - ] - graph_engine = await get_graph_engine() + chunk_and_chunk_graphs = [ + (chunk, chunk_graph) for chunk, chunk_graph in zip(data_chunks, chunk_graphs) + ] + existing_edges_map = await retrieve_existing_edges( + chunk_and_chunk_graphs, graph_engine + ) - existing_edges = await graph_engine.has_edges([ - *type_node_edges, - *entity_node_edges, - *type_entity_edges, - *graph_node_edges, - ]) + graph_nodes, graph_edges = expand_with_nodes_and_edges( + chunk_and_chunk_graphs, existing_edges_map + ) - existing_edges_map = {} - - for edge in existing_edges: - existing_edges_map[edge[0] + edge[1] + edge[2]] = True - - added_nodes_map = {} - graph_edges = [] - data_points = [] - - for (chunk_index, chunk) in enumerate(data_chunks): - graph = chunk_graphs[chunk_index] - if graph is None: - 
continue - - for node in graph.nodes: - node_id = generate_node_id(node.id) - node_name = generate_node_name(node.name) - - type_node_id = generate_node_id(node.type) - type_node_name = generate_node_name(node.type) - - if f"{str(type_node_id)}_type" not in added_nodes_map: - type_node = EntityType( - id = type_node_id, - name = type_node_name, - type = type_node_name, - description = type_node_name, - exists_in = chunk, - ) - added_nodes_map[f"{str(type_node_id)}_type"] = type_node - else: - type_node = added_nodes_map[f"{str(type_node_id)}_type"] - - if f"{str(node_id)}_entity" not in added_nodes_map: - entity_node = Entity( - id = node_id, - name = node_name, - is_a = type_node, - description = node.description, - mentioned_in = chunk, - ) - data_points.append(entity_node) - added_nodes_map[f"{str(node_id)}_entity"] = entity_node - - # Add relationship that came from graphs. - for edge in graph.edges: - source_node_id = generate_node_id(edge.source_node_id) - target_node_id = generate_node_id(edge.target_node_id) - relationship_name = generate_edge_name(edge.relationship_name) - - edge_key = str(source_node_id) + str(target_node_id) + relationship_name - - if edge_key not in existing_edges_map: - graph_edges.append(( - source_node_id, - target_node_id, - edge.relationship_name, - dict( - relationship_name = generate_edge_name(edge.relationship_name), - source_node_id = source_node_id, - target_node_id = target_node_id, - ), - )) - existing_edges_map[edge_key] = True - - if len(data_points) > 0: - await add_data_points(data_points) + if len(graph_nodes) > 0: + await add_data_points(graph_nodes) if len(graph_edges) > 0: await graph_engine.add_edges(graph_edges) diff --git a/cognee/tasks/repo_processor/get_repo_dependency_graph.py b/cognee/tasks/repo_processor/get_repo_dependency_graph.py index 6a8184cd3..b36607ab8 100644 --- a/cognee/tasks/repo_processor/get_repo_dependency_graph.py +++ b/cognee/tasks/repo_processor/get_repo_dependency_graph.py @@ -55,7 +55,7 @@ 
async def get_repo_dependency_graph(repo_path: str) -> nx.DiGraph: if source_code is None: continue - dependencies = await get_local_script_dependencies(file_path, repo_path) + dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] dependency_graph.add_edges_from(dependency_edges) return dependency_graph diff --git a/cognee/tasks/summarization/__init__.py b/cognee/tasks/summarization/__init__.py index cd459ac0b..a9a330cb2 100644 --- a/cognee/tasks/summarization/__init__.py +++ b/cognee/tasks/summarization/__init__.py @@ -1,2 +1,3 @@ -from .summarize_text import summarize_text from .query_summaries import query_summaries +from .summarize_code import summarize_code +from .summarize_text import summarize_text diff --git a/cognee/tasks/summarization/models.py b/cognee/tasks/summarization/models.py index 955c0e2fa..66fa4fc60 100644 --- a/cognee/tasks/summarization/models.py +++ b/cognee/tasks/summarization/models.py @@ -1,6 +1,8 @@ from cognee.infrastructure.engine import DataPoint from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.data.processing.document_types import Document +from cognee.shared.CodeGraphEntities import CodeFile + class TextSummary(DataPoint): __tablename__ = "text_summary" @@ -10,3 +12,12 @@ class TextSummary(DataPoint): _metadata: dict = { "index_fields": ["text"], } + + +class CodeSummary(DataPoint): + text: str + made_from: CodeFile + + _metadata: dict = { + "index_fields": ["text"], + } diff --git a/cognee/tasks/summarization/summarize_code.py b/cognee/tasks/summarization/summarize_code.py new file mode 100644 index 000000000..597bc4026 --- /dev/null +++ b/cognee/tasks/summarization/summarize_code.py @@ -0,0 +1,35 @@ +import asyncio +from typing import Type, Union +from uuid import uuid5 + +from pydantic import BaseModel + +from 
cognee.modules.data.extraction.extract_summary import extract_summary +from cognee.shared.CodeGraphEntities import CodeFile +from cognee.tasks.storage import add_data_points + +from .models import CodeSummary + + +async def summarize_code( + code_files: list[CodeFile], summarization_model: Type[BaseModel] +) -> list[CodeFile]: + if len(code_files) == 0: + return code_files + + file_summaries = await asyncio.gather( + *[extract_summary(file.source_code, summarization_model) for file in code_files] + ) + + summaries = [ + CodeSummary( + id=uuid5(file.id, "CodeSummary"), + made_from=file, + text=file_summaries[file_index].summary, + ) + for (file_index, file) in enumerate(code_files) + ] + + await add_data_points(summaries) + + return code_files, summaries diff --git a/cognee/tests/tasks/graph/code_graph_test_data_generation.py b/cognee/tests/tasks/graph/code_graph_test_data_generation.py new file mode 100644 index 000000000..74ca2de71 --- /dev/null +++ b/cognee/tests/tasks/graph/code_graph_test_data_generation.py @@ -0,0 +1,51 @@ +import random +import string + +import numpy as np + +from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship + + +def random_str(n, spaces=True): + candidates = string.ascii_letters + string.digits + if spaces: + candidates += " " + return "".join(random.choice(candidates) for _ in range(n)) + + +def code_graph_test_data_generation(): + nodes = [ + CodeFile( + extracted_id=random_str(10, spaces=False), + type="file", + source_code=random_str(random.randrange(50, 500)), + ) + for _ in range(100) + ] + n_nodes = len(nodes) + first_source = np.random.randint(0, n_nodes) + reached_nodes = {first_source} + last_iteration = [first_source] + edges = [] + while len(reached_nodes) < n_nodes: + for source in last_iteration: + last_iteration = [] + tries = 0 + while ((len(last_iteration) == 0 or tries < 500)) and ( + len(reached_nodes) < n_nodes + ): + tries += 1 + target = np.random.randint(n_nodes) + if target not in reached_nodes: + 
last_iteration.append(target) + edges.append( + CodeRelationship( + source_id=nodes[source].extracted_id, + target_id=nodes[target].extracted_id, + type="files", + relation="depends", + ) + ) + reached_nodes = reached_nodes.union(set(last_iteration)) + + return (nodes, edges) diff --git a/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py b/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py new file mode 100644 index 000000000..755840b01 --- /dev/null +++ b/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py @@ -0,0 +1,27 @@ +import asyncio + +import pytest + +from cognee.shared.CodeGraphEntities import Repository +from cognee.tasks.graph.convert_graph_from_code_graph import ( + convert_graph_from_code_graph, +) +from cognee.tests.tasks.graph.code_graph_test_data_generation import ( + code_graph_test_data_generation, +) + + +def test_convert_graph_from_code_graph(): + repo = Repository(path="test/repo/path") + nodes, edges = code_graph_test_data_generation() + repo_out, nodes_out, edges_out = asyncio.run( + convert_graph_from_code_graph(repo, nodes, edges) + ) + + assert repo == repo_out, f"{repo = } != {repo_out = }" + + for node_in, node_out in zip(nodes, nodes_out): + assert node_in == node_out, f"{node_in = } != {node_out = }" + + for edge_in, edge_out in zip(edges, edges_out): + assert edge_in == edge_out, f"{edge_in = } != {edge_out = }" diff --git a/cognee/tests/tasks/summarization/summarize_code_test.py b/cognee/tests/tasks/summarization/summarize_code_test.py new file mode 100644 index 000000000..5865a05fd --- /dev/null +++ b/cognee/tests/tasks/summarization/summarize_code_test.py @@ -0,0 +1,15 @@ +import asyncio + +from cognee.shared.data_models import SummarizedContent +from cognee.tasks.summarization import summarize_code +from cognee.tests.tasks.graph.code_graph_test_data_generation import ( + code_graph_test_data_generation, +) + + +def test_summarize_code(): + nodes, _ = code_graph_test_data_generation() + nodes_out 
= asyncio.run(summarize_code(nodes, SummarizedContent)) + + for node_in, node_out in zip(nodes, nodes_out): + assert node_in == node_out, f"{node_in = } != {node_out = }" diff --git a/examples/python/code_graph_pipeline.py b/examples/python/code_graph_pipeline.py new file mode 100644 index 000000000..e4fe29840 --- /dev/null +++ b/examples/python/code_graph_pipeline.py @@ -0,0 +1,64 @@ +import argparse +import asyncio +import os +import cognee +import json + +import numpy as np +from networkx.classes.digraph import DiGraph + +from cognee.modules.pipelines import Task, run_tasks +from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship, Repository +from cognee.shared.data_models import SummarizedContent +from cognee.tasks.code.get_local_dependencies_checker import ( + get_local_script_dependencies, +) +from cognee.tasks.graph.convert_graph_from_code_graph import ( + convert_graph_from_code_graph, +) +from cognee.tasks.repo_processor.get_repo_dependency_graph import ( + get_repo_dependency_graph, +) +from cognee.tasks.repo_processor.enrich_dependency_graph import enrich_dependency_graph +from cognee.tasks.summarization import summarize_code +from cognee.tasks.storage import index_data_points + +async def print_results(pipeline): + async for result in pipeline: + print(result) + +async def write_results(repo, pipeline): + output_dir = os.path.join(repo, "code_pipeline_output", "") + os.makedirs(output_dir, exist_ok = True) + async for code_files, summaries in pipeline: + for summary in summaries: + file_name = os.path.split(summary.made_from.extracted_id)[-1] + relpath = os.path.join(*os.path.split(os.path.relpath(summary.made_from.extracted_id, repo))[:-1]) + output_dir2 = os.path.join(repo, "code_pipeline_output", relpath) + os.makedirs(output_dir2, exist_ok=True) + with open(os.path.join(output_dir2, file_name.replace(".py", ".json")), "w") as f: + f.write(json.dumps({"summary": summary.text, "source_code": summary.made_from.source_code})) + +async def 
reset_system(): + await cognee.prune.prune_data() + await cognee.prune.prune_system(metadata=True) + return(True) + +if __name__ == "__main__": + + parser = argparse.ArgumentParser(description="Process a file path") + parser.add_argument("path", help="Path to the file") + + args = parser.parse_args() + abspath = os.path.abspath(args.path) + data = abspath + tasks = [ + Task(get_repo_dependency_graph), + Task(enrich_dependency_graph), + Task(convert_graph_from_code_graph, repo_path = abspath), + Task(index_data_points), + Task(summarize_code, summarization_model=SummarizedContent), + ] + pipeline = run_tasks(tasks, data, "cognify_pipeline") + + asyncio.run(write_results(abspath, pipeline)) From 8466764cbef5e3e1b9876ba8c95310f020f97751 Mon Sep 17 00:00:00 2001 From: 0xideas Date: Sun, 24 Nov 2024 19:26:40 +0100 Subject: [PATCH 04/52] refactor: Cog-547 code graph enrichment task clean --- examples/python/code_graph_pipeline.py | 92 +++++++++++++++----------- 1 file changed, 55 insertions(+), 37 deletions(-) diff --git a/examples/python/code_graph_pipeline.py b/examples/python/code_graph_pipeline.py index e4fe29840..221ff8385 100644 --- a/examples/python/code_graph_pipeline.py +++ b/examples/python/code_graph_pipeline.py @@ -1,11 +1,8 @@ -import argparse import asyncio import os -import cognee -import json +from uuid import UUID, uuid4 import numpy as np -from networkx.classes.digraph import DiGraph from cognee.modules.pipelines import Task, run_tasks from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship, Repository @@ -16,49 +13,70 @@ from cognee.tasks.code.get_local_dependencies_checker import ( from cognee.tasks.graph.convert_graph_from_code_graph import ( convert_graph_from_code_graph, ) -from cognee.tasks.repo_processor.get_repo_dependency_graph import ( - get_repo_dependency_graph, -) -from cognee.tasks.repo_processor.enrich_dependency_graph import enrich_dependency_graph + from cognee.tasks.summarization import summarize_code -from 
cognee.tasks.storage import index_data_points +from cognee.tests.tasks.graph.code_graph_test_data_generation import ( + code_graph_test_data_generation, +) async def print_results(pipeline): async for result in pipeline: print(result) -async def write_results(repo, pipeline): - output_dir = os.path.join(repo, "code_pipeline_output", "") - os.makedirs(output_dir, exist_ok = True) - async for code_files, summaries in pipeline: - for summary in summaries: - file_name = os.path.split(summary.made_from.extracted_id)[-1] - relpath = os.path.join(*os.path.split(os.path.relpath(summary.made_from.extracted_id, repo))[:-1]) - output_dir2 = os.path.join(repo, "code_pipeline_output", relpath) - os.makedirs(output_dir2, exist_ok=True) - with open(os.path.join(output_dir2, file_name.replace(".py", ".json")), "w") as f: - f.write(json.dumps({"summary": summary.text, "source_code": summary.made_from.source_code})) -async def reset_system(): - await cognee.prune.prune_data() - await cognee.prune.prune_system(metadata=True) - return(True) +def create_code_file(path, type): + abspath = os.path.abspath(path) + with open(abspath, "r") as f: + source_code = f.read() + code_file = CodeFile(extracted_id=abspath, type=type, source_code=source_code) + return (code_file, abspath) + + +async def get_local_script_dependencies_wrapper(script_path, repo_path): + dependencies = await get_local_script_dependencies(script_path, repo_path) + return (script_path, dependencies) + + +async def scan_repo(path, condition): + futures = [] + for root, dirs, files in os.walk(path): + for file in files: + if condition(file): + futures.append( + get_local_script_dependencies_wrapper( + os.path.abspath(f"{root}/{file}"), path + ) + ) + results = await asyncio.gather(*futures) + + code_files = {} + code_relationships = [] + for abspath, dependencies in results: + code_file, abspath = create_code_file(abspath, "python_file") + code_files[abspath] = code_file + + for dependency in dependencies: + 
dependency_code_file, dependency_abspath = create_code_file( + dependency, "python_file" + ) + code_files[dependency_abspath] = dependency_code_file + code_relationship = CodeRelationship( + source_id=abspath, + target_id=dependency_abspath, + type="files", + relation="depends_on", + ) + code_relationships.append(code_relationship) + + return (Repository(path=path), list(code_files.values()), code_relationships) + if __name__ == "__main__": - - parser = argparse.ArgumentParser(description="Process a file path") - parser.add_argument("path", help="Path to the file") - - args = parser.parse_args() - abspath = os.path.abspath(args.path) - data = abspath tasks = [ - Task(get_repo_dependency_graph), - Task(enrich_dependency_graph), - Task(convert_graph_from_code_graph, repo_path = abspath), - Task(index_data_points), + Task(scan_repo), + Task(convert_graph_from_code_graph), Task(summarize_code, summarization_model=SummarizedContent), ] + data = ("cognee", lambda file: file.endswith(".py")) pipeline = run_tasks(tasks, data, "cognify_pipeline") - - asyncio.run(write_results(abspath, pipeline)) + asyncio.run(print_results(pipeline)) From 0fb47ba23d9b428d5bfb6047b8676751d0672cd4 Mon Sep 17 00:00:00 2001 From: 0xideas Date: Sun, 24 Nov 2024 20:50:32 +0100 Subject: [PATCH 05/52] feat: COG-548-create-code-graph-to-kg-task (#7) Co-authored-by: Boris Arzentar --- .github/workflows/test_python_3_10.yml | 4 +- .github/workflows/test_python_3_11.yml | 4 +- .github/workflows/test_python_3_9.yml | 4 +- cognee/shared/CodeGraphEntities.py | 5 +- .../graph/convert_graph_from_code_graph.py | 83 ++++--------------- cognee/tasks/repo_processor/__init__.py | 4 + .../get_repo_dependency_graph.py | 4 +- cognee/tasks/summarization/summarize_code.py | 8 +- examples/python/code_graph_pipeline.py | 39 +++++---- 9 files changed, 52 insertions(+), 103 deletions(-) diff --git a/.github/workflows/test_python_3_10.yml b/.github/workflows/test_python_3_10.yml index a7ea005ef..83d794c03 100644 --- 
a/.github/workflows/test_python_3_10.yml +++ b/.github/workflows/test_python_3_10.yml @@ -56,8 +56,8 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ - - name: Run summarize_code test - run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + - name: Run convert_graph_from_code_graph test + run: poetry run pytest cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py env: ENV: 'dev' LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} diff --git a/.github/workflows/test_python_3_11.yml b/.github/workflows/test_python_3_11.yml index 18b04cd94..9483cb687 100644 --- a/.github/workflows/test_python_3_11.yml +++ b/.github/workflows/test_python_3_11.yml @@ -56,8 +56,8 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ - - name: Run summarize_code test - run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + - name: Run convert_graph_from_code_graph test + run: poetry run pytest cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py env: ENV: 'dev' LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} diff --git a/.github/workflows/test_python_3_9.yml b/.github/workflows/test_python_3_9.yml index 54194bd19..c4f6d271a 100644 --- a/.github/workflows/test_python_3_9.yml +++ b/.github/workflows/test_python_3_9.yml @@ -56,8 +56,8 @@ jobs: - name: Run integration tests run: poetry run pytest cognee/tests/integration/ - - name: Run summarize_code test - run: poetry run pytest cognee/tests/tasks/summarization/summarize_code_test.py + - name: Run convert_graph_from_code_graph test + run: poetry run pytest cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py env: ENV: 'dev' LLM_API_KEY: ${{ secrets.OPENAI_API_KEY }} diff --git a/cognee/shared/CodeGraphEntities.py b/cognee/shared/CodeGraphEntities.py index f0061406e..9052cf89e 100644 --- a/cognee/shared/CodeGraphEntities.py +++ b/cognee/shared/CodeGraphEntities.py @@ -1,16 +1,13 @@ -from typing import 
Any, List, Literal, Optional, Union - from cognee.infrastructure.engine import DataPoint - class Repository(DataPoint): path: str - class CodeFile(DataPoint): extracted_id: str # actually file path type: str source_code: str + part_of: Repository _metadata: dict = { "index_fields": ["source_code"] diff --git a/cognee/tasks/graph/convert_graph_from_code_graph.py b/cognee/tasks/graph/convert_graph_from_code_graph.py index 91eaf660a..bc8544994 100644 --- a/cognee/tasks/graph/convert_graph_from_code_graph.py +++ b/cognee/tasks/graph/convert_graph_from_code_graph.py @@ -1,86 +1,35 @@ -from uuid import UUID, uuid4 import os import networkx as nx -from cognee.infrastructure.databases.graph import get_graph_engine -from cognee.modules.graph.utils import ( - expand_with_nodes_and_edges, - retrieve_existing_edges, -) from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship, Repository -from cognee.shared.data_models import Edge, KnowledgeGraph, Node from cognee.tasks.storage import add_data_points async def convert_graph_from_code_graph( graph: nx.DiGraph, repo_path: str ) -> tuple[str, list[CodeFile], list[CodeRelationship]]: + code_objects = code_objects_from_di_graph(graph, repo_path) - repo, nodes, edges = code_objects_from_di_graph(graph, repo_path) + add_data_points(code_objects) - graph_engine = await get_graph_engine() - - code_knowledge_graph = build_code_knowledge_graph(nodes, edges) - repo_and_knowledge_graph = [(repo, code_knowledge_graph)] - - existing_edges_map = await retrieve_existing_edges( - repo_and_knowledge_graph, graph_engine - ) - - graph_nodes, graph_edges = expand_with_nodes_and_edges( - repo_and_knowledge_graph, existing_edges_map - ) - - if len(graph_nodes) > 0: - await add_data_points(graph_nodes) - - if len(graph_edges) > 0: - await graph_engine.add_edges(graph_edges) - - return nodes + return code_objects -def convert_node(node: CodeFile) -> Node: - return Node( - id=str(node.id), - name=node.extracted_id, - type=node.type, - 
description=f"{node.source_code = }", - properties={}, - ) - - -def convert_edge(edge: CodeRelationship, extracted_ids_to_ids: dict[str, UUID]) -> Edge: - return Edge( - source_node_id=str(extracted_ids_to_ids[edge.source_id]), - target_node_id=str(extracted_ids_to_ids[edge.target_id]), - relationship_name=f"{edge.type}_{edge.relation}", - ) - - -def build_code_knowledge_graph(nodes: list[CodeFile], edges: list[CodeRelationship]): - extracted_ids_to_ids = {node.extracted_id: node.id for node in nodes} - graph_nodes = [convert_node(node) for node in nodes] - graph_edges = [convert_edge(edge, extracted_ids_to_ids) for edge in edges] - return KnowledgeGraph(nodes=graph_nodes, edges=graph_edges) - - -def create_code_file(path, type): +def create_code_file(path, type, repo): abspath = os.path.abspath(path) - print(f"{path = } - {abspath = }") + with open(abspath, "r") as f: source_code = f.read() - code_file = CodeFile(extracted_id=abspath, type=type, source_code=source_code) - return (code_file, abspath) - -def create_code_relationship( - source_path: str, target_path: str, type: str, relation: str -): - return CodeRelationship( - source_id=source_path, target_id=target_path, type=type, relation=relation + code_file = CodeFile( + extracted_id = abspath, + type = type, + source_code = source_code, + part_of = repo, ) + return code_file + def code_objects_from_di_graph( graph: nx.DiGraph, repo_path: str @@ -88,18 +37,18 @@ def code_objects_from_di_graph( repo = Repository(path=repo_path) code_files = [ - create_code_file(os.path.join(repo_path, path), "python_file")[0] + create_code_file(os.path.join(repo_path, path), "python_file", repo) for path in graph.nodes ] code_relationships = [ - create_code_relationship( + CodeRelationship( os.path.join(repo_path, source), os.path.join(repo_path, target), "python_file", - graph.get_edge_data(source, target, v)["relation"], + graph.get_edge_data(source, target)["relation"], ) - for source, target, v in graph.edges + for source, 
target in graph.edges ] return (repo, code_files, code_relationships) diff --git a/cognee/tasks/repo_processor/__init__.py b/cognee/tasks/repo_processor/__init__.py index 94dab6b3f..a1aeabcdc 100644 --- a/cognee/tasks/repo_processor/__init__.py +++ b/cognee/tasks/repo_processor/__init__.py @@ -1,3 +1,7 @@ import logging logger = logging.getLogger("task:repo_processor") + +from .enrich_dependency_graph import enrich_dependency_graph +from .expand_dependency_graph import expand_dependency_graph +from .get_repo_dependency_graph import get_repo_dependency_graph diff --git a/cognee/tasks/repo_processor/get_repo_dependency_graph.py b/cognee/tasks/repo_processor/get_repo_dependency_graph.py index b36607ab8..7f96bd49c 100644 --- a/cognee/tasks/repo_processor/get_repo_dependency_graph.py +++ b/cognee/tasks/repo_processor/get_repo_dependency_graph.py @@ -1,8 +1,6 @@ import os import aiofiles - import networkx as nx -from typing import Dict, List from cognee.tasks.repo_processor.get_local_dependencies import get_local_script_dependencies @@ -35,6 +33,7 @@ async def get_py_files_dict(repo_path): return py_files_dict + def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bool = True) -> tuple: if relative_paths: file_path = os.path.relpath(file_path, repo_path) @@ -58,4 +57,5 @@ async def get_repo_dependency_graph(repo_path: str) -> nx.DiGraph: dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] dependency_graph.add_edges_from(dependency_edges) + return dependency_graph diff --git a/cognee/tasks/summarization/summarize_code.py b/cognee/tasks/summarization/summarize_code.py index 597bc4026..31b86d325 100644 --- a/cognee/tasks/summarization/summarize_code.py +++ b/cognee/tasks/summarization/summarize_code.py @@ -1,5 +1,5 @@ import asyncio -from typing import Type, Union +from typing import Type from uuid import uuid5 
from pydantic import BaseModel @@ -23,9 +23,9 @@ async def summarize_code( summaries = [ CodeSummary( - id=uuid5(file.id, "CodeSummary"), - made_from=file, - text=file_summaries[file_index].summary, + id = uuid5(file.id, "CodeSummary"), + made_from = file, + text = file_summaries[file_index].summary, ) for (file_index, file) in enumerate(code_files) ] diff --git a/examples/python/code_graph_pipeline.py b/examples/python/code_graph_pipeline.py index 221ff8385..fe4be371b 100644 --- a/examples/python/code_graph_pipeline.py +++ b/examples/python/code_graph_pipeline.py @@ -1,37 +1,30 @@ +import argparse import asyncio import os -from uuid import UUID, uuid4 - -import numpy as np from cognee.modules.pipelines import Task, run_tasks -from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship, Repository +from cognee.shared.CodeGraphEntities import CodeRelationship, Repository from cognee.shared.data_models import SummarizedContent from cognee.tasks.code.get_local_dependencies_checker import ( get_local_script_dependencies, ) from cognee.tasks.graph.convert_graph_from_code_graph import ( + create_code_file, convert_graph_from_code_graph, ) - -from cognee.tasks.summarization import summarize_code -from cognee.tests.tasks.graph.code_graph_test_data_generation import ( - code_graph_test_data_generation, +from cognee.tasks.repo_processor import ( + enrich_dependency_graph, + expand_dependency_graph, + get_repo_dependency_graph, ) +from cognee.tasks.summarization import summarize_code + async def print_results(pipeline): async for result in pipeline: print(result) -def create_code_file(path, type): - abspath = os.path.abspath(path) - with open(abspath, "r") as f: - source_code = f.read() - code_file = CodeFile(extracted_id=abspath, type=type, source_code=source_code) - return (code_file, abspath) - - async def get_local_script_dependencies_wrapper(script_path, repo_path): dependencies = await get_local_script_dependencies(script_path, repo_path) return (script_path, 
dependencies) @@ -72,11 +65,17 @@ async def scan_repo(path, condition): if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Process a file path") + parser.add_argument("path", help="Path to the file") + + args = parser.parse_args() + abspath = os.path.abspath(args.path or ".") tasks = [ - Task(scan_repo), + Task(get_repo_dependency_graph), + Task(enrich_dependency_graph), + Task(expand_dependency_graph), Task(convert_graph_from_code_graph), - Task(summarize_code, summarization_model=SummarizedContent), + Task(summarize_code, summarization_model = SummarizedContent), ] - data = ("cognee", lambda file: file.endswith(".py")) - pipeline = run_tasks(tasks, data, "cognify_pipeline") + pipeline = run_tasks(tasks, abspath, "cognify_code_pipeline") asyncio.run(print_results(pipeline)) From 64b8aac86f3e3b19391c813b39f5ab0777caa4d6 Mon Sep 17 00:00:00 2001 From: Boris Date: Wed, 27 Nov 2024 09:32:29 +0100 Subject: [PATCH 06/52] feat: code graph swe integration Co-authored-by: hajdul88 <52442977+hajdul88@users.noreply.github.com> Co-authored-by: hande-k Co-authored-by: Igor Ilic Co-authored-by: Vasilije <8619304+Vasilije1990@users.noreply.github.com> Co-authored-by: Igor Ilic <30923996+dexters1@users.noreply.github.com> --- .data/multimedia/example.png | Bin 0 -> 10784 bytes .data/multimedia/text_to_speech.mp3 | Bin 0 -> 28173 bytes .../test_cognee_multimedia_notebook.yml | 63 +++++++ .github/workflows/test_python_3_10.yml | 7 +- .github/workflows/test_python_3_11.yml | 7 +- .github/workflows/test_python_3_9.yml | 7 +- README.md | 52 ++++-- .../databases/graph/get_graph_engine.py | 2 +- .../databases/graph/neo4j_driver/adapter.py | 48 ++++- .../databases/graph/networkx/adapter.py | 38 +++- .../vector/lancedb/LanceDBAdapter.py | 51 +++--- .../vector/pgvector/PGVectorAdapter.py | 61 +++++++ .../vector/pgvector/create_db_and_tables.py | 2 - .../infrastructure/databases/vector/utils.py | 26 +++ .../infrastructure/engine/models/DataPoint.py | 1 + 
cognee/infrastructure/llm/openai/adapter.py | 6 + .../modules/graph/cognee_graph/CogneeGraph.py | 98 +++++++++- .../graph/cognee_graph/CogneeGraphElements.py | 16 +- cognee/modules/graph/utils/__init__.py | 1 + .../graph/utils/convert_node_to_data_point.py | 23 +++ .../graph/utils/get_graph_from_model.py | 45 ++++- cognee/pipelines/__init__.py | 0 cognee/pipelines/retriever/__init__.py | 0 .../retriever/diffusion_retriever.py | 25 +++ cognee/pipelines/retriever/g_retriever.py | 25 +++ .../retriever/two_steps_retriever.py | 119 ++++++++++++ cognee/shared/CodeGraphEntities.py | 20 ++- .../code/enrich_dependency_graph_checker.py | 4 +- .../code/expand_dependency_graph_checker.py | 4 +- .../code/get_repo_dependency_graph_checker.py | 4 +- cognee/tasks/documents/classify_documents.py | 47 ++++- .../graph/convert_graph_from_code_graph.py | 54 ------ cognee/tasks/graph/extract_graph_from_data.py | 6 +- cognee/tasks/repo_processor/__init__.py | 2 +- .../repo_processor/enrich_dependency_graph.py | 98 ++++++++-- .../repo_processor/expand_dependency_graph.py | 56 +++--- .../get_repo_dependency_graph.py | 61 ------- .../get_repo_file_dependencies.py | 87 +++++++++ cognee/tasks/storage/add_data_points.py | 11 +- cognee/tasks/storage/index_data_points.py | 26 ++- cognee/tasks/summarization/summarize_code.py | 14 +- .../graph/code_graph_test_data_generation.py | 51 ------ .../convert_graph_from_code_graph_test.py | 27 --- .../graph/get_graph_from_huge_model_test.py | 100 +++++++++++ .../graph/cognee_graph_elements_test.py | 4 +- .../unit/modules/graph/cognee_graph_test.py | 4 +- docker-compose.yml | 2 +- evals/eval_swe_bench.py | 69 +++++-- evals/eval_utils.py | 56 +++++- examples/python/code_graph_pipeline.py | 67 ++----- examples/python/dynamic_steps_example.py | 53 ++---- examples/python/multimedia_example.py | 48 +++++ examples/python/simple_example.py | 37 +++- notebooks/cognee_demo.ipynb | 38 ++-- notebooks/cognee_llama_index.ipynb | 25 +-- 
notebooks/cognee_multimedia_demo.ipynb | 169 ++++++++++++++++++ poetry.lock | 5 - 57 files changed, 1494 insertions(+), 478 deletions(-) create mode 100644 .data/multimedia/example.png create mode 100644 .data/multimedia/text_to_speech.mp3 create mode 100644 .github/workflows/test_cognee_multimedia_notebook.yml create mode 100644 cognee/infrastructure/databases/vector/utils.py create mode 100644 cognee/modules/graph/utils/convert_node_to_data_point.py create mode 100644 cognee/pipelines/__init__.py create mode 100644 cognee/pipelines/retriever/__init__.py create mode 100644 cognee/pipelines/retriever/diffusion_retriever.py create mode 100644 cognee/pipelines/retriever/g_retriever.py create mode 100644 cognee/pipelines/retriever/two_steps_retriever.py delete mode 100644 cognee/tasks/graph/convert_graph_from_code_graph.py delete mode 100644 cognee/tasks/repo_processor/get_repo_dependency_graph.py create mode 100644 cognee/tasks/repo_processor/get_repo_file_dependencies.py delete mode 100644 cognee/tests/tasks/graph/code_graph_test_data_generation.py delete mode 100644 cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py create mode 100644 cognee/tests/unit/interfaces/graph/get_graph_from_huge_model_test.py create mode 100644 examples/python/multimedia_example.py create mode 100644 notebooks/cognee_multimedia_demo.ipynb diff --git a/.data/multimedia/example.png b/.data/multimedia/example.png new file mode 100644 index 0000000000000000000000000000000000000000..4d406cafdeb7e6d778ff3fa37d99d42088673bf9 GIT binary patch literal 10784 zcmeHsXEdB&*S<&)(UOQnlnA0561@|m_cDXgqMOm8cM{Qi^e%cCLoh@1XwkbN$|ylF zdKtaE@_U~3U(b3z{nz{XJ)iDh$#V`1lx$#h#s=ot&IpUS6J`pLce4nwgpL^Yb$^ zGlRk4xVX5wx;i5xBRf00+1c5FfdOxCZv+DI=g*(`_;?Z$5=BMD$jHdKxw*W&y!Y?l zKYjYNudgpGEX>5jgolU6(9p1}s|$rf<>%*HSy@FyL<9u|g@lA`Z*Px{jm^x=1Ox;$ zH#b9}Ph5dOG_XS zI66A|>C>l-jEud#Jq!k8Zf>5Km}qZrpOcd#Dk=&Bfrf^LDl01)7#Os*wLgCRC@(Kx 
zR#vvQwx+DC?C0kPfk5DJxUaA8*47pc4UL0?Lw$Yy&dyGHdivqvVP+{{PE*QNlA&Pr|00{;K9K`XlQ6!T3U2;bZu>IZfU3+@$r?Fm6)__IvgC93B`ABbv!e-vo_41bj@@{D&E(*HxL(ek5}q5 zE^R&W!-r2u;=jImX=_2^6!h`sqZOb&-`AI{->dQ$y@3;vJ#=T2hZAF)oNcV_nG2yR z#f`-cMdz+=KsT&Q(5*N9@5g&S%7k7EsPxU06_5F@fmPX;1{JHhtD%m&?VfCwe(M3) zUB(jtFRJGr8_WEznAl4dB`9|EA~_9Oa0d*3VuO{q3kMM+ymLN*IPk&E!%{E`Z7^)Q z_ax53FV5K9PP?{Lt}}dBJL2c*R7Opkv$pT zV`4K&8)tqp?nL`KEAM+26{>7t9%U6e@wK)(_FPs>+ zjd<0FKJ1L~-`J(m*l1FcXKSj3325$2yEOtsinn|4!Gm1r(&L?9uPL{X#g2h}J@XVk zTM<>2Cl_P83w3<{^*VQDk>EN#DreWBV;G+Jo`^1>xlNg1Vk~~$Lp1cr(YFuk(1 zXBLx_93xm?I}ccGgIK9{b0WEi$^6Uh#kcY&_~of1&PPXKa5 zdl>Dpoz93BNr0y0;~BfC(ENI6@#m(`(unrr)UL+Nv6jo4^e9_mNEdiRWx}AzDN0 z%@4LSC3H>2k6|1^BV)@n{WX_(0AM}`44A-6ug4lJ02mtL(H{;vtVh025jkzC>>5w9 z4j+xMs@o(GF+huu6EGG^)PIg-tUR%RPagb!Sa~4%r}^h*-coDMNJOz`3UB6kt&ive z8BYPqo*Q2wALXY*vlc{I6Zya`>LKRdP60`@p{wCdY5Iw;5MaLgQhQ%tlDk=0p60^P zkE_73^Z=j{{OjlyB<$D+q*mqmS#zsazTYFO}f0qie&VnvfhMW0g|3HHORMJwsa zEL}g?Tz$;VrK9XBzg1P7Soyih;Wqw@>To%h3}12C7o(aS%vgh;x)LzWTCtZr)?YK1 z!aZh5QtZ;Y2x}eSu zCPqI2Ip#jps;uD9MV^sZResi5F!0Dwm(2imy+HnfoJ)#YB%1dM8AWEa%%>sBX5C(O z>q<+qbnv^j>hl5$P#)N_ETaS48Z3!+Z%U81+|1Ke8lS+( z3O{RZnLY|Mi`t&vk^U|6^BKRITXvYb82o!RDX z18=`J(W#s5IudSU4Xp_DT1w>IK@jb;dK4?bWW0mR;(ccUFY?_lV@tFH>>a(|D!?4# z6()6N3_$5RN)}l$SD&u&prz`L)PF?V0%W=$;kAq?z^IQRa@O-C(iLN*HG9th%q)15 z?0)tc=u7;Y_MFeQX#sH!_X0HOtdn;Xne}K3PT=4`mtrWM-&$jv8wvo7(Q%gC2l}(e zC-XHkxA=({rDPL(-+{MYUmoD2U%VP+g*)&W2&5j*5h5W{e%T)0Z{m!%-m@XE!TPZ6 zo%dG`!?%3Fh2%-O}G7xR&Qe)_^vItlg;)3K`qY zZ@+C(To2JYxs`@oKW{8wtH^YJ7PKe<6H^II5tRvVW$bi3|2b7#9(K+ zgmqcugD4l}re0=!p+T#yby1mK8o5~8e!JS2f;|^~_++eXIuy|hwm&}WCf6bvXt31TBirz9Sh$40w9OX0{zWnRecW)mKm{Z+64Wx!QR?yYEx^>|(=Z|l}7 zJg|h{Xo`*flEX{Wz-Ygg<7?D#-1JcSl&hLESqQa0wJmwZQnnW^)c|4%byVa(&q){K z?-9_j^iK93mN#4m92t#I-DX!egFZAlE_{YS00f&Gg|CIorz-S4_vn~fgT){llq>b@ z!4C(Fp-mdpW`3-?aq1}-YryJJD$!(#$YnAU<+UV%5GaqsK@BwwJO`mXoyHz z-Gc(8V;&?)XsPnU{;~ge3*MXyGcdZK_Udh2*V$;|hNyd5Je1THmEc@A%PYTZ(^_UU zpy&6U-KM% z&}nRXsN@s7vlo8OlyIa<57N}RBa3Jd;d528FBQcc#)wpA8G31uf3m>p^T8jpWO`3o 
z31zCi#+r^Jluiy!NNe6R#u?lB$e>1A?%TROHWCggpK*$|(FzC*&~Vw3Pr3(8;#Ors)V)9e<^!J9qd(u6FS4vxd6DRXHmVwop=2BJx8f*ZAREZ~d}o z2BEHpd?T1rjjVUWu=!Qx{6Q7|w44!)GLPrT$Z-lnL>7M9RiLwwGusHrLn(H&ym&(~ zeQjgCfAO-LFHx#X-^BFQQ5?N0_Z2u85@dk=6i59 zcexG`JaJ<`v~#J47nF~ZEEuG%%d=GNEl-(X=Z^|)$*0g6?d^XA*V?Xkt~QDoFxK=_ zsQ)D3#~2MFD{3Qr%bFYt& z&#vF8dqKIv5+3XcW>OOhXB9k|c*mQv&ZMaVJ6fdYGlY&D-U+cf-hYMrR9*q5%16l5 zitS@)AaypRsB$exqi#=;`-&Pw%FU!Y7+LQnpSj19DBi zDeF+nuHBojVgaLYYQrVBB1+yCsaSJ6wFi8tNnP2r2VZW0;ls*;oOD0zaWXz8D=Waf z|AhS+AoE6jOj}}sq~Avu$t>g4pywq}Ms&$T-imNFVa!;(`dGw=1*qL#exn($o@A}| zk4txL;_3RgO5Rk3spAjJ$L=D?KT|A|A2!l?M+_tVmzuaR%PeR)=*HDgWj;4Pu?Oa@ z2Clwt(kLnj+adZwAbR3B9ZNA^hgCHO0D) z90SCIT_`c1FLemNa|A~Sm{-|(l(}w7%jMH?dMKQKY7jW>!jk2fl(IMHy>2>khJ ze0S)S^X!3grfhf{&~&r*T=`kctk#xKXmX~vKS=3dB1QCxiGIy>Ba!5_VEv)Erpd+O za2t&+S4APuyfBW)Y{ROYzsPR!EB;@^vpT$eLFoU}+CS}ni2_G-otvp(Z z$Q#IXJF8>5HV1y|3-XeE)U=FlFlV;y{sIM}eXcroGUnGUNN~F2uOurI+YBcjGNRZ? zfB@sl9V|g1>_jLJzauB+ccmvz+lJ{Eq#BLrKv`20;(6{MRAy$3nMH z+CUBC_1KaMGoav?OWi&93xBifq4a0d`>gp|&Pw4VWHbCoe5{(1 zyQ@4Z6rr|M?L}{6%K9o}+4>@Tdf>|x8`<+?9Zs7BN%=ZHtkX&#pIiYdUE53eU`qCd z3Y=M~%JLPMHYX3|__^-R6(-Vxwk!Jq_W3wQKv)D^=&@NQ3k%n}gn`A|9PwZHIufXM zZI`wG`e-_D+>&WR5CSsSgU{e99gx`|QFWDF0`UN~DgphZJMf?1z zJ$PO9;6ol<;rl_$`YLR)a1?f-Rx#j&i+t`Rac%mm+w^pmVAFT2iTdg2^m@a zY2Xv7q6U-hrHYOhvnXPQPzOO<){3E=p%ay-r->qPt%(|3`PJ}?HT}#0I^fe{!@1o2 zeJIx^vFW87vkAAywAlKiKApPODt8w3?%*B?XhQBtQd60K{T0Zgi6zUG*R}Hk)cY@e z?3>>EBT3IZ?d?lTC2UEK6MUr;LV~(G_L!S%u)H!y)f~Y0~ zm=Dgu5vXiMK^b{N*uNK_WKSW!u_Ol@0c%IW@U0EQyg4?puab?jh>?sdqxSo`9&i0Nwc@#( zJHP|myX!QX@=HkH2M-mvo6ZRgI{$2|X(?||{VD)|vPhVrEj`knBc^==FM5?ZVCL-u z>=R6XCob#NpK#r0gZ!Wv=A;ub&Pe#>b6n1Ggd>HfF$|00F)7g19CXsugI8?e0TCn{(Ym|b@Hp%RDZJIS&@ump}27It2F<5?~h0mpNFStm+AFS=WvAcMgIqGK} z!K<=^+HeS%X;0~R51=qpX3e4puvK?dXVG#CAin&zS`UNrGud*YZ2& zbuAVNR}Qc-`dbg?a^oEWn9D0PlF&fXGz`A%)COPas%ecJAo@E%XW$l96jZ4lqOXD~ z!;XH{61^?F9T2Vb-wDI1Ff1w*Ky@ZCEiW2BJO+ZAFVea^#JxSLheSCmWcu6W%zF-p 
z9GK^8f$0kxuBNTAc$1hq`r%qtu$D|!lPp^Ku|r5q2}t{~TEAHt$bRKnHGYpx$%mpD;7_5sZ*1*Gg+L83eiDAwK@(FIgyRQ?C{pxPmS-ahSy(&iMV@g4vu= z3}Y_!wY^`M-eg`Y&&GeU;?7W7==wSjWlIQe5Y=*AH1D_HV9%*-Pn^O~QkfojaEVQ) z6;j@SU?kclB*g}I{zVNR)v8?7d=%JR>^a;@HvOSe!_{;6*tbN#bYTN!CG-g$(d=$8 zopizX=OTGvRmJaS@M~ytsl<*2oT~MucM#QhSq;Pwpwm61>U2>e*Jqn#=q*5%Q%H(t zVt_lCMp~iGl=p{;zJUu{ZphqM=WX`K^IwXN@PBu^#6F?Fe5G9fcR5DFS()?-CVs$9 z8ZE3eK6Z4Qq<$fExJ*GBGuy*9*ouj}Kic&S3B|8E=7R}B&zm(?6%S$? z|DRPueErl|^jGfhg|1B9?`W3eX+{*Gbaf=MV$3}JubSrRr4aNm6c z1JSFHQTd}IyMy=<3i92t>aZcL#`}nLBLH^YZA_B;qZm)@h<>r0l%QLL!3Mpi?h1q@ z`jzLz$jfCGZ*cmJTnNj zg8R9MTaC)nZcOF5Y*cCcS1*w@iBcY&tt@}Z&uNTT5&Byk@VV%=q`vDI`g4anbnb;f zS6XJ{7>wEo0>-?7cPrJiArNeB~&WXDR2I zG?{5x%T%hRLyqsInQjRA1>)H^=StC7^Y4e#M6@W+J3 zHvF0q1t{5LN*6bdHF=+j%j+Oe=yuo0u~XcAMA`m!Y3e&BAwU!4g)UA`L~`NJ#NBP? zGR8Vc4G4}pC-wPhcI`!*A zf5B0&#VeG$#Zjy*;G?9tgi_jVvek%W(vk`f(ymTrl^w(6s-N!B4yAbeiP9lV=g1nC z7fP<_mERYl!gKZwO>e$@6Mz^$pTRK&2hgUH`;40nNdd?}$e{+_)6+p`!Y3!g)Gin5 zjV=4fi`cdFarei+M<$4v?%GBTY&+&wAovK1vd^9EX3i=`2EW;~J83CtzS+IlW#CFl zAi#HgroAw@a6BxtcN&_LUXg8SwG-fH@Lu7PTa8i&V6-C}%Tky%d%k-saqtnJBO1^G zW8_8w!*}Cemw>FC=o-IvBmDy&KE?<3AMRMby+IxL+))3O5_PGZ%W%c~-4Gp^D|^Df zcWeJSy2@3U$w~M5;(9MNT-SUSOH~2^jBa4jgO!Ao9qeh!O`yHu_SNy{yN_&!yoAQq zMpzsD?p68Hw(4#fnhq@bvMn@CEo(`vupSwT1ODiVQEbXzD)2%Lnod zs5jNc{3y6MXNqo*2a_mxfQi!D(q65<_Rj!mA*`;V4`<+qRjk!)=GgXtdVe+Ve*VTL z->|P6UyiEX zkG!0Dlt@zYaI!2_-g00`J{q*~WEYj|+k*)SRE!4^%{_16w;1yNj5z5QKGS<$PRRxa zY$BeHWAX*0yB4p(#^%-{%O7 zIMW0S4j04>oa5hSf0Nb(nl}CzQ^F(6wWbQdFn|_*Y=r+v-3IWJ zZl9?47Y5O7V*PK;3GO(mTCmluNi#6Yh(Q=}U;*xI-|8lay{yjg2)AHdsdQGxjC^UL z*}nzUxme3nJW+WJL-Ha@V__&^-QP@^m>MFUy!wLn#K+>VY0#vOxiZklNp?4X$?C=l z<%)97;{zYxxgrg&Q*!(3&6^o7!zj z$(}tk`E}UubYlyr-?gKb40GDRo{xIsST6;e!2c#7?TS7aFBOE?yj0&Fo?cO{nLSF^ zs;04V`)kCj5q2^SF8)a^1j{`O0-gSVIeFN>XGPyfjmDs1=n{y_+!43!a!R5TBBVx7F+d~lfy8DMzaBlz$6 zt??xD-&FUWK&jzc5r3>9E2_&?M?6x)3} zUs>S5E*|I@n>kh>I0?Y29MpSLVK)3ebI>IFPZoc>qy_crMG#~#IOh6yFo#XhXsaqr zBJq>HLzpvI)dgH*^&!ONXd!KCP2#Pab>~T1w$^DQ7pLG;{nns5rhnPSPHobc&EG7n 
zn^I=`kqj`9T~w>y@2mi0v%Gsv^#~qOr3U-0wg+0kcbG4<0S-Vuh^GBq5r^I~GMLu( zd=jsFMu<7n)9>}b^fwjsQipbh_RsS8d1q6&Gj9^4zrKFbHD$U>`RFrNB?Q44ib&TI zSgfq#$phBb@V5kTXHHQJJMj{}H+Tp8qnW1%g(l65)Rm@!WOk;i9~bjm<6GB@zomou zc-u*1SB<=7Zo;Y3Ur<(bIuL;U+HC&W9V9OTv&Zr6sk!S!;aamtg2oRr1IDZ8^67M* zu~F{{I{$4wRoAheNO!A;H_Gx9@-~gS$1}Ceq32og7L`ikFa}c zd<3<9#zj2X;e|UHG$&6UMa#i*2Q}f71x=68TLM$MmtZ~|C$VAMa`72%ONXNH;qQ}& zlkkPb{)+k|GavVjq0B^D>T=V25VyW0v{v>8KRvB9xcT3WCi^)?QE|umUlUV|SCC4( zr)K?ggqYU`kAB^mqgJw^`*pf1a_X6rDDUrm+rP|eo^Hq_EIFL_JIh`C)V*HZyx2KoO)617liG zC;(0h0tcrC=jzu|dm~bAB^#N$sL>k**e-@2Qz2n-uDHS_YwD&5t2rv;hwc*;10#4Z zixnrvO;r_PBG=0_8|{zW6}^_}PK_3qWbmt)t!fq$xES0$y za}mUV_)kR~(+s#61)0I)AVxfpBd6LDjEvM#m!zI7V^yf1<*aqxu-f`& zJ6O$oEL4n=Fxut$EORw4WJ3|fHHK6aEfjb{2EnQtxjZf&C?Suogzo+6U12xRF9p))nHS4GxT!LEgOeo66)^o`aYJSGItH+ zKImH;=Iny|Y-O$mFI(#Vt%^ZE?^NvHYxQ4VnR@10@&x&F#r4Y@CbBV{2#c@c761#qU4=?jMmmf5KTm;RFScmH#b_sM!DW)^*#D4@KyISN|VN z#Ffp^(<)R&-HP`8%K9|KBc1PvHN0{@tq2fn2nzZyA)CH__FqCW@SWUWJo0chmThuTR5i-JQApvKg;f(69|&i~M*O8Whkw(Kl|@$cOYiLai!C4dfL8S?&nLJqjF7iiu7D zCuK&Zz<^Il7xH9ra(RGB@WW9;w@FzLPX*FHLk9m+NQwz86yapy)m3?Kk&8+W*EGcl z|L%d@0P$;Suh4212NyoOAp~f%av=btqYtd66u%~cANDz5iChTdjG746&RddVDR7_< z9C=x(31q)`X%0hr5Tbh`i@>x>fS3{GWwvsWorgEIN^0^@YkYzUJ)Yh{$O>+kz+0k5 zE%r7{rlAl@8-Xi&rxI>ik+<6c%exC#O)=5Kwn?S zhM4-k&c19-+_*Mg@u>0KXrTKLFPSo8S#we^th%d%HG{ah9LZ1X4J@<$IoSnSMtU*? 
zi`(9tC-wnJGwO_HX!Ir|Pib2XaQ>Oxi$xvI90YF$BM>_^)Ys|-#y%p`zAf42G*Tvi zEFyaEK^=NwxF-a=9HhOSMKT|jPI+)FDy!epUqle)eG&$MNrxQzhAC-KurNZS0omD( zi({eDXaE@2vN@bp*<~m-1Zz>8l$qXI2z8P$5F=g-wI1n_VHZ_S-d|4wP>U$GNn=Vi za(>{a|2>oZ+6FhgA4GqyN`g`Ld*V->nY&3BDO;(yTzvX3pCJs*@&l6nQRK7Z1DzyUhGEd?ay2K^gwXH<@ln!p zUk#!^9-+U&#rdASnj1c(7Fo_cD_Dt!4l`BSjcf(izB-Q3I|E?!$-9b*&IZTzFq zW@#F??qt@!u;HW?`T&MB{})UWnJH-u&RT{c0G^V`nrV%?&M1nZ0@mp*(nAErR<9}; z3;$JDL$Q)q{|)d($;s|VRXqJS?|lb?W*I3)t%_8G4@=g%x$XO;TjHTIO*=Oaz4bci zte^^<(1CaB9CVDy7DV5nHH}A~25NAkfSQJ>+8Bu>^%o9~Z4}<~Nz_a_`>(!P79Lww z2R2etSDu8mzI#!P24_Hv8?uQNBICn(UFd`R{cjf1L-K4DC!}(dTo^~SZBz56NDzxj ztpE7=#D~BCSaa<7R|**8Ua8seQX*16c)51^e3-+H3Fi`Vui;*&G~ zr*mCC*ylHy0Y1eeQFrT~XGddlb?-l}P1g2RlS$|^eFe;_0_4#oY{^@(x=9wC~{)7iz|WA-9}u%QejlCx9a@!`(ZpHcVcGQMQKgaWA(eE_YH zMUizM_~*v2qBj|&w${l~om(blAJp6J)2NXi56FdnfzUEYAbs}85fD#c1K6%$pSMQi zBFVqt*tTnWMl_9yWmQwewXU}|f!(s8urgq{KNm)bvwd40iSt>fuY;5SnO_X2t|Qj= z&POlAebCSQ3>tbIa3o=BL`=+hp7G`-esvM;8BZw1{W*TljnkzfQ*1a3{KYQ+V*337mI0zwtjv;)3d^Br}z+y zLpFGytD1pyeh5|Yfe+>og5u!(xCYt7vek11=Y>fm7*a~4es5#k_pLSCLpuavbt z+$d1W9w`)!fPLKJrUBu{=9);Chp%Ades?B|fu8jF_Sw(~IOCEg0BZ)kor<2;SYiFN)ZZwM8^od1E zj_(RJWd;!G`Vr@f!T9Y@ZCao_yKw1WG;873_4+29LXtR zG7SAZq&Kt|tUGtmS??)Tk&I)bEKhU^xXy^U{Cs z^Za&l|9<-Yuxb}&b;F$Vee4?2Q%fy;5>7Ks%*q@aZk>9`bp9^D{Pghy;sJS9A(B;P z(?{FPM(gFgTl)|3i&zblBZ*_b z>-ert#Fr5ZtXR4}=)XdY80L2E*ZEqh0qKh~7{#*6Y!Qw#41{iRjh|wee+aZakU;e~ zq;FzGw^`$QekN?Q247q(~m z>cLIp3+79nfe(G4(MP5AU_Jb)`gLcUgm&t0NYCL5k~LGnfL2vhXPpXN4w&xe>#VOA z9#Z8pbZDj-QVBN;9WYQWQ3^SZi&I2SiGqt!Ecw_FRs!fFY4kEws~-l!5-4fS=GyXS!DS>6~$Js8Z zN{K9%tN(g@TfAT179c$d{C7@mua8d

3v5iqi+YJ%YUAZ&(>^9(g-cJFz$3m@`j7=$?ndfeS?qTWnnz$Y58|io&P9~DG7%1@8X8LpZ3k^1y%f&{P6gG9HJlPL|Q}e?b^*&lrKGPxVFxww$7`p z?tC5c1y4JU6zS2UK~bLTNgc{4n?-bIm1mPJ*2KjTau#d5pkzE#R3&BPEh?@W{|0E=ph7DB!==Tc@vBd`4%v^EhR;e<);7&Gs%Gu|W?Z2elz87Na8m(J3~fnD-S#Wy-eBhI2M zX8CYJE7W5&h*9#fUm`9|8_XbaU+Fd9(G}Nn5wCy#um9x*Iabe-9E&{vkpj4&)82+A zt!?nKj+zQtr)ReiFbwv)sQ*!DQuMQ&;r6sa>4(eUqEqmF<(}PkzwGkKbUwsav!#JM z(X~2Tc?_TwI%_>+H~Bm<9vtA?v%;Yk!+qqLzbL(zCcqkrxJ1|oBM^C?{vfKcZTNt6 zsiRUwGaYo>j+A&W*h9^UL*|h238C~<*CjsC)P2_RiKNC7=xo*|M+Ci;e~*V28~t}l zDOOzLHhV$D0rmTvC^4=|4fQYlMd$J4m9?j#*?sM!`S-nWKdMdR&C9ja=(v^PcpT1PLAZntoJjj zx$}>FvA7n0)ua7{27|E6*&O9&!vU3X#s-~(sBri*ndE1_$H|?458o@m>;~(wAbuNF zUd5pyWC^WvTNiJ*cLEIA7oH;{Q)b^6h89PNFbzcqF%?N{Xw47J2?L(bx0%@6ca1{k zN5o`qnX79J``z>d-_&~5B&^rgvreZXJs%*Bd;^kxQ}X!?Nk-pzQhc{a@3>Xgsx4;e zG*DzHaHKol`IG4=bNDjI%}dPUFv$QpE8Sl(olwn(!4>@bBhehcqeD!2Z_|=~b)KF) zS!^dN^vJokeTrj16=>0IBiyhrAmN14YRgzv{0I{N&LkoEJUS9LR=?@RUd^?DTk8Fo z+smncO!sFQNKYfJr+!|~#DbLGK9>v2nSj|(?btp_EL*(isP<@*lKE|jHoL5jpu59l z47TuL$VR8kAkH)8OvJ-@^FL7Io?v{5nGlW%gQvO8C8P@u0=%v1g{`MXaTk%QtrRn& z-_HQ`Jj%7~d12CnaoOe+6pl7_WFD%eqb?=%t>!R@4wnrzVv(mk_6=MLe6gPo&TT1(xR34Qk_3YutV?Q!-{F>VT^( zzsA2wC%9xMGZa>n?rm7v>b91xfr+X7bBWp3lS8m0<1;|5tzGwLLb)TUuU5?i{!8xb z=b}zyzJov2^XQXpJ{99G0(DV zvezRc$4H}AuqW0ODU^~$or+u2C1Bk$mo-olaXz0Wl_}OCPUT$LNSh5W1#rBIBrqJV z5f|WQqWwyQAwJI~3h}r66FSl}1F^JgeH@%kLm87^s-wA%6L^LZV&TfruNOy_tko?D zs8;b^s|@w<`BO_*fDfKc-KGwRfQLo*FS`7|`|f1yK?}zLdH~=Clw2BFIS0`of>HdF`9oGXJr#xCxN*%+VYbM3l$lMkQX0ggoy;QA3*`BqCb)QZKt}E!> zK=P+RFz%ewWk5ag(W>BeQ@Ur927fPNi>hSn*6p?6b60M&Y5HsatN-%(C$!Tko@QY< zZAQ|yLRR%Psxv!JiEJ6Sg;JDUeYLb|&Giyf$6o4Qp~JCTc-{_+YHV883m_@&3w+;b z&jwC;11V0N3DnVhV*J=64*fn8Qz0C{E2I5~eF-MXPq&+~R2FNBW@YO}$qu}sBe5GS z-DlEz0bk>lZPYm#7Y&(M6wmCp$q|wRq(TMZV~b){e&<~IAt1lz3|UA~!NBOb4} zAp*x1{Sm4B@pQ!!Dj~N;dVWBOUX{{JfKpC3HI@!p=P<~4lXY2@c|&CANlCHi2gazD zn$w|mh|M)V!1y-i`v~T2KWsH@&9M<;?P8^|vS~7Efl~v2>!k!dU^p)nH!mgTuv|t< zj3$VRGmH;d{Oi;3$nN z0_i!YD^Y&RADlO@xaxHF@Y7^4+b;A$1X%OB-Rd4KsU$j|9F?&Fs9dos%H=v)i;heF 
z+2d_hEyk3+Dq~C>dWvn`_0%>=dt}%Tj9c@|RVN3pnBUeMbNQ-ysX1t38OG<`9o(_9 z;$V=lvhrjOVWC-AC~cAg@KP6?F~DuJ(Ad*9sS#1hB<493oHt0%3Dn6g?li1j5+Lp@9Gwp2!?<(0$nU6|mTFt7{Z7xiw%#!L7`g(mmZY)8qe)`cD;NAB=vv39%Q1fFIYTYLls~bFYct=IVWK@ z7r*nn*Vtf51?ib$`}jS-%~-3-?PH*80)$k|lLZ~e18bEe6t?nVt-z`Mg25-Ws@&*O zhSf{1>G{OVlR(juGCfdeg_TK12F~wVA^I5kC>bd&R?e%6M67D^=;$n62LQ^)E-k-N zTLRukH;9MKz37;9UmiDZIxIGnU~cv+8j3(d8Cb)W3Jc6YQv1C>EJ7n|Tc{c7siGp9 zey`0l`Pt$)AG{iz{h{T}*U|WHZ^?Cu#U!n03WBW0#c`GjlwTAc(3io_F~`zG!hY5b zNt{=)L5oB@AM^gw4IYlpUB3SnWVlwHVdE+-;%|pLZRNRIK6bz;LMe6RJplY*eyCO) zQdS;kpY!{i(++@v2SI}o@APxf0kHdW-R~X0dNR3s86iErw4UXOkO0-mHwLd>{B3FJ ziyidUWsUiyjz#~;yA%fJ*o&c2I_}? zPC!p9q~J&(DpaiU&C6CU;NzVsLVAWcIW{ZEzbmj34-5{$b|V4FK>09D@>wg~y8W*0 zm(yskuA(R{@V;D2w*+uzi5Oxz+JJD+(g|c|7=^5-lpl)x@nn;R#J{2x{tn|Qd_FRN zKO(Gg7&f1;5Drz>ml41~Lr#97NLuXDTe`k94A$sjbAmZ2B% zc#>ip zw(kTm8%;c>@aMu}0Jv#R-6nrZC{Y0fD3Jyl4`&&NKrA1`9?5cyB93Hpa*lRfA?~|T z^zEXsB$p`mRl-a}m%Vve%i$J6@7+%Mh)es2Qi!-2{I)M(-6G9ho-rD1@`ZtH8@hTk+-N_Yg zYuhGAUE5kox!l=!r5lAo2n(*OF_By^>ZU< zU3{<$_}wmH+N-)L03Vr#DKP-O)j}P1dXXE@vr$3G5vkE?>(6aJan>^hYiNjWjvcZ%H6wl+N<}U$Hv&_e!jYYOkhA8 zuvBoIpyBklEbcRa)ng>Sdrp9C^Xoa`zOSSnhSQ-Sq$%p~rY6dGFkR$#U9wyHoj18%ay{$Yryq5qs zAX_H4=EYL^z(##aDK=efqHG*ZIzdMtF1t8neA1|6?6U3V3gT7kJb&(2_XZT3=-HL< ze^~NA6S9HHmgu1@M7V@`DQD()0bZ zx@4l1$q0nb<37!)UdoKo5xbmQ@^AKLDxKf12V*fy<+DV9#67tM@`QeQ2K4AHSwLiJ zJP%sOzIN4ZSa>!!6zM6XR%&kTRpmi(^JfH?T4^(8d?X%cuZ&G5cH~=lrdzs}TslUY zb;_6$kgddal+9>0{+utx-Ud|v!z~9>OGrttu#7LIdD&m2IN()VTdc3MPLE3t_G12A zY3O3L!EcI}x9!>+-&RFyDtmH9!MhyTpSj(ygKC17%H3{cyWSQ~mSXw|b-3@cQq~$G zJ#~D6qMWFCa6lA*WRM&^7O&BC+bQH3t}r#!u+LH&y^j^7OZ5Q_NSee92lT-Jq1fAS zJ30)?$z`iX>r4zV9>yLvo(jvkO3HqJqn%7r@?X>9{iF|xm!OF6hkIzbK71LwKqNKYhwnZ$4uWWVX%Tyj!J`^UrO?NhNROQ$qbPVOuMqGkgZMK3 zG8ACJ2%@msW+lY~Z9Tq>OEVq3z(qV_0mG#u27#gaD3N_j_(1fqB2;c3A}%xrsbN$g zD{3Jc17J7|HxdZwQ?clf;yZ&+B+!mrcs+f+6zVOrj~hXdnK*Dj=lNQ0lH2{sF)HU^ z;f<~7gC!L~gEx#C(g(ac5_ds*NNL7Te$r++pi(rXQawUm?-L^~e=yx!(+EV=+NNM- z-Up&>+YmD7wDd3+{)AGNez`nurj0Ny;IXohqB$-N335if28N59H!m3@4s1H#>BdFc 
zR3H#zGJkIv$;~rYR2QC-Zr0kT+*I_$q{{BBy7VhxSp8Of;l3egx{71HVQMFXcKXlH zl}(Xhq=$;;s;6-OD;PLM#^_sF3?2~_IvxlXL3_u?P0LsIkHacHNbf^h(>QyUuEHO? zL*-Nqk!-pK^nhW(OxIYNPqc^k55aB-`v>#I$Ba@6%ip=(@=Gc|2cJipr=r*Cs`k7f zHhr7UL~Nv|4+Z||7>G0yJ44q1ujTU%wd*=C`p`xDc%|{nB})bq3+p32^t7E_Nw5jL zyx$KWHd^2ODQA?>HLn%!=Bl#ICj1nOIqxw~!|}z$Ky9lB_GtNw)_POyi<_8BV9nUD zZBO&$xdl3es@oyU* ziar=uBcM=k3XbC^r9-DVfa&i*Zd$B41znStLVB3#7T#3VPoOQ8GP98__vspIvkNaI zU)**H`O1JHU;)Fjc(!uSRnbq+RD3Fxeaowg{nf$j_qD+~muyYl zbF7`zwK*P3(jvvpqosRmaaxKQgLBwHCn1%Lb)*-8h!y71zMQmpuUV366Y@Kf`5UG7&V|C*VPo%|RTBq>fijTs{yDw<<5Mvnknd5E-`d&8?Rp5NC*k z7y9phY)#YzH2k0kmyr<(@m;_8e&bv3?gPwp1(5tnRq6}K{s(2tocJXT1>xLZIZnwP z5-`@embBSc$1Ud^=)Y%%8~f-nIhGqdQuNw6>a%_y=|M-AMS5OR{ZlXN@lP^T!uWu* zOf4ISO2?yTM`U@-1W)9&Mj-ZkIi3BX0+s$~QJAP2*jzhw`;D#>1TDH~a80a1v?&|` z^b-qoW&tUC8=w|HCGh!)MOAeZj-}f>9K~gu~IA6Sy-3p>{ zr5}tqALCo`Hsy-V=DQ8up7D9z;@TmP7KIg3tehZv&S}kADfkz`JK^tJKh;oTu5T_| zzsC13s&jL+;2Lo?<91iPbbR;tVRP&D_z;<24{s#abrSPtp5<_3{$RJXV|fN`+UN^9 z025B);Cw)Otf@W~GXjl;f%sVbrnWASE5aSzc0fPMN8Pm)DI1&-X_ceER)nD4rasvr z@7CZ&l+HOH{;X_J)~Ff`=eJwV*B&<()Qzpu*!De?e}U?l{@qM(^)_q>u3p1tNut(& zKJtweiePGiF`*M+Kx0q)d1PqKp9QH)2R=n;DHY-fp}&5l8zx4d?^vFL^!PH}(<@$Q zI{+5~xLX{W*V+*JIf}kVd{~+`cU6q@-kM4RkR?=7X(>N_u&tkn;2Q(}AK!Vvu_#h& zf{%C}jEpe5?V={b4k3F)FdLq_^5uS9yZg89H;AnY z#K^l%%9*U-EBxJpH-b@OQIGZf%)JytQ2HsK&>ssDt&gI^8s|N9yP;>>iF!c(9FubX<4;jkgd|dm0x-edPg6ub6OoZTp+LJxsE3opxH*-fc|hcmGlU z{h5bC*4;!*VRKlvuxe}fyn6xReg)1U_aOq1#be||S30qUA8Wlbde0dGVT$aVIZf{` zt;VNj_sL)vKc?iV?q`^v@u#l{H<%#v{8Gkcl;96Q>$VZAT1GUEg> zvW`p=1_k?{e!g>SsuyMQ|5@h)fP-UMwE+D#(bk_MK64VgckqAIH+@4F3HIEGXPLE> zj^^fbHmMW%W?>`c(PsphbSf?3`^V$49cYz_^oX;sqZnRgCjoF_cpcE9WW(cB=w83D z@MxzgH-st!(m?^D1klTsdXnhsbI3GthLCa-d60TsM?mqzhFvB_U?zWeS*8ffOgy?ep`Ibt24(%Z>Hg znmK}?&&N#~cgbP~DQuLEO17(E*)LJ;2kkMfKe}7{6CP&2cHdg?gIGXxmAG$bQaxp2 zxm`}C*+%*?Xlo(7+%0)9jDhl*+u-l76;jZsuJQ2j9N+kE89?Pe@c?)*QEhXHkRA|J zd3YHyCb|iVku+z>&z8L*>mI$ESP?(+xFz_FKookBN)Yq1GnVEOCVu{qv5$zY`6*dTPLM1r zhiTIqBW7#oMiDEl+-*uyG;WI}J>8gxh+-tn3(@utU 
zuQ@RR>&vCLHXyvq<4xN(FV!hJRM_YzYgxAU&5W=7&i)LG(-X%n?WdIhS~s>b*1*aKGW3fInoz+ znzmYB*WMSKo~pcksmgARt=y59bR<`^C;r3cH}IhH+dk&NbF5Te!C9IzWPBtco|nBe z1F=6uvy944Z-Whr*yW43eGLO~(0Wa#8UZ4NL>h=Dylw|rC;{VF$zM!Fux|UM1%c+a zq8oMqNJKXR0L&cYT})FjdY%Bs-up+UBZF-bB0=fgTO1>pNBvJ+U#9S_VYxvAQ$%9U z%zG|bJ&viR6U!_41vxIn_0O$Wi#>LwIqrxgJ!zyz15zyCPBSs)6P6MtVp_ zluEk!qnNV zQ|w2@_?VQ$$5xY?bScs#q{keBB2)i3xTuSr2q55Y=ILDKuJK+?Pd{`AK$8GU{Fvr}yyia6u*vavZS^coH zAE6DVx0?9FlS=c9)X&+cv;YGm?DO zDe$a>W{udNiW&Cv<;}sCOH~)k&!<#J%=iBBM*I$3w%n)A5Djm;I5*r+F-?h61k=Up zkGg9_7ikIg_L$=xTb%*&{tE)Tq_bLSy0GDIE74i&6Atul5>Jk{1$A zgln1~4cA*CZGz7?dHMwDV5W3r&EWnrhJ^J)jgep#DSu*`=LC{Xf!b*?s!}n2zfr|7 z0bmLU&;YXyvMpHMKX*d|eC)%r06yD(`L{m{e4Z?T ziRNi;)-1$g&({y>DPq_;&jR>unla7oZ@oQ#bl;Ra!{SXzXX$vXv`M5wDP~)5Nmq;d zn(vwPBTJjh%cS@;UEj1cY~0+ZxcZA`okRVOVW?4<#RRsR_@oYlXi0Tt+1Ek-;uupq z=CrI2*svFF5h=ZBs=O~K@v^7n(nmw(oX>qs_Byyey6F>@chr`$-vzS+*u;U@El5uq zgEsq`q%3A!ZSuMJl%QEiZwLA$4G}|VF3ehFPES`#!5*th*Ix`qf_D5kWLZV%dCGjs zQm98E)I4#+)cE-($-J%&Ktv}T^P(u&i%m55G3y5R)S&G3)BP0${QP+0wqVi^E zMRU+i+Syfxk}%ut8YG5;SO}Az{=KBT9f-wEA-oXf<0#|J`^j|Cci+&lYxr|^>~vT) zF~)cdE@4r>p#EYt_|JAIW

7f9;<%`WWsk?rD%{6pqtQwqtQCm@4T)DMlto8Vj&s zD_=ky8%Qq4y};UPk&4$uq>`|@<)=556)#^YNJd6ZPCJ~^xV-Y9Bh^x<>?nxcY1=v0 zxMW=27P*tN%5HXkTYb*ZxqD`LBZ)zR(JsgXQlupVgznIX@~!mv2Yo;lbn)=k9#pJG zWi-R00ZSs|6VK#$;>WnD>4AlYcA}GYl^CpsDhr6vi#EqIbs=u^a>5{HCe%e|EkMP> zD@g=^7*&Zd;>xVa6+gih28d8lK2xDe)DSAiuoE2WjHIWVnK}7f5bU~(PdE|MNKC}Y z!Ah((k1icxdU|SVyf;-YrZU{V=wlh&rXTm~_VYxYO>c*_FC7a-mpKUk8z1v$Nfs&=wgKCx{Tre)U!ZAD4?s5(^$YbKNii-lK~Wp1%U%V&9E=edFE2*@ww+} zvlB_P^E@68l(4FIfQmGLQC&ZQeAO2OdwSOu<-N*AY&2Z#m!oa-AGyd9tMz)Xm)y|N zL;O{}DZ0u&Pr@#yIow7Cwsw138>&JJ9fm~|2><*4sx9rW0erG zwLg%1**x`RTEa!@Q~$jEZ+zZ?fl4_(XdJd$=d@M-;jKi~M#c>2q%z6=4U3|LCF|80MuS!BFclR4i9)r|yLW%n)7xT$~%OjPxkb@X|L14Qtn? z@vw~@^`BTt+MBS-36yo8^R$hpVt*dFt0uvL(rZyQ3$Zi4?tM}r2E)TJxP@lQ;Y5rX z=eZsIGAdgQ=^xff_9I4_W#7e{Xilwmaz7ZKTz|}UqTGRKNMXRgj{lukV_P*$qjJ;L zqvS2~ieV71Wz80m@A)JowEpww^6i>aSR2x#MJ;XBNj^Af>MTDS+K)~Q#-o^{ZzmEZ zmRwwUSSc>pQx=n}@J2`p*zKp2TM!|n0;I(bV}B8;1~STs@Xuz(uk$9bjHZALJ&T0R z?czVKT4iu3@X&0Ig|sX+(cOs)384v*o*0N`>Mo{R&v{G64+Xm)xo>7_gU__71^=u@ zm#OfPmWw?NIZ6KE!aL=JR(RamV5G+b z@?E`Nascgfb*Ec8S`x}cth9&Js6D)-kGvbLw&>*)yI{;rp%N&&%tzlUB8GixuRM~t zLp~=~%_rF+8eedk#A$#W292iQ(-9>m$nZ<6ny*KfQ%@AnW=5BV>Egr()A&B36Zads zAs$r>SM<5BWlQO`D-?g80)>yRddh&=I_LzGL(c_JH+GSpU`XL@{GVxJM$8nVWJ4Gp z)IyV|l`f_%(8S;?8U`$xCli|$%(|kh|afO`F47z$8!%O01I4lp33q@#g%dj_eW~C zV;j-k@bObbmRzx5^H_!s(8W6;J&6>vYvP&*c&uVB=T^m_lSzAq=IVdyu|%59#dJ$X zM>a_(vgSpk!y+d8C9Ov63z2Rpxq<%-T(gb-J;YzLlw>BicB}X_ui&z0kr}YeRXjb& z2K*YNd*rcZLxDf^mOs+Z_S^pbefwYgY2sGjGBwy+dNF$X1*Q8Mf9-GCxO_g}^54&F z=}B-;nEy9ER!!Bp3M4Up!RUC{by7WY z&co9YNfAkXxfHzgHVZbT$3Nfx+dnG{GHL(bV=#pcpC~^oEF-w{Ush@@p8`G`${U7{1ubOI#bd>qCC7SY(Vs&UTf5i)%P&urndTiH zXb9<%;;fmD*I1JRYI%lX@9f!=45{*JJyb}d`qLnWcGsJ%KLJL3AqC2ig-JD|EU(XshKuNPbWams=V*Q~5KC0a=Z=l%T<4Up5DGT>?$-}1Pfx83)8okC zlDb-@JCSv7%;UP}r<}FrX;r_z;c9^VH{VT2o1)zptt%l}w`-}WD5JQ`9%gKGV3?Q6WJ0L?$M)Y3mzIk~s4CCf z;0FSsA-%XT%Y~=e{l6uAcAFPg!-GmV63=-IMFl7^!ip^%59JM61LPvvF-&wEXV3W3 z#iQXn94o*tHUu2V_>fS`j+X}Y4c!jeu#!ThnjPZ$W>+Wvsa$Y52Wm;fFraJJ4N4sf 
ztsA+$*Uq{nS`_9pKY!<|x4`&pu5T8q1yZso^6?S3XuQgnmY^tm{%5xT-HC475$bK; z8As4kx*0}_U;ogz0@w&#OWm|s7#%DxwY;+aI#QL`&;w%c?Z~Xj4OcvZ&6vL;J#u@;w@o2{{%?^GT98x#y)VnU4XKU;mjuKB@@M;e!(}u^1H- z*9w0bj!ETCJx#rxhe$<6XY)N*RO)LKZQS;*PAKrI#$LB^?b~xTeO}yH^S1a@RJ{rm z{RReff8Mea{Wbstt~Hn}%?&ye@LXt0&g8C!N^?Cf9n8pRR z>F#SqWtx83QfqcznkQX3ET%+g7FK&-kH!JDNqnDff^M%OTU;|M%BCo#Ji;Lkb0B6Z zFDL!0L3}i0-2Gg3k;L>-ZQk(m!!vH*)r7WCq(_c&?nDZ(8L1n6ZEnosvcKMy(t2y0QhjYLhxoUI zhpz1{_n04yKxF76+Us84R*QrrLb?B?5VFuIfq#96)x5mH=S^ki8B|>5+=l2X&~A}s zEo3joIyMQ<{-KyIuw2)?+f48NlaZ|binWLpD^2-o;g79cDD1T6fdk=-c(CR%J`BEP zlYS`ZtwwrG$$zKddr7BkfqwynnZ{gGcS}XLkHWPUXLYHo&CyX>nR4H#+ z8_Q+uEe>-t4zpAli77==c5HXzM##(Cn6zRXBV|O-D;fx4I7ZdTCh8t%@WS-dVHV&Y8%9tjHlyT_y~j)e7QowLwY_^92W>+t>dW*9c$ib+R#cbNYfI5 ze=}j`7>=d@2ncBsZ$(b0cL#-}{F|+L(Lj4;O8TaYs2qFMk61KFci~`fA#z&dv0rA&S=O^K}JYM8b`^m8M6M8HbRz3nOs2M zPTi$*b$Q@&r$T6Q!ixpznWhkVU;gSX-s{(#Q-4==ijr-J4BdF^I7cv@Zh})>C+c&= ze~8v;Z3RWpP4nB-saVDl?xnDGkJ7;Dv`(dg35$aR$JBKk~Ge~ zmo!dj?`am#0v^H1scB$ywH;J0C?Jn7F`%_oqs9)cC&pGG5MsjVc%4XwEhhzKZ8OPU z`tNz7A5hB^7+`^T(*DQgE5p-*t@GoX;3I%kayoW~ZqjIYY0e%mI!P+{LWE}kqGR4n z>Pjt(=fThO66GTu3Y<3%BgX6_0D$&B%NghhSES~g;_2CCLur67VWY}ZyK5=VZY**O z=b%r8L=dbmuv2pJadN-*^g{J4y09TaeeG2W)CkE(!QUg?K4ua_#^;Q>-%S{3qzEw1 z7jS-0Zx|NIKLX_%HpIc{>ejAK#PKJQYyp6RtLMs>TDo1v*?*jdqUUm$`{}la#%bAxv5^ zL$ymG-YNnV5@nA6p*<(`+n`}TRMj1tC(#;T7h6ZZV&zt@rrJ5~>mKTR9* z*Y2m=_Yu>NjrTk&|HUU=u=xL}?JT3B{Gzr$3`5rdLk}_3pLXaFqy`vz=n#=^kZ$P? 
z1*D~0N;*UV>FyK>2`Qyikemm6)_OlbU*7Y1X06}a=ghhHeeG-SxypXNJcRk&a3_jX zHMiXv-9_G-pvjf`w!PEig0o)@3q2hhB;8V7ZyT>m|ka_KYsl}rE0 z-DCIo3pO$Ob0kLJ$QUs$;iYXN5*J%$%WSI~CH7oiMy@WT&We)#k@iL_vfeXQb;{z| z=lv$38wNx13{$82$0@-bIND4KE;9e_A6XvYj0xEhD(Ag_9`KVlsLLOxbG!S3JRQTD zb2~yNGHTT2-PaMOUG}^ksnf8nejLl2AR&{_xH3Y3EpvvaXw&cJ+CfELmuU1WKLehb zuTHOd;dfmcE=24jKj4_m9ieFhszNgRAB%>!ro!2jnNG?JlkU zcJ*`A>XU~gJbx05Hq9Fnu!4U(q2g)Eg}KrHFS&svN<>>+I8@ndIY&+uy>Kv{laRdp|KhJsJgRIGLRM#fjyqxfd(Z zcXKvUBur22Bne5B(2!;q!JzGc>C{io&S}gF-2mFR9;Xd{?g`P(@Q@-NKP3c)9;iti z$#BZUTK~Yq2%Qlax(mQhf2pZ5>2y5-ay%Vv+Z~mP7>}S=SO$5du@^5*V`&L6VcWQ@ zAC04bmtWaPR`i@p^b_O(VT|Kr)ge|Zd8Yd(K-0eFBEfH^CWTtxRla65Pt?%At%Q)- zUYtQ31Anrd$Pf^0jH~bQgLl6X4CsDod}(*M$xy&v`-TT)o!GWRk@T1CfrpRojI#Tq zZ$F>>2)1DoJC-?rf33zy=vIjM@K5x8p7CQ;AwqC~5E~CuW}kw$y489USZkLgNkJL@ znsCz??>VNj^Ba|lnc{te7C1ntmBh`9djmrYKa5`YSw!s6S42O%bsYZ7^w^74;uuVQZ?X*Px6iDv;V8V%R*=5K6d&=@~EH?8y3>&Ry2B@ zrxeu$bVvi$D?m3@+w*7A31v8b>1m|R`eD?|G9^tH&0?#>$3!|rI#H<7ggwh+B=$ z8dVK_HB$Ao7OT@Jk2lmFxqQMEIc#~r*!A+*^mOc`y^&Ivnd?!|o&60k5vBAZFAzpXy#W%KS#bNISezOfgf!u=rn$)an zeQAlP&1kY|=c}7t3k?}3FcC7_1AX*C8r_9H9TW+l*dfM>CL^uVoMORI42z%Y>c>h~ z`{(~Lht7yh=lQ*ADc}zGY7;W^n<=$bNr$i^EVuPQ0In2G4w5_)l;@bBjR6Tg`69HQ zEa8lbej?-Ym^jLn$99lU8qMI%c3@rW>Kc}{j^6u44StV4#-5enrMM4u_PA1;Wr9v5 zH+J#myDQ{HY>qazUoykbhM+VhmY#^U?h!Y=OkbM25bzEY>&nCVaiwFCsOa=ZWdEXJ z(&rk{mabYdOF9!#^f&>U78jUzh$+Sq+SfIgwu;&G1lj z_^B4fbnyDcHP_?h`5}GSeQJA9f^w^6ONtjaLI(s~+UN?luoK}Q_y)G?*5TphJ-7u`nO)`j}BoV{ln(js3T zvlpa@7P}mNmGn~l1HmQeYAS4q;JwH3*O8nC9)DX@kc9(6@@q^s!W8 zzewL<+dw~XiPak7X`{08+s2~YXXSo`3%5{zoXi-&mSbH75dzbJ$;4EtHz0XmrErzEUOE5) zZ>a$Jk7RJ&(BQOfEL9+8IHgt0n`8K2b?pHg`^x#U3@s_}JZm>sLQMm^obJ%%15Y_U z^OKOE9+kk#>yvMYh#bjc+)SV-ndYhN8@)E|}=7p9`Jk1+sNQd?r;ZOrBfAuV?&!M93_45QjtGWN$Xy$oS3 zACLb)oe8FKVg2i8M=Wzj~<#xWg*$xB4_JgbzW7IM>)M9mu3JHo^f@xtw#g}HS5oY~SkLE$)CuiAHA4)EEIl(k7)B_-8K2u*Oa=(g(jj$Z zc-P8lhB)Qf{rn|#Vb}M_UU?C?+P#Ewb4wOvkd-SSasM8Vem{D(%2I2k0b?g3xxf1E z*PGo}G-+U&uXklj;|mVsEh|n67|;2a2T@G8%DN|3p%!sswv$@z%Eq ziZp!|96tMEY!v?&ro*DecSLsz~ 
zw-2r?vWyK+*>HLOgqeE@g5Pid*u|XOYjfktKqij4=|Xg4uchsHo+D(jynNxbI^g>S z`pRF&;PTV9U)cbX7++B#0NpW6w(Q3f9B{LK`26OW?tVzkrqu23g!%ETG~+c|-Sl6- zRkQGQQHb;xUyQ7)yVSu@vWCHr?=LdFJ|+uBXBS`h2Hi;tt}`C7i}|7RCqbA%2VVZm zf_4_Hh-wEFS}G-gwkIZ5mOUQ$8LDWD@}n`W9dmZ3QmUTt*9&@YrNUjjAnBSS`TA5~ z^@ax~AH9FZgfg(${fC|xtvi$cB;oilO9QSuwMIFfI?F58e|nvnplV*s`K(S_UX;7^ z}Y?9F94|9p` zi1E=n`rLa_U9DUXOJe5$14#J`$fS_W?c8A7u=TEqtoFmZmFTle%Ef4o5BHEjrJf+myZ7 zMgFrCtFZO!Y@Xqe7=$fVm@c*9_hF_3bxyg>!gCVi=2RAqH(mkc2~4RLYn5C$%fUy? z62R7{_6Nk(E~BuuduC6Hcl-5-3?vE*XA7MN2$sJEn)RMl${=ej{M20N?(fhi`G zu3j{0vZEdsMN8xA>H6e|o8&odS@WXSm%fpmB(li0&@z4Qns_-mCX_E1sf2oDw>i@g z0~~gI{0YR~+>*(mFO)i@{$9Nz6QO7h(l$5G_2-J$lg9yIfwi4#^sK%e2E+cvhlC!_ z;a%P+ezlL$84{?9B(hFc_U)LyP+?`)rUT zS1ZF@uCi|Bw9o@qB-q9E?a-NfQTLVDTZ@*QO7f-+@WX!5L-)=zP5q*(^|UIk*s7Hu zUCQvDTUvIO&N={PBT0#W5{0UK5W)shFcI-~*ABu}rFc{f&k+r5$V~hvJSqi`0wNS; zEahd(A_PZp{H8s@APh|QwU)a>=`{cd1JDl;OO1W050D_;jmvVMLaG!%*pNCJT%VN- zB)l`1FC6i&N=wuxxidZlzWC?Q_L*E^vvl4x7f1!ly3=1k0bGvRMA@*P5O3~=5vO+kL<9_L*E5R*e&RusQ zRdYh$>gvx|_I4)5vx?$v!P&8|J8btaG#lCm#?X&xMDhxrl~d#;NmlS*BKS)Mk5i-J z82|jnQqY;aI9;P;XixUrYaYJk3^EwuB75A0(3#b&$6-=zhr8`-g!>PZKzvj|2@STw z(~s-;pkoy(9OyJg4`|Cl4zo9*=H8WYeRGSx3QDWGSe>p;EY{Rnbr#7Q|0Xp*>mEjy zen1VREbN-4ADroMVaN)!z|{CahBXDmzEfg_*ZF zF`klCgB3v01aq#<&mb?e&oVS}1tPG;U8F(KB&Rhd_V;5qv+MY1pl4t6-1Lv8tEJ{K1Dz$up)IgG3c;>Qh1 zC(Q+MN$$JPT*;r>pDDs1eOX1Jv+(FoP79MSR{G4HZHZ{$e6&78DdC2x$7t%a+L-W4 zF8t~rHC z*Em$zg3(Wa8~IAo9WuhuFJu{;6n<>pWN*r`?^DUNN0&o@158y#Y$pR1z4qF=@}K?_ z0!=zkQ<2aMbw;*&&=cQth9uNX9lpCaC9 zPI`CnyH!PT9KspT&$VgXx!sg9+M+P^x^?cFB=Q`$4|b)1*cd;8V5wBG2evCUeW9be zG2<)v#!oAIvOMfR?cB=nG^~E|H?0_<3yZ7#025SO=D- z$k~@gNN3`tX4#4+^uXXYfbiHkDn-r$P{>1d)+T%^aQ95Zvti^f1lz zoqTK}G1juI6KP}bmBofD=1t~(+7$JY!Xhj`t3FES>z8hF34A>B;|4d8yE{FX6Fd7efDW<9KjYDFo0)0PLZ z>R1#S=moE&7(8Z=rgJPC8!x<6w8)|UMgjLbY>b4KGiERfL{adVlZ-rWBpsQ$zp)3U z2gub?Z-6<6nu>1Zkm||pW&O7j`W)iBB9m9^UBRy@T(nxw7-Z`VpD7F^KJa{?=XXf} z`D+o|g%k81Y-@IjZSMFk!fzL0Pl9%JQ1ckdiG3^)Eg{(0c7c7vwm|to#Fb5*;v-#a4{o 
znYsS;Tq3RG>A$pZ5&f6RG64rfNK8y{V#LJW90FivuIe`3!2muBPtbji;Wq9>m5D6pO@0Z%fMjw<5pe*a7F=F##O`6NM_i8L1+4b;*=7b*nIBgp&JEE&avt1 zoTrb_-Cx*~0Aa=#$9-cFkwajF)azh`IZQtBnMQxVtE!ZXu0>PRp9cf1Yi`(`ZM^O=)DD z-}Y9Q@N@Z3v>r+q>ESiwa2x%XDc255_Nnwj^OJeVTt+TRy@F9$4omw3q@<_JVr@NZ z+%05;jm}^C8vWG}UsqYCA0npvq3U?j+om*_D6@qE@ct$yGof!Q;dPvJQ&QNq|bM_xI_P=@q;ms$Vw~Yck3whYcAL z6dTq?m&g_Q?@rzG(hT?Qc1d0hyz(dl%eeEexaqb(6`Vr2!~OgDa$~CX-LvEF1;?9# zDb`v^wG`ridgd<{vL{K`lhFOU7I3o6+>R7n;4RsTR_uzhnfN{IR)jPdV)(m%PXjUT2?YM&OiKvlw+2a9ZYHriIr05_pXO*J(i|*SQTr62b{W zT6lfmguI{{CldtzDFob&LSjQ{EwMwxBd`p~fM%P8?f^h&%ot?0CgDh)AF2A=YM8ny zW)ENrbRdSLu~l+VPCWZhL~~WKcmg8bc0_G1EQ9yJvkj#wYtQpn^u~0d#2(~Pm5eYA zXjMX^pWg!;Nkm=UV||3_u7(o;Nn}!ga{CR`IbyVhQ1AWME=*i!)Jc zA*>ME6B!H#XsW4Z>LZ@bs<=?h2m(Vc91uD1y*Ie*Kz9*3-~$k$)(v|EP&OAX%Ak*R zwyTzIkGak#mOjNk82)?rz;n)OcLoPs2(jvol8Fz|;SnI_HSoOKeLdyr7X(aro-lEI zBsPj;@HlfV-E~=ts=2Pm%YE~Sg0CmRu%AD8|8~XH$MW)VJ+%x-ya-ZmvR{OVi0#t2 zoE<0b%0cd4QA8Q+{~;wMC98lM^fssjpbM!HuF_Q+w&~1E@E#f$+24b1V4oXfY+6HsOo7F3Ajzc14^HQ?P zI!ckup-BmYvggj$^916yEoybh&%fL`M+bP-!sL?w46es8!xSMd3I%5M21Ojsz#lpC zkru4=SrE4?a_diA9+Uy6*X_ZRg0tg1Rq^d8s3pUrq)5l@HEmq_l?U=U0&Kt2 zk30=RwRkmkIIc3Noiw*yN--aIN_^B&^I4LO;JUxBvoOJc_)2s~b^uEUWTD;VouzJz z)eNsQ8+BCvQ>kz2XQir2=^szUL#peUIUji37#iG5^X})f+5HExS{-BmkmHQ4-Kl)9 z1xzXV$)xg6>whffx*sT!J`!)+h^?Ax%tBa(D*a8)uLWoqVr~QGcyj(x_Wz$c2K@9kO=)x!m#g6ewNfY=9;H(&Nh*J1M8Axvb*`WqZ z$V6P17zfz7Y`H_fNZEOF%e78mUT00W>n<7IZ*tsUdn57LncPe`^h{c2fv(`W9xWlu zkAY|Let(e^jr{%7eJX6@$EvtTEYouE7)VQ)5dZe{MmhO9+MjRKcouz2Hm+hAss9ik zGg!9qaGpPb3KdDr7&|)$i1t6`h_0hjLK61EY6tRWZ2jz+peeBrIPv*m_7?L_39G3m>jg`?@5H zN(5es@UoX3V2VP~zp$*V9Ih7}IAoU^Q-yKNd7h&?$ZQbaXms=bYI^l6QYQtZfi`OW z_FbdJ&#?+VIy|O8-**G)ta@g8xexKNqq{Qxoac}5XLS+^_3=rXm*wWQTt4y7F8s#F zWiG%HrqV-BYu?kK@A|IjR6k9nQ2%A6S|tBtfrzce<(st_LI+yk@2h`p1;~UWT`a>Rg@)^j85y0W7OpG$A3A&J)xKo80 zUdN#wZ9J9UrdIk3>$AO}CL5C@k%T)c?zcfZ_q%FgOvu6N%C06VZzcl;D`NNBOuR0b z1BM|i4`%UFWePDoJ`XISn8XPAB3asP%AbpX1q_RDn1YhvOxJMOWYIn@yDoLGyR-M? 
zJ_!YxZeCtTPEnd|&5-&7&pToA&S!Bw>h^h_I<46g`v0XHKC2~P(+8|tIV~&3}-h4VZqW00NVwE*$tZ1B2|M%EWqFj{DbAR^bvAO#tonT9o z{qH_B;0j%Ei#6;99OsYN*zo5PPE=c_89Dp62sW=|T^JtnqIr!L(Y%%uKKYg1%t(fi zm4#hA8D{>X_&f%n{SU2&7mhzz&{b0$C-o+qF*H}(k*K=Ny!Qav}4)pV=1Hi8>-#oekbdcM!d!Ad;drsxWnza3dUSqnm6I+mjHJz3B z^$$!JgnyC|BJ6Gp>fE5E+usF`?*bK8iu}x!()9&o0Vq9KQ{*@Y3Q4M*V&>7Scc0-^ zwQG{-p2091juk#VSMSSEkzopdG<(}4Q@)_PJ#rDN`Y&H4Wh_#xTk9R@^Wu6dXa&ms z6*#E*U$E){i!(D%=SL*=^+n|?AimWB##RU>fIia#e-MQX-7clo9Pxnxmsc^tb$<6b z`7o8(Ny1L~Qq!f$KKi0RSxXVOFXRMn;dmw5M^^He;m>kVqf>{ncO14hkm>o{w8Ki$ zC%Lcb&g-(NOrNr3Ih(f1`qZ5s;zI#FH6G9F1$~IO&}dm~=3&D^QEDJcQkwJ$A+u=;#tSJo ze>}dHrB8YeZ?0cO=3tS>$WlEvZDkL6qCV>VVQQ@Bnyhq`$1W`+%GYz+-Sy@@ud`&c zvAASlta{J`4=WU3uJ6uwlyB6qWwmO=be*Ug14Sz%aZ`Gjx*}4E3&F5R>pCxTvbH3h zPu{oXbTHC=hu$%VfrSQ(+nV#QMSZ5c1`#bZ9(*<@$!x@=a^c=Bf97!$tjxhfA_W2b zI;!-FnN5;b$!@3-Odgnggt?BIEXE=YGO~{zGLSQX#5w`GQdN4u%*P`ictq$(8`SQf z7y9jODV&53HWAc+lvN$S9D#msaQ%mY(lvrj!`0>%ZxrU98Q06fs~iJ^CvSyoJmfKX zUnbsEQV_gVPm62Jv|2B$aj3dncl{5SGAf9}pJhN&!*fu>~}7$E=v z5LtC2FN1#T1wNUp*5_>Y@htov3E6a++uF(c{gmbPv#&LDjEb;ZXS z=~FcW%{v}#2?~g3f*^4?(7|lk+a-5;l_gXw)MU$u^fA&(t=Jnw$+9MAOMLW zkdtCbcOQI_e5tOThx;oq}j0IDXqT5ghBrgOBhKJe2rByBEy$>*| zdSRROE6+=KZmcL_Q>G^lTi|Q%Yzb$sF`;zkQEHNS(+`hsvpq2o1v?40SNX8=`u#O| z&*Kc`jPf*FP;^DB2U`oNHh)<8{RJ*b5Q_k$SWwDN!xELNU_mDn;z0h@wldHx&7V%d-^s3d8N!*uPh9(P)@>tu~9Ol_L8~Bl@gYwY!|TsBmb47LFT`0%(}3 zE~8UoPbN%#?;BX?qklfGe5#e!hrC5!>!G9aGk z%AEDVov?p?iClUkpli@5wx{DBnSJd`c=On3nxanQB%@25c`vY`vMW^qtwUta*v^rbcWQU%*Z-p{JsUDMV#nGX9Nan^8 zVNwW>jBU|(I5P$HqbXgy(yHiAsArjtaCWPdinfWuT1mFKMq6dvYxAla!H4rx3C(_@ z2LFP6(iK$6w_}g`@hqRlK2VIWFhh9&j}0=aJvR78q@rS2bt1JewRSgkwqx<`YsuVa zVJ{*A?ntoTP6L_hlS-lmU8CD@3Vu7(LWYh)@;J&`;G}M7BiZbrmAZ~hAHa;ue$es! 
zMfmo_UOpGwV_D|bxbPG?k+8T;@tIG?ZWD<@q&g#SA9y}6q&z|3uHc_|Rv+wGY7PAG zkub=y9Lgb&YW6#OOi^~N(;T4P_#ye`J>%t0{$=NLUJg@jlopQ%u|)B`3^CYP^&UMk zLdlqlFR-=QFXt4%n32I_;*Y9Vw%1;Mu_HY~dDeAb13E(g<`O#s)X^H1kgmfjLy}c^ zfHcDq-I|{4yiyd&{p(1nP45Su9uE9XIACk;_Z)@7n*&L>X$a?DqKHs;CE{x}ug9tN zR*@4p9V1-cstAQe1_$CX3&|4|_VNsk2fU}d6P?GH)OSFsb5pUjdnUB~g3DFHx>@wY z8djLiD*wB5%s%of>SVdLUiM!8>v`Cs95nI%PWrY{Xs;nO1Kpr>^?M$hf(^17Cq6kW zs;5W$z*E8&CkF?ND4xo_u~CpCuV5JEMxhq0g-aZvnY@bn6`XQmZjo3ZV(+b?u)b%1 zadn64ARNf4QxgkdRMPOzaO(50yfA35{?{!Og% Z|FRx GraphDBInterface : +async def get_graph_engine() -> GraphDBInterface: """Factory function to get the appropriate graph client based on the graph type.""" config = get_graph_config() diff --git a/cognee/infrastructure/databases/graph/neo4j_driver/adapter.py b/cognee/infrastructure/databases/graph/neo4j_driver/adapter.py index 1121a24d5..e6520e4e2 100644 --- a/cognee/infrastructure/databases/graph/neo4j_driver/adapter.py +++ b/cognee/infrastructure/databases/graph/neo4j_driver/adapter.py @@ -2,7 +2,7 @@ import logging import asyncio from textwrap import dedent -from typing import Optional, Any, List, Dict +from typing import Optional, Any, List, Dict, Union from contextlib import asynccontextmanager from uuid import UUID from neo4j import AsyncSession @@ -432,3 +432,49 @@ class Neo4jAdapter(GraphDBInterface): ) for record in result] return (nodes, edges) + + async def get_filtered_graph_data(self, attribute_filters): + """ + Fetches nodes and relationships filtered by specified attribute values. + + Args: + attribute_filters (list of dict): A list of dictionaries where keys are attributes and values are lists of values to filter on. + Example: [{"community": ["1", "2"]}] + + Returns: + tuple: A tuple containing two lists: nodes and edges. 
+ """ + where_clauses = [] + for attribute, values in attribute_filters[0].items(): + values_str = ", ".join(f"'{value}'" if isinstance(value, str) else str(value) for value in values) + where_clauses.append(f"n.{attribute} IN [{values_str}]") + + where_clause = " AND ".join(where_clauses) + + query_nodes = f""" + MATCH (n) + WHERE {where_clause} + RETURN ID(n) AS id, labels(n) AS labels, properties(n) AS properties + """ + result_nodes = await self.query(query_nodes) + + nodes = [( + record["id"], + record["properties"], + ) for record in result_nodes] + + query_edges = f""" + MATCH (n)-[r]->(m) + WHERE {where_clause} AND {where_clause.replace('n.', 'm.')} + RETURN ID(n) AS source, ID(m) AS target, TYPE(r) AS type, properties(r) AS properties + """ + result_edges = await self.query(query_edges) + + edges = [( + record["source"], + record["target"], + record["type"], + record["properties"], + ) for record in result_edges] + + return (nodes, edges) \ No newline at end of file diff --git a/cognee/infrastructure/databases/graph/networkx/adapter.py b/cognee/infrastructure/databases/graph/networkx/adapter.py index a72376082..d249b6336 100644 --- a/cognee/infrastructure/databases/graph/networkx/adapter.py +++ b/cognee/infrastructure/databases/graph/networkx/adapter.py @@ -6,7 +6,7 @@ import json import asyncio import logging from re import A -from typing import Dict, Any, List +from typing import Dict, Any, List, Union from uuid import UUID import aiofiles import aiofiles.os as aiofiles_os @@ -301,3 +301,39 @@ class NetworkXAdapter(GraphDBInterface): logger.info("Graph deleted successfully.") except Exception as error: logger.error("Failed to delete graph: %s", error) + + async def get_filtered_graph_data(self, attribute_filters: List[Dict[str, List[Union[str, int]]]]): + """ + Fetches nodes and relationships filtered by specified attribute values. 
+ + Args: + attribute_filters (list of dict): A list of dictionaries where keys are attributes and values are lists of values to filter on. + Example: [{"community": ["1", "2"]}] + + Returns: + tuple: A tuple containing two lists: + - Nodes: List of tuples (node_id, node_properties). + - Edges: List of tuples (source_id, target_id, relationship_type, edge_properties). + """ + # Create filters for nodes based on the attribute filters + where_clauses = [] + for attribute, values in attribute_filters[0].items(): + where_clauses.append((attribute, values)) + + # Filter nodes + filtered_nodes = [ + (node, data) for node, data in self.graph.nodes(data=True) + if all(data.get(attr) in values for attr, values in where_clauses) + ] + + # Filter edges where both source and target nodes satisfy the filters + filtered_edges = [ + (source, target, data.get('relationship_type', 'UNKNOWN'), data) + for source, target, data in self.graph.edges(data=True) + if ( + all(self.graph.nodes[source].get(attr) in values for attr, values in where_clauses) and + all(self.graph.nodes[target].get(attr) in values for attr, values in where_clauses) + ) + ] + + return filtered_nodes, filtered_edges \ No newline at end of file diff --git a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py index 96f026b4f..6cbe45655 100644 --- a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +++ b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py @@ -10,6 +10,7 @@ from cognee.infrastructure.files.storage import LocalStorage from cognee.modules.storage.utils import copy_model, get_own_properties from ..models.ScoredResult import ScoredResult from ..vector_db_interface import VectorDBInterface +from ..utils import normalize_distances from ..embeddings.EmbeddingEngine import EmbeddingEngine class IndexSchema(DataPoint): @@ -141,6 +142,34 @@ class LanceDBAdapter(VectorDBInterface): score = 0, ) for result 
in results.to_dict("index").values()] + async def get_distances_of_collection( + self, + collection_name: str, + query_text: str = None, + query_vector: List[float] = None, + with_vector: bool = False + ): + if query_text is None and query_vector is None: + raise ValueError("One of query_text or query_vector must be provided!") + + if query_text and not query_vector: + query_vector = (await self.embedding_engine.embed_text([query_text]))[0] + + connection = await self.get_connection() + collection = await connection.open_table(collection_name) + + results = await collection.vector_search(query_vector).to_pandas() + + result_values = list(results.to_dict("index").values()) + + normalized_values = normalize_distances(result_values) + + return [ScoredResult( + id=UUID(result["id"]), + payload=result["payload"], + score=normalized_values[value_index], + ) for value_index, result in enumerate(result_values)] + async def search( self, collection_name: str, @@ -148,6 +177,7 @@ class LanceDBAdapter(VectorDBInterface): query_vector: List[float] = None, limit: int = 5, with_vector: bool = False, + normalized: bool = True ): if query_text is None and query_vector is None: raise ValueError("One of query_text or query_vector must be provided!") @@ -162,26 +192,7 @@ class LanceDBAdapter(VectorDBInterface): result_values = list(results.to_dict("index").values()) - min_value = 100 - max_value = 0 - - for result in result_values: - value = float(result["_distance"]) - if value > max_value: - max_value = value - if value < min_value: - min_value = value - - normalized_values = [] - min_value = min(result["_distance"] for result in result_values) - max_value = max(result["_distance"] for result in result_values) - - if max_value == min_value: - # Avoid division by zero: Assign all normalized values to 0 (or any constant value like 1) - normalized_values = [0 for _ in result_values] - else: - normalized_values = [(result["_distance"] - min_value) / (max_value - min_value) for result 
in - result_values] + normalized_values = normalize_distances(result_values) return [ScoredResult( id = UUID(result["id"]), diff --git a/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py b/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py index 01691714b..97571a274 100644 --- a/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +++ b/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py @@ -11,6 +11,7 @@ from cognee.infrastructure.engine import DataPoint from .serialize_data import serialize_data from ..models.ScoredResult import ScoredResult from ..vector_db_interface import VectorDBInterface +from ..utils import normalize_distances from ..embeddings.EmbeddingEngine import EmbeddingEngine from ...relational.sqlalchemy.SqlAlchemyAdapter import SQLAlchemyAdapter from ...relational.ModelBase import Base @@ -22,6 +23,19 @@ class IndexSchema(DataPoint): "index_fields": ["text"] } +def singleton(class_): + # Note: Using this singleton as a decorator to a class removes + # the option to use class methods for that class + instances = {} + + def getinstance(*args, **kwargs): + if class_ not in instances: + instances[class_] = class_(*args, **kwargs) + return instances[class_] + + return getinstance + +@singleton class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface): def __init__( @@ -162,6 +176,53 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface): ) for result in results ] + async def get_distances_of_collection( + self, + collection_name: str, + query_text: str = None, + query_vector: List[float] = None, + with_vector: bool = False + )-> List[ScoredResult]: + if query_text is None and query_vector is None: + raise ValueError("One of query_text or query_vector must be provided!") + + if query_text and not query_vector: + query_vector = (await self.embedding_engine.embed_text([query_text]))[0] + + # Get PGVectorDataPoint Table from database + PGVectorDataPoint = await 
self.get_table(collection_name) + + closest_items = [] + + # Use async session to connect to the database + async with self.get_async_session() as session: + # Find closest vectors to query_vector + closest_items = await session.execute( + select( + PGVectorDataPoint, + PGVectorDataPoint.c.vector.cosine_distance(query_vector).label( + "similarity" + ), + ) + .order_by("similarity") + ) + + vector_list = [] + + # Extract distances and find min/max for normalization + for vector in closest_items: + # TODO: Add normalization of similarity score + vector_list.append(vector) + + # Create and return ScoredResult objects + return [ + ScoredResult( + id = UUID(str(row.id)), + payload = row.payload, + score = row.similarity + ) for row in vector_list + ] + async def search( self, collection_name: str, diff --git a/cognee/infrastructure/databases/vector/pgvector/create_db_and_tables.py b/cognee/infrastructure/databases/vector/pgvector/create_db_and_tables.py index ef27e2889..f40299939 100644 --- a/cognee/infrastructure/databases/vector/pgvector/create_db_and_tables.py +++ b/cognee/infrastructure/databases/vector/pgvector/create_db_and_tables.py @@ -10,5 +10,3 @@ async def create_db_and_tables(): await vector_engine.create_database() async with vector_engine.engine.begin() as connection: await connection.execute(text("CREATE EXTENSION IF NOT EXISTS vector;")) - - diff --git a/cognee/infrastructure/databases/vector/utils.py b/cognee/infrastructure/databases/vector/utils.py new file mode 100644 index 000000000..ced161ea3 --- /dev/null +++ b/cognee/infrastructure/databases/vector/utils.py @@ -0,0 +1,26 @@ +from typing import List + + +def normalize_distances(result_values: List[dict]) -> List[float]: + min_value = 100 + max_value = 0 + + for result in result_values: + value = float(result["_distance"]) + if value > max_value: + max_value = value + if value < min_value: + min_value = value + + normalized_values = [] + min_value = min(result["_distance"] for result in 
def normalize_distances(result_values: List[dict]) -> List[float]:
    """Min-max normalize the "_distance" field of vector-search result rows.

    Args:
        result_values: rows each carrying a numeric "_distance" entry.

    Returns:
        One value in [0, 1] per row (0 = closest). When every row has the
        same distance, all values are 0 to avoid division by zero; an
        empty input yields an empty list.

    Fixes: the original first computed min/max with a hand-rolled loop
    seeded with the magic values 100/0, then immediately recomputed both
    with min()/max() generators — the loop was dead code — and raised
    ValueError on an empty result set.
    """
    if not result_values:
        return []

    distances = [float(result["_distance"]) for result in result_values]

    min_value = min(distances)
    max_value = max(distances)

    if max_value == min_value:
        # Avoid division by zero: all rows are equally distant.
        return [0 for _ in distances]

    spread = max_value - min_value
    return [(distance - min_value) / spread for distance in distances]
from cognee.infrastructure.databases.graph.graph_db_interface import GraphDBInterface from cognee.modules.graph.cognee_graph.CogneeGraphElements import Node, Edge from cognee.modules.graph.cognee_graph.CogneeAbstractGraph import CogneeAbstractGraph -from cognee.infrastructure.databases.graph import get_graph_engine +import heapq +from graphistry import edges + class CogneeGraph(CogneeAbstractGraph): """ @@ -39,26 +42,33 @@ class CogneeGraph(CogneeAbstractGraph): def get_node(self, node_id: str) -> Node: return self.nodes.get(node_id, None) - def get_edges(self, node_id: str) -> List[Edge]: + def get_edges_of_node(self, node_id: str) -> List[Edge]: node = self.get_node(node_id) if node: return node.skeleton_edges else: raise ValueError(f"Node with id {node_id} does not exist.") + def get_edges(self)-> List[Edge]: + return edges + async def project_graph_from_db(self, adapter: Union[GraphDBInterface], node_properties_to_project: List[str], edge_properties_to_project: List[str], directed = True, node_dimension = 1, - edge_dimension = 1) -> None: + edge_dimension = 1, + memory_fragment_filter = []) -> None: if node_dimension < 1 or edge_dimension < 1: raise ValueError("Dimensions must be positive integers") try: - nodes_data, edges_data = await adapter.get_graph_data() + if len(memory_fragment_filter) == 0: + nodes_data, edges_data = await adapter.get_graph_data() + else: + nodes_data, edges_data = await adapter.get_filtered_graph_data(attribute_filters = memory_fragment_filter) if not nodes_data: raise ValueError("No node data retrieved from the database.") @@ -89,3 +99,81 @@ class CogneeGraph(CogneeAbstractGraph): print(f"Error projecting graph: {e}") except Exception as ex: print(f"Unexpected error: {ex}") + + async def map_vector_distances_to_graph_nodes(self, node_distances) -> None: + for category, scored_results in node_distances.items(): + for scored_result in scored_results: + node_id = str(scored_result.id) + score = scored_result.score + node 
=self.get_node(node_id) + if node: + node.add_attribute("vector_distance", score) + else: + print(f"Node with id {node_id} not found in the graph.") + + async def map_vector_distances_to_graph_edges(self, vector_engine, query) -> None: # :TODO: When we calculate edge embeddings in vector db change this similarly to node mapping + try: + # Step 1: Generate the query embedding + query_vector = await vector_engine.embed_data([query]) + query_vector = query_vector[0] + if query_vector is None or len(query_vector) == 0: + raise ValueError("Failed to generate query embedding.") + + # Step 2: Collect all unique relationship types + unique_relationship_types = set() + for edge in self.edges: + relationship_type = edge.attributes.get('relationship_type') + if relationship_type: + unique_relationship_types.add(relationship_type) + + # Step 3: Embed all unique relationship types + unique_relationship_types = list(unique_relationship_types) + relationship_type_embeddings = await vector_engine.embed_data(unique_relationship_types) + + # Step 4: Map relationship types to their embeddings and calculate distances + embedding_map = {} + for relationship_type, embedding in zip(unique_relationship_types, relationship_type_embeddings): + edge_vector = np.array(embedding) + + # Calculate cosine similarity + similarity = np.dot(query_vector, edge_vector) / ( + np.linalg.norm(query_vector) * np.linalg.norm(edge_vector) + ) + distance = 1 - similarity + + # Round the distance to 4 decimal places and store it + embedding_map[relationship_type] = round(distance, 4) + + # Step 4: Assign precomputed distances to edges + for edge in self.edges: + relationship_type = edge.attributes.get('relationship_type') + if not relationship_type or relationship_type not in embedding_map: + print(f"Edge {edge} has an unknown or missing relationship type.") + continue + + # Assign the precomputed distance + edge.attributes["vector_distance"] = embedding_map[relationship_type] + + except Exception as ex: + 
print(f"Error mapping vector distances to edges: {ex}") + + + async def calculate_top_triplet_importances(self, k = int) -> List: + min_heap = [] + for i, edge in enumerate(self.edges): + source_node = self.get_node(edge.node1.id) + target_node = self.get_node(edge.node2.id) + + source_distance = source_node.attributes.get("vector_distance", 0) if source_node else 0 + target_distance = target_node.attributes.get("vector_distance", 0) if target_node else 0 + edge_distance = edge.attributes.get("vector_distance", 0) + + total_distance = source_distance + target_distance + edge_distance + + heapq.heappush(min_heap, (-total_distance, i, edge)) + if len(min_heap) > k: + heapq.heappop(min_heap) + + + return [edge for _, _, edge in sorted(min_heap)] + diff --git a/cognee/modules/graph/cognee_graph/CogneeGraphElements.py b/cognee/modules/graph/cognee_graph/CogneeGraphElements.py index 8235cb24d..cecb0a272 100644 --- a/cognee/modules/graph/cognee_graph/CogneeGraphElements.py +++ b/cognee/modules/graph/cognee_graph/CogneeGraphElements.py @@ -1,5 +1,5 @@ import numpy as np -from typing import List, Dict, Optional, Any +from typing import List, Dict, Optional, Any, Union class Node: """ @@ -21,6 +21,7 @@ class Node: raise ValueError("Dimension must be a positive integer") self.id = node_id self.attributes = attributes if attributes is not None else {} + self.attributes["vector_distance"] = float('inf') self.skeleton_neighbours = [] self.skeleton_edges = [] self.status = np.ones(dimension, dtype=int) @@ -55,6 +56,12 @@ class Node: raise ValueError(f"Dimension {dimension} is out of range. 
Valid range is 0 to {len(self.status) - 1}.") return self.status[dimension] == 1 + def add_attribute(self, key: str, value: Any) -> None: + self.attributes[key] = value + + def get_attribute(self, key: str) -> Union[str, int, float]: + return self.attributes[key] + def __repr__(self) -> str: return f"Node({self.id}, attributes={self.attributes})" @@ -87,6 +94,7 @@ class Edge: self.node1 = node1 self.node2 = node2 self.attributes = attributes if attributes is not None else {} + self.attributes["vector_distance"] = float('inf') self.directed = directed self.status = np.ones(dimension, dtype=int) @@ -95,6 +103,12 @@ class Edge: raise ValueError(f"Dimension {dimension} is out of range. Valid range is 0 to {len(self.status) - 1}.") return self.status[dimension] == 1 + def add_attribute(self, key: str, value: Any) -> None: + self.attributes[key] = value + + def get_attribute(self, key: str, value: Any) -> Union[str, int, float]: + return self.attributes[key] + def __repr__(self) -> str: direction = "->" if self.directed else "--" return f"Edge({self.node1.id} {direction} {self.node2.id}, attributes={self.attributes})" diff --git a/cognee/modules/graph/utils/__init__.py b/cognee/modules/graph/utils/__init__.py index 6fbe2ee99..c4fa0d654 100644 --- a/cognee/modules/graph/utils/__init__.py +++ b/cognee/modules/graph/utils/__init__.py @@ -2,3 +2,4 @@ from .expand_with_nodes_and_edges import expand_with_nodes_and_edges from .get_graph_from_model import get_graph_from_model from .get_model_instance_from_graph import get_model_instance_from_graph from .retrieve_existing_edges import retrieve_existing_edges +from .convert_node_to_data_point import convert_node_to_data_point diff --git a/cognee/modules/graph/utils/convert_node_to_data_point.py b/cognee/modules/graph/utils/convert_node_to_data_point.py new file mode 100644 index 000000000..292f53733 --- /dev/null +++ b/cognee/modules/graph/utils/convert_node_to_data_point.py @@ -0,0 +1,23 @@ +from cognee.infrastructure.engine 
def convert_node_to_data_point(node_data: dict) -> DataPoint:
    """Rehydrate a graph-store node dict into its concrete DataPoint subclass.

    The subclass is looked up by node_data["type"] among all transitive
    subclasses of DataPoint and constructed from the remaining fields.

    Raises:
        ValueError: if no DataPoint subclass matches node_data["type"]
            (previously this fell through to an opaque
            ``TypeError: 'NoneType' object is not callable``).
    """
    subclass = find_subclass_by_name(DataPoint, node_data["type"])

    if subclass is None:
        raise ValueError(f"No DataPoint subclass named '{node_data['type']}' found.")

    return subclass(**node_data)


def get_all_subclasses(cls):
    """Return every direct and indirect subclass of *cls* (depth-first)."""
    subclasses = []

    for subclass in cls.__subclasses__():
        subclasses.append(subclass)
        subclasses.extend(get_all_subclasses(subclass))  # Recursively get subclasses

    return subclasses


def find_subclass_by_name(cls, name):
    """Return the subclass of *cls* whose __name__ equals *name*, else None."""
    for subclass in get_all_subclasses(cls):
        if subclass.__name__ == name:
            return subclass

    return None
visited_properties[property_key] = 0 + + property_nodes, property_edges = await get_graph_from_model( + field_value, + True, + added_nodes, + added_edges, + visited_properties, + ) for node in property_nodes: if str(node.id) not in added_nodes: @@ -47,7 +73,20 @@ def get_graph_from_model(data_point: DataPoint, include_root = True, added_nodes excluded_properties.add(field_name) for item in field_value: - property_nodes, property_edges = get_graph_from_model(item, True, added_nodes, added_edges) + property_key = f"{str(data_point.id)}{field_name}{str(item.id)}" + + if property_key in visited_properties: + return [], [] + + visited_properties[property_key] = 0 + + property_nodes, property_edges = await get_graph_from_model( + item, + True, + added_nodes, + added_edges, + visited_properties, + ) for node in property_nodes: if str(node.id) not in added_nodes: diff --git a/cognee/pipelines/__init__.py b/cognee/pipelines/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/cognee/pipelines/retriever/__init__.py b/cognee/pipelines/retriever/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/cognee/pipelines/retriever/diffusion_retriever.py b/cognee/pipelines/retriever/diffusion_retriever.py new file mode 100644 index 000000000..a6b79310e --- /dev/null +++ b/cognee/pipelines/retriever/diffusion_retriever.py @@ -0,0 +1,25 @@ +from uuid import UUID +from enum import Enum +from typing import Callable, Dict +from cognee.shared.utils import send_telemetry +from cognee.modules.users.models import User +from cognee.modules.users.methods import get_default_user +from cognee.modules.users.permissions.methods import get_document_ids_for_user + +async def two_step_retriever(query: Dict[str, str], user: User = None) -> list: + if user is None: + user = await get_default_user() + + if user is None: + raise PermissionError("No user found in the system. 
async def diffusion_retriever(query: str, user, community_filter = []) -> list:
    """Diffusion-based retrieval over the knowledge graph.

    Placeholder: the algorithm is not implemented yet; calling this
    always raises NotImplementedError.
    """
    raise NotImplementedError
async def g_retriever(query: str, user, community_filter = []) -> list:
    """G-Retriever-style retrieval over the knowledge graph.

    Placeholder: the algorithm is not implemented yet; calling this
    always raises NotImplementedError.
    """
    raise NotImplementedError
def delete_duplicated_vector_db_elements(collections, results): #:TODO: This is just for now to fix vector db duplicates
    """Drop duplicate rows (by .id) from each collection's result list.

    Args:
        collections: collection names, parallel to *results*.
        results: one list of scored rows per collection.

    Returns:
        Dict mapping collection name -> rows with only the first
        occurrence of each id kept (duplicates are logged).

    Fixes: the inner loop variable used to shadow the ``results``
    parameter (``for collection, results in zip(collections, results)``),
    which worked only by accident of zip's early evaluation.
    """
    results_dict = {}
    for collection, collection_results in zip(collections, results):
        seen_ids = set()
        unique_results = []
        for result in collection_results:
            if result.id in seen_ids:
                print(f"Duplicate found in collection '{collection}': {result.id}")
            else:
                unique_results.append(result)
                seen_ids.add(result.id)
        results_dict[collection] = unique_results

    return results_dict
class Repository(DataPoint):
    """A code repository root, identified by its filesystem path."""
    path: str
    type: Optional[str] = "Repository"


class CodeFile(DataPoint):
    """A single source file plus its dependency links inside a Repository."""
    extracted_id: str  # actually file path
    type: Optional[str] = "CodeFile"
    source_code: Optional[str] = None
    part_of: Optional[Repository] = None
    depends_on: Optional[List["CodeFile"]] = None
    depends_directly_on: Optional[List["CodeFile"]] = None
    contains: Optional[List["CodePart"]] = None

    _metadata: dict = {
        "index_fields": ["source_code"]
    }


class CodePart(DataPoint):
    """A high-level fragment (class/function/imports/top-level code) of a CodeFile."""
    # part_of: Optional[CodeFile]
    source_code: str
    # Bug fix: "type" was annotated twice (a plain ``type: str`` shadowed
    # by the defaulted annotation below); only one declaration is kept.
    type: Optional[str] = "CodePart"

    _metadata: dict = {
        "index_fields": ["source_code"]
    }


class CodeRelationship(DataPoint):
    """A typed dependency between two files."""
    source_id: str
    target_id: str
    type: str  # between files
    relation: str  # depends on or depends directly


# Resolve the forward references ("CodeFile", "CodePart") used above.
CodeFile.model_rebuild()
CodePart.model_rebuild()
def main():
    """Build the file-dependency graph for a repo, enrich it, and dump nodes/edges."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("repo_path", help="Path to the repository")
    repo_path = arg_parser.parse_args().repo_path

    # Fail fast on a bogus path before doing any graph work.
    if not os.path.exists(repo_path):
        print(f"Error: The provided repository path does not exist: {repo_path}")
        return

    dependency_graph = asyncio.run(get_repo_file_dependencies(repo_path))
    dependency_graph = asyncio.run(enrich_dependency_graph(dependency_graph))

    for node in dependency_graph.nodes:
        print(f"Node: {node}")
        for _, target, data in dependency_graph.out_edges(node, data=True):
            print(f"  Edge to {target}, data: {data}")
def main():
    """Build the file-dependency graph for a repo and dump its nodes and edges."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("repo_path", help="Path to the repository")
    repo_path = arg_parser.parse_args().repo_path

    # Fail fast on a bogus path before doing any graph work.
    if not os.path.exists(repo_path):
        print(f"Error: The provided repository path does not exist: {repo_path}")
        return

    dependency_graph = asyncio.run(get_repo_file_dependencies(repo_path))

    for node in dependency_graph.nodes:
        print(f"Node: {node}")
        for _, target, data in dependency_graph.out_edges(node, data=True):
            print(f"  Edge to {target}, data: {data}")
def classify_documents(data_documents: list[Data]) -> list[Document]:
    """Instantiate the matching Document subclass for each Data row.

    The subclass is chosen by file extension via
    EXTENSION_TO_DOCUMENT_CLASS.

    Raises:
        KeyError: with an informative message when an extension has no
            registered Document class (previously the bare dict lookup
            raised an opaque ``KeyError: '<ext>'``).
    """
    documents = []

    for data_item in data_documents:
        document_class = EXTENSION_TO_DOCUMENT_CLASS.get(data_item.extension)

        if document_class is None:
            raise KeyError(f"Unsupported file extension: {data_item.extension}")

        documents.append(document_class(
            id=data_item.id,
            title=f"{data_item.name}.{data_item.extension}",
            raw_data_location=data_item.raw_data_location,
            name=data_item.name,
        ))

    return documents
os.path.join(repo_path, source), - os.path.join(repo_path, target), - "python_file", - graph.get_edge_data(source, target)["relation"], - ) - for source, target in graph.edges - ] - - return (repo, code_files, code_relationships) diff --git a/cognee/tasks/graph/extract_graph_from_data.py b/cognee/tasks/graph/extract_graph_from_data.py index ad6ae19d2..2bf4aeba4 100644 --- a/cognee/tasks/graph/extract_graph_from_data.py +++ b/cognee/tasks/graph/extract_graph_from_data.py @@ -24,11 +24,13 @@ async def extract_graph_from_data( (chunk, chunk_graph) for chunk, chunk_graph in zip(data_chunks, chunk_graphs) ] existing_edges_map = await retrieve_existing_edges( - chunk_and_chunk_graphs, graph_engine + chunk_and_chunk_graphs, + graph_engine, ) graph_nodes, graph_edges = expand_with_nodes_and_edges( - chunk_and_chunk_graphs, existing_edges_map + chunk_and_chunk_graphs, + existing_edges_map, ) if len(graph_nodes) > 0: diff --git a/cognee/tasks/repo_processor/__init__.py b/cognee/tasks/repo_processor/__init__.py index a1aeabcdc..05e111b29 100644 --- a/cognee/tasks/repo_processor/__init__.py +++ b/cognee/tasks/repo_processor/__init__.py @@ -4,4 +4,4 @@ logger = logging.getLogger("task:repo_processor") from .enrich_dependency_graph import enrich_dependency_graph from .expand_dependency_graph import expand_dependency_graph -from .get_repo_dependency_graph import get_repo_dependency_graph +from .get_repo_file_dependencies import get_repo_file_dependencies diff --git a/cognee/tasks/repo_processor/enrich_dependency_graph.py b/cognee/tasks/repo_processor/enrich_dependency_graph.py index c3aa9fd74..ba222ef3f 100644 --- a/cognee/tasks/repo_processor/enrich_dependency_graph.py +++ b/cognee/tasks/repo_processor/enrich_dependency_graph.py @@ -1,25 +1,38 @@ +import asyncio import networkx as nx from typing import Dict, List +from tqdm.asyncio import tqdm + +from cognee.infrastructure.engine import DataPoint +from cognee.shared.CodeGraphEntities import CodeFile +from 
def topologically_sort_subgraph(subgraph_node_to_indegree: Dict[str, int], graph: nx.DiGraph) -> List[str]:
    """Greedy topological sort of one weakly-connected subgraph.

    Repeatedly emits the pending node with the smallest outstanding
    count, decrementing the counters of its successors as it goes.
    The input mapping is not mutated.
    """
    ordered = []
    pending = dict(subgraph_node_to_indegree)

    while pending:
        # Pick the node with the fewest unresolved predecessors.
        current = min(pending, key=pending.get)
        ordered.append(current)

        for successor in graph.successors(current):
            if successor in pending:
                pending[successor] -= 1

        del pending[current]

    return ordered
node.id): + node_descendants.add(node.id) + + new_connections = [] + graph_engine = await get_graph_engine() + + for desc_id in node_descendants: + if desc_id not in topological_order[:topological_rank + 1]: continue - graph.add_edge(node, desc, relation='depends_on') -async def enrich_dependency_graph(graph: nx.DiGraph) -> nx.MultiDiGraph: + if desc_id in data_points_map: + desc = data_points_map[desc_id] + else: + node_data = await graph_engine.extract_node(desc_id) + desc = convert_node_to_data_point(node_data) + + new_connections.append(desc) + + node.depends_directly_on = node.depends_directly_on or [] + node.depends_directly_on.extend(new_connections) + + +async def enrich_dependency_graph(data_points: list[DataPoint]) -> list[DataPoint]: """Enriches the graph with topological ranks and 'depends_on' edges.""" - graph = nx.MultiDiGraph(graph) + nodes = [] + edges = [] + + for data_point in data_points: + graph_nodes, graph_edges = await get_graph_from_model(data_point) + nodes.extend(graph_nodes) + edges.extend(graph_edges) + + graph = nx.MultiDiGraph() + + simple_nodes = [(node.id, node.model_dump()) for node in nodes] + + graph.add_nodes_from(simple_nodes) + + graph.add_edges_from(edges) + topological_order = topologically_sort(graph) + node_rank_map = {node: idx for idx, node in enumerate(topological_order)} - for node in graph.nodes: - if node not in node_rank_map: + + # for node_id, node in tqdm(graph.nodes(data = True), desc = "Enriching dependency graph", unit = "node"): + # if node_id not in node_rank_map: + # continue + + # data_points.append(node_enrich_and_connect(graph, topological_order, node)) + + data_points_map = {data_point.id: data_point for data_point in data_points} + data_points_futures = [] + + for data_point in tqdm(data_points, desc = "Enriching dependency graph", unit = "data_point"): + if data_point.id not in node_rank_map: continue - node_enrich_and_connect(graph, topological_order, node) - return graph + + if isinstance(data_point, 
CodeFile): + data_points_futures.append(node_enrich_and_connect(graph, topological_order, data_point, data_points_map)) + + # yield data_point + + await asyncio.gather(*data_points_futures) + + return data_points diff --git a/cognee/tasks/repo_processor/expand_dependency_graph.py b/cognee/tasks/repo_processor/expand_dependency_graph.py index a6b724302..722bfa5c6 100644 --- a/cognee/tasks/repo_processor/expand_dependency_graph.py +++ b/cognee/tasks/repo_processor/expand_dependency_graph.py @@ -1,28 +1,43 @@ -import networkx as nx - +from uuid import NAMESPACE_OID, uuid5 +# from tqdm import tqdm +from cognee.infrastructure.engine import DataPoint +from cognee.shared.CodeGraphEntities import CodeFile, CodePart from cognee.tasks.repo_processor.extract_code_parts import extract_code_parts from cognee.tasks.repo_processor import logger - -def _add_code_parts_nodes_and_edges(graph, parent_node_id, part_type, code_parts): +def _add_code_parts_nodes_and_edges(code_file: CodeFile, part_type, code_parts) -> None: """Add code part nodes and edges for a specific part type.""" if not code_parts: - logger.debug(f"No code parts to add for parent_node_id {parent_node_id} and part_type {part_type}.") + logger.debug(f"No code parts to add for node {code_file.id} and part_type {part_type}.") return + part_nodes = [] + for idx, code_part in enumerate(code_parts): if not code_part.strip(): - logger.warning(f"Empty code part in parent_node_id {parent_node_id} and part_type {part_type}.") + logger.warning(f"Empty code part in node {code_file.id} and part_type {part_type}.") continue - part_node_id = f"{parent_node_id}_{part_type}_{idx}" - graph.add_node(part_node_id, source_code=code_part, node_type=part_type) - graph.add_edge(parent_node_id, part_node_id, relation="contains") + + part_node_id = uuid5(NAMESPACE_OID, f"{code_file.id}_{part_type}_{idx}") + + part_nodes.append(CodePart( + id = part_node_id, + type = part_type, + # part_of = code_file, + source_code = code_part, + )) + + # 
graph.add_node(part_node_id, source_code=code_part, node_type=part_type) + # graph.add_edge(parent_node_id, part_node_id, relation="contains") + + code_file.contains = code_file.contains or [] + code_file.contains.extend(part_nodes) -def _process_single_node(graph, node_id, node_data): +def _process_single_node(code_file: CodeFile) -> None: """Process a single Python file node.""" - graph.nodes[node_id]["node_type"] = "python_file" - source_code = node_data.get("source_code", "") + node_id = code_file.id + source_code = code_file.source_code if not source_code.strip(): logger.warning(f"Node {node_id} has no or empty 'source_code'. Skipping.") @@ -35,15 +50,14 @@ def _process_single_node(graph, node_id, node_data): return for part_type, code_parts in code_parts_dict.items(): - _add_code_parts_nodes_and_edges(graph, node_id, part_type, code_parts) + _add_code_parts_nodes_and_edges(code_file, part_type, code_parts) -def expand_dependency_graph(graph: nx.MultiDiGraph) -> nx.MultiDiGraph: +async def expand_dependency_graph(data_points: list[DataPoint]) -> list[DataPoint]: """Process Python file nodes, adding code part nodes and edges.""" - expanded_graph = graph.copy() - for node_id, node_data in graph.nodes(data=True): - if not node_data: # Check if node_data is empty - logger.warning(f"Node {node_id} has no data. 
Skipping.") - continue - _process_single_node(expanded_graph, node_id, node_data) - return expanded_graph + # for data_point in tqdm(data_points, desc = "Expand dependency graph", unit = "data_point"): + for data_point in data_points: + if isinstance(data_point, CodeFile): + _process_single_node(data_point) + + return data_points diff --git a/cognee/tasks/repo_processor/get_repo_dependency_graph.py b/cognee/tasks/repo_processor/get_repo_dependency_graph.py deleted file mode 100644 index 7f96bd49c..000000000 --- a/cognee/tasks/repo_processor/get_repo_dependency_graph.py +++ /dev/null @@ -1,61 +0,0 @@ -import os -import aiofiles -import networkx as nx - -from cognee.tasks.repo_processor.get_local_dependencies import get_local_script_dependencies - - -async def get_py_path_and_source(file_path, repo_path): - relative_path = os.path.relpath(file_path, repo_path) - try: - async with aiofiles.open(file_path, "r", encoding="utf-8") as f: - source_code = await f.read() - return relative_path, source_code - except Exception as e: - print(f"Error reading file {file_path}: {e}") - return relative_path, None - - -async def get_py_files_dict(repo_path): - """Get .py files and their source code""" - if not os.path.exists(repo_path): - return {} - - py_files_paths = ( - os.path.join(root, file) - for root, _, files in os.walk(repo_path) for file in files if file.endswith(".py") - ) - - py_files_dict = {} - for file_path in py_files_paths: - relative_path, source_code = await get_py_path_and_source(file_path, repo_path) - py_files_dict[relative_path] = {"source_code": source_code} - - return py_files_dict - - -def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bool = True) -> tuple: - if relative_paths: - file_path = os.path.relpath(file_path, repo_path) - dependency = os.path.relpath(dependency, repo_path) - return (file_path, dependency, {"relation": "depends_directly_on"}) - - -async def get_repo_dependency_graph(repo_path: str) -> nx.DiGraph: - 
"""Generate a dependency graph for Python files in the given repository path.""" - py_files_dict = await get_py_files_dict(repo_path) - - dependency_graph = nx.DiGraph() - - dependency_graph.add_nodes_from(py_files_dict.items()) - - for file_path, metadata in py_files_dict.items(): - source_code = metadata.get("source_code") - if source_code is None: - continue - - dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) - dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] - dependency_graph.add_edges_from(dependency_edges) - - return dependency_graph diff --git a/cognee/tasks/repo_processor/get_repo_file_dependencies.py b/cognee/tasks/repo_processor/get_repo_file_dependencies.py new file mode 100644 index 000000000..58f3857a9 --- /dev/null +++ b/cognee/tasks/repo_processor/get_repo_file_dependencies.py @@ -0,0 +1,87 @@ +import os +from uuid import NAMESPACE_OID, uuid5 +import aiofiles +from tqdm.asyncio import tqdm + +from cognee.infrastructure.engine import DataPoint +from cognee.shared.CodeGraphEntities import CodeFile, Repository +from cognee.tasks.repo_processor.get_local_dependencies import get_local_script_dependencies + + +async def get_py_path_and_source(file_path): + try: + async with aiofiles.open(file_path, "r", encoding="utf-8") as f: + source_code = await f.read() + return file_path, source_code + except Exception as e: + print(f"Error reading file {file_path}: {e}") + return file_path, None + + +async def get_py_files_dict(repo_path): + """Get .py files and their source code""" + if not os.path.exists(repo_path): + return {} + + py_files_paths = ( + os.path.join(root, file) + for root, _, files in os.walk(repo_path) for file in files if file.endswith(".py") + ) + + py_files_dict = {} + for file_path in py_files_paths: + absolute_path = os.path.abspath(file_path) + relative_path, source_code = await get_py_path_and_source(absolute_path) + py_files_dict[relative_path] 
= {"source_code": source_code} + + return py_files_dict + + +def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bool = False) -> tuple: + if relative_paths: + file_path = os.path.relpath(file_path, repo_path) + dependency = os.path.relpath(dependency, repo_path) + return (file_path, dependency, {"relation": "depends_directly_on"}) + + +async def get_repo_file_dependencies(repo_path: str) -> list[DataPoint]: + """Generate a dependency graph for Python files in the given repository path.""" + py_files_dict = await get_py_files_dict(repo_path) + + repo = Repository( + id = uuid5(NAMESPACE_OID, repo_path), + path = repo_path, + ) + + data_points = [repo] + + # dependency_graph = nx.DiGraph() + + # dependency_graph.add_nodes_from(py_files_dict.items()) + + async for file_path, metadata in tqdm(py_files_dict.items(), desc="Repo dependency graph", unit="file"): + source_code = metadata.get("source_code") + if source_code is None: + continue + + dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) + + data_points.append(CodeFile( + id = uuid5(NAMESPACE_OID, file_path), + source_code = source_code, + extracted_id = file_path, + part_of = repo, + depends_on = [ + CodeFile( + id = uuid5(NAMESPACE_OID, dependency), + extracted_id = dependency, + part_of = repo, + ) for dependency in dependencies + ] if len(dependencies) else None, + )) + # dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] + + # dependency_graph.add_edges_from(dependency_edges) + + return data_points + # return dependency_graph diff --git a/cognee/tasks/storage/add_data_points.py b/cognee/tasks/storage/add_data_points.py index b803c9dfd..33f9d7a70 100644 --- a/cognee/tasks/storage/add_data_points.py +++ b/cognee/tasks/storage/add_data_points.py @@ -1,3 +1,4 @@ +import asyncio from cognee.infrastructure.engine import DataPoint from cognee.infrastructure.databases.graph import get_graph_engine from 
cognee.modules.graph.utils import get_graph_from_model @@ -8,11 +9,13 @@ async def add_data_points(data_points: list[DataPoint]): nodes = [] edges = [] - for data_point in data_points: - property_nodes, property_edges = get_graph_from_model(data_point) + results = await asyncio.gather(*[ + get_graph_from_model(data_point) for data_point in data_points + ]) - nodes.extend(property_nodes) - edges.extend(property_edges) + for result_nodes, result_edges in results: + nodes.extend(result_nodes) + edges.extend(result_edges) graph_engine = await get_graph_engine() diff --git a/cognee/tasks/storage/index_data_points.py b/cognee/tasks/storage/index_data_points.py index 12903173a..03ad30f9d 100644 --- a/cognee/tasks/storage/index_data_points.py +++ b/cognee/tasks/storage/index_data_points.py @@ -16,6 +16,9 @@ async def index_data_points(data_points: list[DataPoint]): data_point_type = type(data_point) for field_name in data_point._metadata["index_fields"]: + if getattr(data_point, field_name, None) is None: + continue + index_name = f"{data_point_type.__tablename__}.{field_name}" if index_name not in created_indexes: @@ -35,12 +38,21 @@ async def index_data_points(data_points: list[DataPoint]): return data_points -def get_data_points_from_model(data_point: DataPoint, added_data_points = {}) -> list[DataPoint]: +def get_data_points_from_model(data_point: DataPoint, added_data_points = None, visited_properties = None) -> list[DataPoint]: data_points = [] + added_data_points = added_data_points or {} + visited_properties = visited_properties or {} for field_name, field_value in data_point: if isinstance(field_value, DataPoint): - new_data_points = get_data_points_from_model(field_value, added_data_points) + property_key = f"{str(data_point.id)}{field_name}{str(field_value.id)}" + + if property_key in visited_properties: + return [] + + visited_properties[property_key] = True + + new_data_points = get_data_points_from_model(field_value, added_data_points, visited_properties) for 
new_point in new_data_points: if str(new_point.id) not in added_data_points: @@ -49,7 +61,14 @@ def get_data_points_from_model(data_point: DataPoint, added_data_points = {}) -> if isinstance(field_value, list) and len(field_value) > 0 and isinstance(field_value[0], DataPoint): for field_value_item in field_value: - new_data_points = get_data_points_from_model(field_value_item, added_data_points) + property_key = f"{str(data_point.id)}{field_name}{str(field_value_item.id)}" + + if property_key in visited_properties: + return [] + + visited_properties[property_key] = True + + new_data_points = get_data_points_from_model(field_value_item, added_data_points, visited_properties) for new_point in new_data_points: if str(new_point.id) not in added_data_points: @@ -79,4 +98,3 @@ if __name__ == "__main__": data_points = get_data_points_from_model(person) print(data_points) - \ No newline at end of file diff --git a/cognee/tasks/summarization/summarize_code.py b/cognee/tasks/summarization/summarize_code.py index 31b86d325..277081f40 100644 --- a/cognee/tasks/summarization/summarize_code.py +++ b/cognee/tasks/summarization/summarize_code.py @@ -4,6 +4,7 @@ from uuid import uuid5 from pydantic import BaseModel +from cognee.infrastructure.engine import DataPoint from cognee.modules.data.extraction.extract_summary import extract_summary from cognee.shared.CodeGraphEntities import CodeFile from cognee.tasks.storage import add_data_points @@ -12,13 +13,16 @@ from .models import CodeSummary async def summarize_code( - code_files: list[CodeFile], summarization_model: Type[BaseModel] -) -> list[CodeFile]: + code_files: list[DataPoint], + summarization_model: Type[BaseModel], +) -> list[DataPoint]: if len(code_files) == 0: return code_files + code_files_data_points = [file for file in code_files if isinstance(file, CodeFile)] + file_summaries = await asyncio.gather( - *[extract_summary(file.source_code, summarization_model) for file in code_files] + *[extract_summary(file.source_code, 
summarization_model) for file in code_files_data_points] ) summaries = [ @@ -27,9 +31,9 @@ async def summarize_code( made_from = file, text = file_summaries[file_index].summary, ) - for (file_index, file) in enumerate(code_files) + for (file_index, file) in enumerate(code_files_data_points) ] await add_data_points(summaries) - return code_files, summaries + return code_files diff --git a/cognee/tests/tasks/graph/code_graph_test_data_generation.py b/cognee/tests/tasks/graph/code_graph_test_data_generation.py deleted file mode 100644 index 74ca2de71..000000000 --- a/cognee/tests/tasks/graph/code_graph_test_data_generation.py +++ /dev/null @@ -1,51 +0,0 @@ -import random -import string - -import numpy as np - -from cognee.shared.CodeGraphEntities import CodeFile, CodeRelationship - - -def random_str(n, spaces=True): - candidates = string.ascii_letters + string.digits - if spaces: - candidates += " " - return "".join(random.choice(candidates) for _ in range(n)) - - -def code_graph_test_data_generation(): - nodes = [ - CodeFile( - extracted_id=random_str(10, spaces=False), - type="file", - source_code=random_str(random.randrange(50, 500)), - ) - for _ in range(100) - ] - n_nodes = len(nodes) - first_source = np.random.randint(0, n_nodes) - reached_nodes = {first_source} - last_iteration = [first_source] - edges = [] - while len(reached_nodes) < n_nodes: - for source in last_iteration: - last_iteration = [] - tries = 0 - while ((len(last_iteration) == 0 or tries < 500)) and ( - len(reached_nodes) < n_nodes - ): - tries += 1 - target = np.random.randint(n_nodes) - if target not in reached_nodes: - last_iteration.append(target) - edges.append( - CodeRelationship( - source_id=nodes[source].extracted_id, - target_id=nodes[target].extracted_id, - type="files", - relation="depends", - ) - ) - reached_nodes = reached_nodes.union(set(last_iteration)) - - return (nodes, edges) diff --git a/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py 
b/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py deleted file mode 100644 index 755840b01..000000000 --- a/cognee/tests/tasks/graph/convert_graph_from_code_graph_test.py +++ /dev/null @@ -1,27 +0,0 @@ -import asyncio - -import pytest - -from cognee.shared.CodeGraphEntities import Repository -from cognee.tasks.graph.convert_graph_from_code_graph import ( - convert_graph_from_code_graph, -) -from cognee.tests.tasks.graph.code_graph_test_data_generation import ( - code_graph_test_data_generation, -) - - -def test_convert_graph_from_code_graph(): - repo = Repository(path="test/repo/path") - nodes, edges = code_graph_test_data_generation() - repo_out, nodes_out, edges_out = asyncio.run( - convert_graph_from_code_graph(repo, nodes, edges) - ) - - assert repo == repo_out, f"{repo = } != {repo_out = }" - - for node_in, node_out in zip(nodes, nodes_out): - assert node_in == node_out, f"{node_in = } != {node_out = }" - - for edge_in, edge_out in zip(edges, edges_out): - assert edge_in == edge_out, f"{edge_in = } != {edge_out = }" diff --git a/cognee/tests/unit/interfaces/graph/get_graph_from_huge_model_test.py b/cognee/tests/unit/interfaces/graph/get_graph_from_huge_model_test.py new file mode 100644 index 000000000..f75b84826 --- /dev/null +++ b/cognee/tests/unit/interfaces/graph/get_graph_from_huge_model_test.py @@ -0,0 +1,100 @@ +import asyncio +import random +import time +from typing import List +from uuid import uuid5, NAMESPACE_OID + +from cognee.infrastructure.engine import DataPoint +from cognee.modules.graph.utils import get_graph_from_model + +random.seed(1500) + +class Repository(DataPoint): + path: str + +class CodeFile(DataPoint): + part_of: Repository + contains: List["CodePart"] = [] + depends_on: List["CodeFile"] = [] + source_code: str + +class CodePart(DataPoint): + part_of: CodeFile + source_code: str + +CodeFile.model_rebuild() +CodePart.model_rebuild() + + +def nanoseconds_to_largest_unit(nanoseconds): + # Define conversion factors + 
conversion_factors = { + 'weeks': 7 * 24 * 60 * 60 * 1e9, + 'days': 24 * 60 * 60 * 1e9, + 'hours': 60 * 60 * 1e9, + 'minutes': 60 * 1e9, + 'seconds': 1e9, + 'milliseconds': 1e6, + 'microseconds': 1e3, + } + + # Iterate through conversion factors to find the largest unit + for unit, factor in conversion_factors.items(): + converted_value = nanoseconds / factor + if converted_value >= 1: + return converted_value, unit + + # If nanoseconds is smaller than a microsecond + return nanoseconds, 'nanoseconds' + + +async def test_circular_reference_extraction(): + repo = Repository(path = "repo1") + + code_files = [CodeFile( + id = uuid5(NAMESPACE_OID, f"file{file_index}"), + source_code = "source code", + part_of = repo, + contains = [], + depends_on = [CodeFile( + id = uuid5(NAMESPACE_OID, f"file{random_id}"), + source_code = "source code", + part_of = repo, + depends_on = [], + ) for random_id in [random.randint(0, 1499) for _ in range(random.randint(0, 5))]], + ) for file_index in range(1500)] + + for code_file in code_files: + code_file.contains.extend([CodePart( + part_of = code_file, + source_code = f"Part {part_index}", + ) for part_index in range(random.randint(1, 20))]) + + nodes = [] + edges = [] + + start = time.perf_counter_ns() + + results = await asyncio.gather(*[ + get_graph_from_model(code_file) for code_file in code_files + ]) + + time_to_run = time.perf_counter_ns() - start + + print(nanoseconds_to_largest_unit(time_to_run)) + + for result_nodes, result_edges in results: + nodes.extend(result_nodes) + edges.extend(result_edges) + + # for code_file in code_files: + # model_nodes, model_edges = get_graph_from_model(code_file) + + # nodes.extend(model_nodes) + # edges.extend(model_edges) + + assert len(nodes) == 1501 + assert len(edges) == 1501 * 20 + 1500 * 5 + +if __name__ == "__main__": + asyncio.run(test_circular_reference_extraction()) diff --git a/cognee/tests/unit/modules/graph/cognee_graph_elements_test.py 
b/cognee/tests/unit/modules/graph/cognee_graph_elements_test.py index d2a1b6c59..a3755a58f 100644 --- a/cognee/tests/unit/modules/graph/cognee_graph_elements_test.py +++ b/cognee/tests/unit/modules/graph/cognee_graph_elements_test.py @@ -8,7 +8,7 @@ def test_node_initialization(): """Test that a Node is initialized correctly.""" node = Node("node1", {"attr1": "value1"}, dimension=2) assert node.id == "node1" - assert node.attributes == {"attr1": "value1"} + assert node.attributes == {"attr1": "value1", 'vector_distance': np.inf} assert len(node.status) == 2 assert np.all(node.status == 1) @@ -95,7 +95,7 @@ def test_edge_initialization(): edge = Edge(node1, node2, {"weight": 10}, directed=False, dimension=2) assert edge.node1 == node1 assert edge.node2 == node2 - assert edge.attributes == {"weight": 10} + assert edge.attributes == {'vector_distance': np.inf,"weight": 10} assert edge.directed is False assert len(edge.status) == 2 assert np.all(edge.status == 1) diff --git a/cognee/tests/unit/modules/graph/cognee_graph_test.py b/cognee/tests/unit/modules/graph/cognee_graph_test.py index d05292d75..bad474023 100644 --- a/cognee/tests/unit/modules/graph/cognee_graph_test.py +++ b/cognee/tests/unit/modules/graph/cognee_graph_test.py @@ -77,11 +77,11 @@ def test_get_edges_success(setup_graph): graph.add_node(node2) edge = Edge(node1, node2) graph.add_edge(edge) - assert edge in graph.get_edges("node1") + assert edge in graph.get_edges_of_node("node1") def test_get_edges_nonexistent_node(setup_graph): """Test retrieving edges for a nonexistent node raises an exception.""" graph = setup_graph with pytest.raises(ValueError, match="Node with id nonexistent does not exist."): - graph.get_edges("nonexistent") + graph.get_edges_of_node("nonexistent") diff --git a/docker-compose.yml b/docker-compose.yml index afb216169..9c40979bc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -46,7 +46,7 @@ services: - 7687:7687 environment: - NEO4J_AUTH=neo4j/pleaseletmein - - 
NEO4J_PLUGINS=["apoc"] + - NEO4J_PLUGINS=["apoc", "graph-data-science"] networks: - cognee-network diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index ec93bda07..0a4806e3f 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -8,30 +8,63 @@ from swebench.harness.utils import load_swebench_dataset from swebench.inference.make_datasets.create_instance import PATCH_EXAMPLE import cognee + +from cognee.shared.data_models import SummarizedContent +from cognee.shared.utils import render_graph +from cognee.tasks.repo_processor import ( + enrich_dependency_graph, + expand_dependency_graph, + get_repo_file_dependencies, +) +from cognee.tasks.storage import add_data_points +from cognee.tasks.summarization import summarize_code +from cognee.modules.pipelines import Task, run_tasks from cognee.api.v1.cognify.code_graph_pipeline import code_graph_pipeline from cognee.api.v1.search import SearchType from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.prompts import read_query_prompt from evals.eval_utils import download_instances +from evals.eval_utils import ingest_repos +from evals.eval_utils import download_github_repo +from evals.eval_utils import delete_repo - -async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): +async def generate_patch_with_cognee(instance): await cognee.prune.prune_data() - await cognee.prune.prune_system(metadata=True) + await cognee.prune.prune_system() - dataset_name = "SWE_test_data" - code_text = instance["text"] - await cognee.add([code_text], dataset_name) - await code_graph_pipeline([dataset_name]) - graph_engine = await get_graph_engine() - with open(graph_engine.filename, "r") as f: - graph_str = f.read() + #dataset_name = "SWE_test_data" + + #await cognee.add('', dataset_name = dataset_name) + + # repo_path = download_github_repo(instance, '../RAW_GIT_REPOS') + + repo_path 
= '/Users/borisarzentar/Projects/graphrag' + + tasks = [ + Task(get_repo_file_dependencies), + Task(add_data_points), + Task(enrich_dependency_graph), + Task(expand_dependency_graph), + Task(add_data_points), + # Task(summarize_code, summarization_model = SummarizedContent), + ] + + pipeline = run_tasks(tasks, repo_path, "cognify_code_pipeline") + + async for result in pipeline: + print(result) + + print('Here we have the repo under the repo_path') + + await render_graph() problem_statement = instance['problem_statement'] instructions = read_query_prompt("patch_gen_instructions.txt") + graph_str = 'HERE WE SHOULD PASS THE TRIPLETS FROM GRAPHRAG' + prompt = "\n".join([ instructions, "", @@ -41,14 +74,18 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): graph_str ]) + return 0 + + ''' :TODO: We have to find out how do we do the generation llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( text_input=problem_statement, system_prompt=prompt, response_model=str, ) - return answer_prediction + return answer_prediction + ''' async def generate_patch_without_cognee(instance): problem_statement = instance['problem_statement'] @@ -71,11 +108,16 @@ async def get_preds(dataset, with_cognee=True): model_name = "without_cognee" pred_func = generate_patch_without_cognee + + for instance in dataset: + await pred_func(instance) + + ''' preds = [{"instance_id": instance["instance_id"], "model_patch": await pred_func(instance), "model_name_or_path": model_name} for instance in dataset] - - return preds + ''' + return 0 async def main(): @@ -115,4 +157,5 @@ async def main(): if __name__ == "__main__": import asyncio + asyncio.run(main(), debug=True) diff --git a/evals/eval_utils.py b/evals/eval_utils.py index e95a84cec..3192127dc 100644 --- a/evals/eval_utils.py +++ b/evals/eval_utils.py @@ -8,7 +8,8 @@ from swebench.inference.make_datasets.create_instance import make_code_text from 
swebench.inference.make_datasets.utils import (AutoContextManager, ingest_directory_contents) from tqdm.auto import tqdm - +from git import Repo +import shutil def ingest_files(filenames): files_dict = dict() @@ -101,3 +102,56 @@ def download_instances( dataset = create_dataset(input_instances_with_text) dataset.save_to_disk(path) return dataset + + +def download_github_repo(instance, output_dir): + """ + Downloads a GitHub repository and checks out the specified commit. + + Args: + instance (dict): Dictionary containing 'repo', 'base_commit', and 'instance_id'. + output_dir (str): Directory to store the downloaded repositories. + + Returns: + str: Path to the downloaded repository. + """ + repo_owner_repo = instance['repo'] + base_commit = instance['base_commit'] + instance_id = instance['instance_id'] + + repo_url = f"https://github.com/{repo_owner_repo}.git" + + repo_path = os.path.abspath(os.path.join(output_dir, instance_id)) + + # Clone repository if it doesn't already exist + if not os.path.exists(repo_path): + print(f"Cloning {repo_url} to {repo_path}...") + Repo.clone_from(repo_url, repo_path) + else: + print(f"Repository already exists at {repo_path}.") + + + repo = Repo(repo_path) + repo.git.checkout(base_commit) + + return repo_path + + +def delete_repo(repo_path): + """ + Deletes the specified repository directory. + + Args: + repo_path (str): Path to the repository to delete. + + Returns: + None + """ + try: + if os.path.exists(repo_path): + shutil.rmtree(repo_path) + print(f"Deleted repository at {repo_path}.") + else: + print(f"Repository path {repo_path} does not exist. 
Nothing to delete.") + except Exception as e: + print(f"Error deleting repository at {repo_path}: {e}") \ No newline at end of file diff --git a/examples/python/code_graph_pipeline.py b/examples/python/code_graph_pipeline.py index fe4be371b..52c1e0474 100644 --- a/examples/python/code_graph_pipeline.py +++ b/examples/python/code_graph_pipeline.py @@ -1,22 +1,11 @@ -import argparse import asyncio -import os - from cognee.modules.pipelines import Task, run_tasks -from cognee.shared.CodeGraphEntities import CodeRelationship, Repository -from cognee.shared.data_models import SummarizedContent -from cognee.tasks.code.get_local_dependencies_checker import ( - get_local_script_dependencies, -) -from cognee.tasks.graph.convert_graph_from_code_graph import ( - create_code_file, - convert_graph_from_code_graph, -) from cognee.tasks.repo_processor import ( enrich_dependency_graph, expand_dependency_graph, - get_repo_dependency_graph, + get_repo_file_dependencies, ) +from cognee.tasks.storage import add_data_points from cognee.tasks.summarization import summarize_code @@ -24,58 +13,24 @@ async def print_results(pipeline): async for result in pipeline: print(result) - -async def get_local_script_dependencies_wrapper(script_path, repo_path): - dependencies = await get_local_script_dependencies(script_path, repo_path) - return (script_path, dependencies) - - -async def scan_repo(path, condition): - futures = [] - for root, dirs, files in os.walk(path): - for file in files: - if condition(file): - futures.append( - get_local_script_dependencies_wrapper( - os.path.abspath(f"{root}/{file}"), path - ) - ) - results = await asyncio.gather(*futures) - - code_files = {} - code_relationships = [] - for abspath, dependencies in results: - code_file, abspath = create_code_file(abspath, "python_file") - code_files[abspath] = code_file - - for dependency in dependencies: - dependency_code_file, dependency_abspath = create_code_file( - dependency, "python_file" - ) - 
code_files[dependency_abspath] = dependency_code_file - code_relationship = CodeRelationship( - source_id=abspath, - target_id=dependency_abspath, - type="files", - relation="depends_on", - ) - code_relationships.append(code_relationship) - - return (Repository(path=path), list(code_files.values()), code_relationships) - - if __name__ == "__main__": + ''' parser = argparse.ArgumentParser(description="Process a file path") parser.add_argument("path", help="Path to the file") args = parser.parse_args() abspath = os.path.abspath(args.path or ".") + ''' + + abspath = '/Users/laszlohajdu/Documents/Github/RAW_GIT_REPOS/astropy__astropy-12907' tasks = [ - Task(get_repo_dependency_graph), + Task(get_repo_file_dependencies), + Task(add_data_points), Task(enrich_dependency_graph), Task(expand_dependency_graph), - Task(convert_graph_from_code_graph), - Task(summarize_code, summarization_model = SummarizedContent), + Task(add_data_points), + # Task(summarize_code, summarization_model = SummarizedContent), ] pipeline = run_tasks(tasks, abspath, "cognify_code_pipeline") + asyncio.run(print_results(pipeline)) diff --git a/examples/python/dynamic_steps_example.py b/examples/python/dynamic_steps_example.py index 309aea82c..49b41db1c 100644 --- a/examples/python/dynamic_steps_example.py +++ b/examples/python/dynamic_steps_example.py @@ -1,32 +1,6 @@ import cognee import asyncio -from cognee.api.v1.search import SearchType - -job_position = """0:Senior Data Scientist (Machine Learning) - -Company: TechNova Solutions -Location: San Francisco, CA - -Job Description: - -TechNova Solutions is seeking a Senior Data Scientist specializing in Machine Learning to join our dynamic analytics team. The ideal candidate will have a strong background in developing and deploying machine learning models, working with large datasets, and translating complex data into actionable insights. - -Responsibilities: - -Develop and implement advanced machine learning algorithms and models. 
-Analyze large, complex datasets to extract meaningful patterns and insights. -Collaborate with cross-functional teams to integrate predictive models into products. -Stay updated with the latest advancements in machine learning and data science. -Mentor junior data scientists and provide technical guidance. -Qualifications: - -Master’s or Ph.D. in Data Science, Computer Science, Statistics, or a related field. -5+ years of experience in data science and machine learning. -Proficient in Python, R, and SQL. -Experience with deep learning frameworks (e.g., TensorFlow, PyTorch). -Strong problem-solving skills and attention to detail. -Candidate CVs -""" +from cognee.pipelines.retriever.two_steps_retriever import two_step_retriever job_1 = """ CV 1: Relevant @@ -195,7 +169,7 @@ async def main(enable_steps): # Step 2: Add text if enable_steps.get("add_text"): - text_list = [job_position, job_1, job_2, job_3, job_4, job_5] + text_list = [job_1, job_2, job_3, job_4, job_5] for text in text_list: await cognee.add(text) print(f"Added text: {text[:35]}...") @@ -206,24 +180,21 @@ async def main(enable_steps): print("Knowledge graph created.") # Step 4: Query insights - if enable_steps.get("search_insights"): - search_results = await cognee.search( - SearchType.INSIGHTS, - {'query': 'Which applicant has the most relevant experience in data science?'} - ) - print("Search results:") - for result_text in search_results: - print(result_text) + if enable_steps.get("retriever"): + await two_step_retriever('Who has Phd?') if __name__ == '__main__': # Flags to enable/disable steps + + rebuild_kg = True + retrieve = True steps_to_enable = { - "prune_data": True, - "prune_system": True, - "add_text": True, - "cognify": True, - "search_insights": True + "prune_data": rebuild_kg, + "prune_system": rebuild_kg, + "add_text": rebuild_kg, + "cognify": rebuild_kg, + "retriever": retrieve } asyncio.run(main(steps_to_enable)) diff --git a/examples/python/multimedia_example.py 
b/examples/python/multimedia_example.py new file mode 100644 index 000000000..6c8bc5995 --- /dev/null +++ b/examples/python/multimedia_example.py @@ -0,0 +1,48 @@ +import os +import asyncio +import pathlib + +import cognee +from cognee.api.v1.search import SearchType + +# Prerequisites: +# 1. Copy `.env.template` and rename it to `.env`. +# 2. Add your OpenAI API key to the `.env` file in the `LLM_API_KEY` field: +# LLM_API_KEY = "your_key_here" + + +async def main(): + # Create a clean slate for cognee -- reset data and system state + await cognee.prune.prune_data() + await cognee.prune.prune_system(metadata=True) + + # cognee knowledge graph will be created based on the text + # and description of these files + mp3_file_path = os.path.join( + pathlib.Path(__file__).parent.parent.parent, + ".data/multimedia/text_to_speech.mp3", + ) + png_file_path = os.path.join( + pathlib.Path(__file__).parent.parent.parent, + ".data/multimedia/example.png", + ) + + # Add the files, and make it available for cognify + await cognee.add([mp3_file_path, png_file_path]) + + # Use LLMs and cognee to create knowledge graph + await cognee.cognify() + + # Query cognee for summaries of the data in the multimedia files + search_results = await cognee.search( + SearchType.SUMMARIES, + query_text="What is in the multimedia files?", + ) + + # Display search results + for result_text in search_results: + print(result_text) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/python/simple_example.py b/examples/python/simple_example.py index 47940ca6e..55b07c4c3 100644 --- a/examples/python/simple_example.py +++ b/examples/python/simple_example.py @@ -1,5 +1,4 @@ import asyncio - import cognee from cognee.api.v1.search import SearchType @@ -11,29 +10,57 @@ from cognee.api.v1.search import SearchType async def main(): # Create a clean slate for cognee -- reset data and system state + print("Resetting cognee data...") await cognee.prune.prune_data() await 
cognee.prune.prune_system(metadata=True) + print("Data reset complete.\n") # cognee knowledge graph will be created based on this text text = """ Natural language processing (NLP) is an interdisciplinary subfield of computer science and information retrieval. """ - + + print("Adding text to cognee:") + print(text.strip()) # Add the text, and make it available for cognify await cognee.add(text) + print("Text added successfully.\n") + + print("Running cognify to create knowledge graph...\n") + print("Cognify process steps:") + print("1. Classifying the document: Determining the type and category of the input text.") + print("2. Checking permissions: Ensuring the user has the necessary rights to process the text.") + print("3. Extracting text chunks: Breaking down the text into sentences or phrases for analysis.") + print("4. Adding data points: Storing the extracted chunks for processing.") + print("5. Generating knowledge graph: Extracting entities and relationships to form a knowledge graph.") + print("6. 
Summarizing text: Creating concise summaries of the content for quick insights.\n") + # Use LLMs and cognee to create knowledge graph await cognee.cognify() + print("Cognify process complete.\n") + + query_text = 'Tell me about NLP' + print(f"Searching cognee for insights with query: '{query_text}'") # Query cognee for insights on the added text search_results = await cognee.search( - SearchType.INSIGHTS, query_text='Tell me about NLP' + SearchType.INSIGHTS, query_text=query_text ) - - # Display search results + + print("Search results:") + # Display results for result_text in search_results: print(result_text) + # Example output: + # ({'id': UUID('bc338a39-64d6-549a-acec-da60846dd90d'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 1, 211808, tzinfo=datetime.timezone.utc), 'name': 'natural language processing', 'description': 'An interdisciplinary subfield of computer science and information retrieval.'}, {'relationship_name': 'is_a_subfield_of', 'source_node_id': UUID('bc338a39-64d6-549a-acec-da60846dd90d'), 'target_node_id': UUID('6218dbab-eb6a-5759-a864-b3419755ffe0'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 15, 473137, tzinfo=datetime.timezone.utc)}, {'id': UUID('6218dbab-eb6a-5759-a864-b3419755ffe0'), 'updated_at': datetime.datetime(2024, 11, 21, 12, 23, 1, 211808, tzinfo=datetime.timezone.utc), 'name': 'computer science', 'description': 'The study of computation and information processing.'}) + # (...) + # It represents nodes and relationships in the knowledge graph: + # - The first element is the source node (e.g., 'natural language processing'). + # - The second element is the relationship between nodes (e.g., 'is_a_subfield_of'). + # - The third element is the target node (e.g., 'computer science'). 
+ if __name__ == '__main__': asyncio.run(main()) diff --git a/notebooks/cognee_demo.ipynb b/notebooks/cognee_demo.ipynb index 67bb4e07f..13fcb8cb4 100644 --- a/notebooks/cognee_demo.ipynb +++ b/notebooks/cognee_demo.ipynb @@ -265,7 +265,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "df16431d0f48b006", "metadata": { "ExecuteTime": { @@ -304,7 +304,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "id": "9086abf3af077ab4", "metadata": { "ExecuteTime": { @@ -349,7 +349,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "a9de0cc07f798b7f", "metadata": { "ExecuteTime": { @@ -393,7 +393,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "185ff1c102d06111", "metadata": { "ExecuteTime": { @@ -437,7 +437,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "id": "d55ce4c58f8efb67", "metadata": { "ExecuteTime": { @@ -479,7 +479,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "id": "ca4ecc32721ad332", "metadata": { "ExecuteTime": { @@ -529,14 +529,14 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "id": "bce39dc6", "metadata": {}, "outputs": [], "source": [ "import os\n", "\n", - "# # Setting environment variables\n", + "# Setting environment variables\n", "if \"GRAPHISTRY_USERNAME\" not in os.environ: \n", " os.environ[\"GRAPHISTRY_USERNAME\"] = \"\"\n", "\n", @@ -546,24 +546,26 @@ "if \"LLM_API_KEY\" not in os.environ:\n", " os.environ[\"LLM_API_KEY\"] = \"\"\n", "\n", - "os.environ[\"GRAPH_DATABASE_PROVIDER\"]=\"networkx\" # \"neo4j\" or \"networkx\"\n", + "# \"neo4j\" or \"networkx\"\n", + "os.environ[\"GRAPH_DATABASE_PROVIDER\"]=\"networkx\" \n", "# Not needed if using networkx\n", - "#GRAPH_DATABASE_URL=\"\"\n", - "#GRAPH_DATABASE_USERNAME=\"\"\n", - "#GRAPH_DATABASE_PASSWORD=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_URL\"]=\"\"\n", 
+ "#os.environ[\"GRAPH_DATABASE_USERNAME\"]=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_PASSWORD\"]=\"\"\n", "\n", - "os.environ[\"VECTOR_DB_PROVIDER\"]=\"lancedb\" # \"qdrant\", \"weaviate\" or \"lancedb\"\n", - "# Not needed if using \"lancedb\"\n", + "# \"pgvector\", \"qdrant\", \"weaviate\" or \"lancedb\"\n", + "os.environ[\"VECTOR_DB_PROVIDER\"]=\"lancedb\" \n", + "# Not needed if using \"lancedb\" or \"pgvector\"\n", "# os.environ[\"VECTOR_DB_URL\"]=\"\"\n", "# os.environ[\"VECTOR_DB_KEY\"]=\"\"\n", "\n", - "# Database provider\n", - "os.environ[\"DB_PROVIDER\"]=\"sqlite\" # or \"postgres\"\n", + "# Relational Database provider \"sqlite\" or \"postgres\"\n", + "os.environ[\"DB_PROVIDER\"]=\"sqlite\"\n", "\n", "# Database name\n", "os.environ[\"DB_NAME\"]=\"cognee_db\"\n", "\n", - "# Postgres specific parameters (Only if Postgres is run)\n", + "# Postgres specific parameters (Only if Postgres or PGVector is used)\n", "# os.environ[\"DB_HOST\"]=\"127.0.0.1\"\n", "# os.environ[\"DB_PORT\"]=\"5432\"\n", "# os.environ[\"DB_USERNAME\"]=\"cognee\"\n", @@ -620,7 +622,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "7c431fdef4921ae0", "metadata": { "ExecuteTime": { diff --git a/notebooks/cognee_llama_index.ipynb b/notebooks/cognee_llama_index.ipynb index 742c2f51c..ec899aaea 100644 --- a/notebooks/cognee_llama_index.ipynb +++ b/notebooks/cognee_llama_index.ipynb @@ -52,7 +52,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -71,7 +71,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -90,23 +90,23 @@ "# \"neo4j\" or \"networkx\"\n", "os.environ[\"GRAPH_DATABASE_PROVIDER\"]=\"networkx\" \n", "# Not needed if using networkx\n", - "#GRAPH_DATABASE_URL=\"\"\n", - "#GRAPH_DATABASE_USERNAME=\"\"\n", - "#GRAPH_DATABASE_PASSWORD=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_URL\"]=\"\"\n", + 
"#os.environ[\"GRAPH_DATABASE_USERNAME\"]=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_PASSWORD\"]=\"\"\n", "\n", - "# \"qdrant\", \"weaviate\" or \"lancedb\"\n", + "# \"pgvector\", \"qdrant\", \"weaviate\" or \"lancedb\"\n", "os.environ[\"VECTOR_DB_PROVIDER\"]=\"lancedb\" \n", - "# Not needed if using \"lancedb\"\n", + "# Not needed if using \"lancedb\" or \"pgvector\"\n", "# os.environ[\"VECTOR_DB_URL\"]=\"\"\n", "# os.environ[\"VECTOR_DB_KEY\"]=\"\"\n", "\n", - "# Database provider\n", - "os.environ[\"DB_PROVIDER\"]=\"sqlite\" # or \"postgres\"\n", + "# Relational Database provider \"sqlite\" or \"postgres\"\n", + "os.environ[\"DB_PROVIDER\"]=\"sqlite\"\n", "\n", "# Database name\n", "os.environ[\"DB_NAME\"]=\"cognee_db\"\n", "\n", - "# Postgres specific parameters (Only if Postgres is run)\n", + "# Postgres specific parameters (Only if Postgres or PGVector is used)\n", "# os.environ[\"DB_HOST\"]=\"127.0.0.1\"\n", "# os.environ[\"DB_PORT\"]=\"5432\"\n", "# os.environ[\"DB_USERNAME\"]=\"cognee\"\n", @@ -130,8 +130,6 @@ "\n", "from cognee.infrastructure.databases.vector.pgvector import create_db_and_tables as create_pgvector_db_and_tables\n", "from cognee.infrastructure.databases.relational import create_db_and_tables as create_relational_db_and_tables\n", - "from cognee.infrastructure.databases.graph import get_graph_engine\n", - "from cognee.shared.utils import render_graph\n", "from cognee.modules.users.models import User\n", "from cognee.modules.users.methods import get_default_user\n", "from cognee.tasks.ingestion.ingest_data_with_metadata import ingest_data_with_metadata\n", @@ -196,6 +194,9 @@ "source": [ "import graphistry\n", "\n", + "from cognee.infrastructure.databases.graph import get_graph_engine\n", + "from cognee.shared.utils import render_graph\n", + "\n", "# Get graph\n", "graphistry.login(username=os.getenv(\"GRAPHISTRY_USERNAME\"), password=os.getenv(\"GRAPHISTRY_PASSWORD\"))\n", "graph_engine = await get_graph_engine()\n", diff --git 
a/notebooks/cognee_multimedia_demo.ipynb b/notebooks/cognee_multimedia_demo.ipynb new file mode 100644 index 000000000..2d35132f6 --- /dev/null +++ b/notebooks/cognee_multimedia_demo.ipynb @@ -0,0 +1,169 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Cognee GraphRAG with Multimedia files" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "vscode": { + "languageId": "plaintext" + } + }, + "source": [ + "## Load Data\n", + "\n", + "We will use a few sample multimedia files which we have on GitHub for easy access." + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import pathlib\n", + "\n", + "# cognee knowledge graph will be created based on the text\n", + "# and description of these files\n", + "mp3_file_path = os.path.join(\n", + " os.path.abspath(''), \"../\",\n", + " \".data/multimedia/text_to_speech.mp3\",\n", + ")\n", + "png_file_path = os.path.join(\n", + " os.path.abspath(''), \"../\",\n", + " \".data/multimedia/example.png\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set environment variables" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "# Setting environment variables\n", + "if \"GRAPHISTRY_USERNAME\" not in os.environ: \n", + " os.environ[\"GRAPHISTRY_USERNAME\"] = \"\"\n", + "\n", + "if \"GRAPHISTRY_PASSWORD\" not in os.environ: \n", + " os.environ[\"GRAPHISTRY_PASSWORD\"] = \"\"\n", + "\n", + "if \"LLM_API_KEY\" not in os.environ:\n", + " os.environ[\"LLM_API_KEY\"] = \"\"\n", + "\n", + "# \"neo4j\" or \"networkx\"\n", + "os.environ[\"GRAPH_DATABASE_PROVIDER\"]=\"networkx\" \n", + "# Not needed if using networkx\n", + "#os.environ[\"GRAPH_DATABASE_URL\"]=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_USERNAME\"]=\"\"\n", + "#os.environ[\"GRAPH_DATABASE_PASSWORD\"]=\"\"\n", + "\n", + "# 
\"pgvector\", \"qdrant\", \"weaviate\" or \"lancedb\"\n", + "os.environ[\"VECTOR_DB_PROVIDER\"]=\"lancedb\" \n", + "# Not needed if using \"lancedb\" or \"pgvector\"\n", + "# os.environ[\"VECTOR_DB_URL\"]=\"\"\n", + "# os.environ[\"VECTOR_DB_KEY\"]=\"\"\n", + "\n", + "# Relational Database provider \"sqlite\" or \"postgres\"\n", + "os.environ[\"DB_PROVIDER\"]=\"sqlite\"\n", + "\n", + "# Database name\n", + "os.environ[\"DB_NAME\"]=\"cognee_db\"\n", + "\n", + "# Postgres specific parameters (Only if Postgres or PGVector is used)\n", + "# os.environ[\"DB_HOST\"]=\"127.0.0.1\"\n", + "# os.environ[\"DB_PORT\"]=\"5432\"\n", + "# os.environ[\"DB_USERNAME\"]=\"cognee\"\n", + "# os.environ[\"DB_PASSWORD\"]=\"cognee\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run Cognee with multimedia files" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import cognee\n", + "\n", + "# Create a clean slate for cognee -- reset data and system state\n", + "await cognee.prune.prune_data()\n", + "await cognee.prune.prune_system(metadata=True)\n", + "\n", + "# Add multimedia files and make them available for cognify\n", + "await cognee.add([mp3_file_path, png_file_path])\n", + "\n", + "# Create knowledge graph with cognee\n", + "await cognee.cognify()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Query Cognee for summaries related to multimedia files" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from cognee.api.v1.search import SearchType\n", + "\n", + "# Query cognee for summaries of the data in the multimedia files\n", + "search_results = await cognee.search(\n", + " SearchType.SUMMARIES,\n", + " query_text=\"What is in the multimedia files?\",\n", + ")\n", + "\n", + "# Display search results\n", + "for result_text in search_results:\n", + " print(result_text)" + ] + } + ], + "metadata": { 
+ "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.6" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/poetry.lock b/poetry.lock index 96f9aec27..01f7e44a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -6171,11 +6171,6 @@ files = [ {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, - {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, {file = 
"scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, From f47b185a9e96fb573a06301bf3f63b59288a1d35 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Wed, 27 Nov 2024 10:53:48 +0100 Subject: [PATCH 07/52] feat/add correctness score calculation with LLM as a judge --- .gitignore | 1 - .../llm/prompts/answer_question.txt | 2 + .../llm/prompts/answer_question_kg.txt | 2 + .../llm/prompts/context_for_question.txt | 2 + evals/llm_as_a_judge.py | 110 ++++++++++++++++++ 5 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 cognee/infrastructure/llm/prompts/answer_question.txt create mode 100644 cognee/infrastructure/llm/prompts/answer_question_kg.txt create mode 100644 cognee/infrastructure/llm/prompts/context_for_question.txt create mode 100644 evals/llm_as_a_judge.py diff --git a/.gitignore b/.gitignore index d256013d2..5096c96b4 100644 --- a/.gitignore +++ b/.gitignore @@ -12,7 +12,6 @@ __pycache__/ *$py.class full_run.ipynb -evals/ # C extensions *.so diff --git a/cognee/infrastructure/llm/prompts/answer_question.txt b/cognee/infrastructure/llm/prompts/answer_question.txt new file mode 100644 index 000000000..06a0602d1 --- /dev/null +++ b/cognee/infrastructure/llm/prompts/answer_question.txt @@ -0,0 +1,2 @@ +Answer the question using the provided context. Be as brief as possible. +Each entry in the context is a paragraph, which is represented as a list with two elements [title, sentences] and sentences is a list of strings. 
\ No newline at end of file diff --git a/cognee/infrastructure/llm/prompts/answer_question_kg.txt b/cognee/infrastructure/llm/prompts/answer_question_kg.txt new file mode 100644 index 000000000..df303f54a --- /dev/null +++ b/cognee/infrastructure/llm/prompts/answer_question_kg.txt @@ -0,0 +1,2 @@ +Answer the question using the provided context. Be as brief as possible. +Each entry in the context is tuple of length 3, representing an edge of a knowledge graph with its two nodes. \ No newline at end of file diff --git a/cognee/infrastructure/llm/prompts/context_for_question.txt b/cognee/infrastructure/llm/prompts/context_for_question.txt new file mode 100644 index 000000000..cc9ccffa3 --- /dev/null +++ b/cognee/infrastructure/llm/prompts/context_for_question.txt @@ -0,0 +1,2 @@ +The question is: `{{ question }}` +And here is the context: `{{ context }}` \ No newline at end of file diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py new file mode 100644 index 000000000..239c7aea9 --- /dev/null +++ b/evals/llm_as_a_judge.py @@ -0,0 +1,110 @@ +import argparse +import asyncio +import json +import statistics +from pathlib import Path + +import wget +from deepeval.dataset import EvaluationDataset +from deepeval.metrics import GEval +from deepeval.test_case import LLMTestCase, LLMTestCaseParams +from tqdm import tqdm + +import cognee +from cognee.api.v1.search import SearchType +from cognee.base_config import get_base_config +from cognee.infrastructure.llm.get_llm_client import get_llm_client +from cognee.infrastructure.llm.prompts import read_query_prompt, render_prompt + + +async def answer_without_cognee(instance): + args = { + "question": instance["question"], + "context": instance["context"], + } + user_prompt = render_prompt("context_for_question.txt", args) + system_prompt = read_query_prompt("answer_question.txt") + + llm_client = get_llm_client() + answer_prediction = await llm_client.acreate_structured_output( + text_input=user_prompt, + 
system_prompt=system_prompt, + response_model=str, + ) + return answer_prediction + +async def answer_with_cognee(instance): + + await cognee.prune.prune_data() + await cognee.prune.prune_system(metadata=True) + for (title, sentences) in instance["context"]: + await cognee.add("\n".join(sentences), dataset_name = "HotPotQA") + await cognee.cognify("HotPotQA") + + search_results = await cognee.search( + SearchType.INSIGHTS, query_text=instance["question"] + ) + + args = { + "question": instance["question"], + "context": search_results, + } + user_prompt = render_prompt("context_for_question.txt", args) + system_prompt = read_query_prompt("answer_question_kg.txt") + + llm_client = get_llm_client() + answer_prediction = await llm_client.acreate_structured_output( + text_input=user_prompt, + system_prompt=system_prompt, + response_model=str, + ) + return answer_prediction + +correctness_metric = GEval( + name="Correctness", + model="gpt-4o-mini", + evaluation_params=[ + LLMTestCaseParams.ACTUAL_OUTPUT, + LLMTestCaseParams.EXPECTED_OUTPUT + ], + evaluation_steps=[ + "Determine whether the actual output is factually correct based on the expected output." 
+ ] + ) + + +async def eval_correctness(with_cognee=True, num_samples=None): + base_config = get_base_config() + data_root_dir = base_config.data_root_directory + filepath = data_root_dir / Path("hotpot_dev_fullwiki_v1.json") + if not filepath.exists(): + url = 'http://curtis.ml.cmu.edu/datasets/hotpot/hotpot_dev_fullwiki_v1.json' + wget.download(url, out=data_root_dir) + with open(filepath, "r") as file: + dataset = json.load(file) + test_cases = [] + if not num_samples: + num_samples = len(dataset) + for instance in tqdm(dataset[:num_samples], desc="Evaluating correctness"): + if with_cognee: + answer = await answer_with_cognee(instance) + else: + answer = await answer_without_cognee(instance) + test_case = LLMTestCase( + input=instance["question"], + actual_output=answer, + expected_output=instance["answer"] + ) + test_cases.append(test_case) + evalset = EvaluationDataset(test_cases) + evalresults = evalset.evaluate([correctness_metric]) + avg_correctness = statistics.mean([result.metrics_data[0].score for result in evalresults.test_results]) + return avg_correctness + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--with_cognee", action="store_true") + parser.add_argument("--num_samples", type=int, default=500) + args = parser.parse_args() + avg_correctness = asyncio.run(eval_correctness(args.with_cognee, args.num_samples)) + print(f"Average correctness: {avg_correctness}") \ No newline at end of file From 4aa634d5e1a6697f444a417018a0cc81b1e698c4 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Wed, 27 Nov 2024 16:14:05 +0100 Subject: [PATCH 08/52] Eval function takes eval_metric as input. 
Works with deepeval metrics like AnswerRelevancyMetric --- evals/llm_as_a_judge.py | 72 ++++++++++++++++++++++------------------- 1 file changed, 39 insertions(+), 33 deletions(-) diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py index 239c7aea9..8dd1518a7 100644 --- a/evals/llm_as_a_judge.py +++ b/evals/llm_as_a_judge.py @@ -4,13 +4,14 @@ import json import statistics from pathlib import Path +import deepeval.metrics import wget from deepeval.dataset import EvaluationDataset -from deepeval.metrics import GEval -from deepeval.test_case import LLMTestCase, LLMTestCaseParams +from deepeval.test_case import LLMTestCase from tqdm import tqdm import cognee +import evals.deepeval_metrics from cognee.api.v1.search import SearchType from cognee.base_config import get_base_config from cognee.infrastructure.llm.get_llm_client import get_llm_client @@ -34,7 +35,6 @@ async def answer_without_cognee(instance): return answer_prediction async def answer_with_cognee(instance): - await cognee.prune.prune_data() await cognee.prune.prune_system(metadata=True) for (title, sentences) in instance["context"]: @@ -60,20 +60,23 @@ async def answer_with_cognee(instance): ) return answer_prediction -correctness_metric = GEval( - name="Correctness", - model="gpt-4o-mini", - evaluation_params=[ - LLMTestCaseParams.ACTUAL_OUTPUT, - LLMTestCaseParams.EXPECTED_OUTPUT - ], - evaluation_steps=[ - "Determine whether the actual output is factually correct based on the expected output." 
- ] - ) +async def eval_answers(instances, answers, eval_metric): + test_cases = [] + for i in range(len(answers)): + instance = instances[i] + answer = answers[i] + test_case = LLMTestCase( + input=instance["question"], + actual_output=answer, + expected_output=instance["answer"] + ) + test_cases.append(test_case) + evalset = EvaluationDataset(test_cases) + evalresults = evalset.evaluate([eval_metric]) + return evalresults -async def eval_correctness(with_cognee=True, num_samples=None): +async def eval_on_hotpotQA(answer_provider, num_samples, eval_metric): base_config = get_base_config() data_root_dir = base_config.data_root_directory filepath = data_root_dir / Path("hotpot_dev_fullwiki_v1.json") @@ -82,29 +85,32 @@ async def eval_correctness(with_cognee=True, num_samples=None): wget.download(url, out=data_root_dir) with open(filepath, "r") as file: dataset = json.load(file) - test_cases = [] if not num_samples: num_samples = len(dataset) - for instance in tqdm(dataset[:num_samples], desc="Evaluating correctness"): - if with_cognee: - answer = await answer_with_cognee(instance) - else: - answer = await answer_without_cognee(instance) - test_case = LLMTestCase( - input=instance["question"], - actual_output=answer, - expected_output=instance["answer"] - ) - test_cases.append(test_case) - evalset = EvaluationDataset(test_cases) - evalresults = evalset.evaluate([correctness_metric]) - avg_correctness = statistics.mean([result.metrics_data[0].score for result in evalresults.test_results]) - return avg_correctness + instances = dataset[:num_samples] + answers = [] + for instance in tqdm(instances, desc="Getting answers"): + answer = await answer_provider(instance) + answers.append(answer) + evalresults = await eval_answers(instances, answers, eval_metric) + avg_score = statistics.mean([result.metrics_data[0].score for result in evalresults.test_results]) + return avg_score if __name__ == "__main__": parser = argparse.ArgumentParser() 
parser.add_argument("--with_cognee", action="store_true") parser.add_argument("--num_samples", type=int, default=500) + parser.add_argument("--metric", type=str, default="correctness_metric") args = parser.parse_args() - avg_correctness = asyncio.run(eval_correctness(args.with_cognee, args.num_samples)) - print(f"Average correctness: {avg_correctness}") \ No newline at end of file + + try: + metric_cls = getattr(deepeval.metrics, args.metric) + metric = metric_cls() + except AttributeError: + metric = getattr(evals.deepeval_metrics, args.metric) + if args.with_cognee: + answer_provider = answer_with_cognee + else: + answer_provider = answer_without_cognee + avg_score = asyncio.run(eval_on_hotpotQA(answer_provider, args.num_samples, metric)) + print(f"Average {args.metric}: {avg_score}") \ No newline at end of file From 6403d15a769db9af95a3cede99873908a709909f Mon Sep 17 00:00:00 2001 From: Boris Date: Wed, 27 Nov 2024 22:55:30 +0100 Subject: [PATCH 09/52] fix: enable falkordb and add test for it (#31) --- .../databases/graph/get_graph_engine.py | 2 +- .../hybrid/falkordb/FalkorDBAdapter.py | 100 ++++++++++++++---- .../databases/vector/create_vector_engine.py | 2 +- .../embeddings/LiteLLMEmbeddingEngine.py | 6 +- .../vector/lancedb/LanceDBAdapter.py | 1 - .../infrastructure/engine/models/DataPoint.py | 12 ++- cognee/modules/engine/models/Entity.py | 5 +- cognee/modules/engine/models/EntityType.py | 5 +- .../graph/utils/get_graph_from_model.py | 1 + cognee/tasks/storage/index_data_points.py | 15 ++- cognee/tests/test_falkordb.py | 83 +++++++++++++++ 11 files changed, 191 insertions(+), 41 deletions(-) create mode 100755 cognee/tests/test_falkordb.py diff --git a/cognee/infrastructure/databases/graph/get_graph_engine.py b/cognee/infrastructure/databases/graph/get_graph_engine.py index 5b03cac2e..806a1ad69 100644 --- a/cognee/infrastructure/databases/graph/get_graph_engine.py +++ b/cognee/infrastructure/databases/graph/get_graph_engine.py @@ -21,7 +21,7 @@ async def 
get_graph_engine() -> GraphDBInterface: ) elif config.graph_database_provider == "falkordb": - if not (config.graph_database_url and config.graph_database_username and config.graph_database_password): + if not (config.graph_database_url and config.graph_database_port): raise EnvironmentError("Missing required FalkorDB credentials.") from cognee.infrastructure.databases.vector.embeddings import get_embedding_engine diff --git a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py index ea5a75088..bd6a2bc2d 100644 --- a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +++ b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py @@ -1,7 +1,7 @@ import asyncio -from textwrap import dedent -from typing import Any +# from datetime import datetime from uuid import UUID +from textwrap import dedent from falkordb import FalkorDB from cognee.infrastructure.engine import DataPoint @@ -43,23 +43,31 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): async def embed_data(self, data: list[str]) -> list[list[float]]: return await self.embedding_engine.embed_text(data) - async def stringify_properties(self, properties: dict, vectorize_fields = []) -> str: - async def get_value(key, value): - return f"'{value}'" if key not in vectorize_fields else await self.get_vectorized_value(value) + async def stringify_properties(self, properties: dict) -> str: + def parse_value(value): + if type(value) is UUID: + return f"'{str(value)}'" + if type(value) is int or type(value) is float: + return value + if type(value) is list and type(value[0]) is float and len(value) == self.embedding_engine.get_vector_size(): + return f"'vecf32({value})'" + # if type(value) is datetime: + # return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f%z") + return f"'{value}'" - return ",".join([f"{key}:{await get_value(key, value)}" for key, value in properties.items()]) + return 
",".join([f"{key}:{parse_value(value)}" for key, value in properties.items()]) - async def get_vectorized_value(self, value: Any) -> str: - vector = (await self.embed_data([value]))[0] - return f"vecf32({vector})" + async def create_data_point_query(self, data_point: DataPoint, vectorized_values: list = None): + node_label = type(data_point).__tablename__ + embeddable_fields = data_point._metadata.get("index_fields", []) - async def create_data_point_query(self, data_point: DataPoint): - node_label = type(data_point).__name__ - node_properties = await self.stringify_properties( - data_point.model_dump(), - data_point._metadata["index_fields"], - # data_point._metadata["index_fields"] if hasattr(data_point, "_metadata") else [], - ) + node_properties = await self.stringify_properties({ + **data_point.model_dump(), + **({ + embeddable_fields[index]: vectorized_values[index] \ + for index in range(len(embeddable_fields)) \ + } if vectorized_values is not None else {}), + }) return dedent(f""" MERGE (node:{node_label} {{id: '{str(data_point.id)}'}}) @@ -90,7 +98,33 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): return collection_name in collections async def create_data_points(self, data_points: list[DataPoint]): - queries = [await self.create_data_point_query(data_point) for data_point in data_points] + embeddable_values = [DataPoint.get_embeddable_properties(data_point) for data_point in data_points] + + vectorized_values = await self.embed_data( + sum(embeddable_values, []) + ) + + index = 0 + positioned_vectorized_values = [] + + for values in embeddable_values: + if len(values) > 0: + values_list = [] + for i in range(len(values)): + values_list.append(vectorized_values[index + i]) + + positioned_vectorized_values.append(values_list) + index += len(values) + else: + positioned_vectorized_values.append(None) + + queries = [ + await self.create_data_point_query( + data_point, + positioned_vectorized_values[index], + ) for index, data_point in 
enumerate(data_points) + ] + for query in queries: self.query(query) @@ -205,10 +239,12 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): if query_text and not query_vector: query_vector = (await self.embed_data([query_text]))[0] + [label, attribute_name] = collection_name.split(".") + query = dedent(f""" CALL db.idx.vector.queryNodes( - {collection_name}, - 'text', + '{label}', + '{attribute_name}', {limit}, vecf32({query_vector}) ) YIELD node, score @@ -216,7 +252,7 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): result = self.query(query) - return result + return result.result_set async def batch_search( self, @@ -236,6 +272,30 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): ) for query_vector in query_vectors] ) + async def get_graph_data(self): + query = "MATCH (n) RETURN ID(n) AS id, labels(n) AS labels, properties(n) AS properties" + + result = self.query(query) + + nodes = [( + record[2]["id"], + record[2], + ) for record in result.result_set] + + query = """ + MATCH (n)-[r]->(m) + RETURN ID(n) AS source, ID(m) AS target, TYPE(r) AS type, properties(r) AS properties + """ + result = self.query(query) + edges = [( + record[3]["source_node_id"], + record[3]["target_node_id"], + record[2], + record[3], + ) for record in result.result_set] + + return (nodes, edges) + async def delete_data_points(self, collection_name: str, data_point_ids: list[str]): return self.query( f"MATCH (node) WHERE node.id IN $node_ids DETACH DELETE node", diff --git a/cognee/infrastructure/databases/vector/create_vector_engine.py b/cognee/infrastructure/databases/vector/create_vector_engine.py index 4b4799ee7..96937a132 100644 --- a/cognee/infrastructure/databases/vector/create_vector_engine.py +++ b/cognee/infrastructure/databases/vector/create_vector_engine.py @@ -58,7 +58,7 @@ def create_vector_engine(config: VectorConfig, embedding_engine): ) elif config["vector_db_provider"] == "falkordb": - if not (config["vector_db_url"] and 
config["vector_db_key"]): + if not (config["vector_db_url"] and config["vector_db_port"]): raise EnvironmentError("Missing requred FalkorDB credentials!") from ..hybrid.falkordb.FalkorDBAdapter import FalkorDBAdapter diff --git a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py index 617698fd1..ab1274fb8 100644 --- a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +++ b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py @@ -36,10 +36,10 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine): api_version = self.api_version ) - return response.data[0]["embedding"] + return [data["embedding"] for data in response.data] - tasks = [get_embedding(text_) for text_ in text] - result = await asyncio.gather(*tasks) + # tasks = [get_embedding(text_) for text_ in text] + result = await get_embedding(text) return result def get_vector_size(self) -> int: diff --git a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py index 0b68cb3b7..86895155f 100644 --- a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +++ b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py @@ -1,4 +1,3 @@ -import inspect from typing import List, Optional, get_type_hints, Generic, TypeVar import asyncio from uuid import UUID diff --git a/cognee/infrastructure/engine/models/DataPoint.py b/cognee/infrastructure/engine/models/DataPoint.py index d08c52fa8..5d6b1d513 100644 --- a/cognee/infrastructure/engine/models/DataPoint.py +++ b/cognee/infrastructure/engine/models/DataPoint.py @@ -23,7 +23,15 @@ class DataPoint(BaseModel): if self._metadata and len(self._metadata["index_fields"]) > 0 \ and hasattr(self, self._metadata["index_fields"][0]): attribute = getattr(self, self._metadata["index_fields"][0]) + if isinstance(attribute, str): - 
return(attribute.strip()) + return attribute.strip() else: - return (attribute) + return attribute + + @classmethod + def get_embeddable_properties(self, data_point): + if data_point._metadata and len(data_point._metadata["index_fields"]) > 0: + return [getattr(data_point, field, None) for field in data_point._metadata["index_fields"]] + + return [] diff --git a/cognee/modules/engine/models/Entity.py b/cognee/modules/engine/models/Entity.py index c5579a610..adf22dfef 100644 --- a/cognee/modules/engine/models/Entity.py +++ b/cognee/modules/engine/models/Entity.py @@ -1,9 +1,6 @@ -from typing import Union - from cognee.infrastructure.engine import DataPoint from cognee.modules.chunking.models.DocumentChunk import DocumentChunk from cognee.modules.engine.models.EntityType import EntityType -from cognee.shared.CodeGraphEntities import Repository class Entity(DataPoint): @@ -11,7 +8,7 @@ class Entity(DataPoint): name: str is_a: EntityType description: str - mentioned_in: Union[DocumentChunk, Repository] + mentioned_in: DocumentChunk _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/engine/models/EntityType.py b/cognee/modules/engine/models/EntityType.py index 685958935..438191cd7 100644 --- a/cognee/modules/engine/models/EntityType.py +++ b/cognee/modules/engine/models/EntityType.py @@ -1,8 +1,5 @@ -from typing import Union - from cognee.infrastructure.engine import DataPoint from cognee.modules.chunking.models.DocumentChunk import DocumentChunk -from cognee.shared.CodeGraphEntities import Repository class EntityType(DataPoint): @@ -10,7 +7,7 @@ class EntityType(DataPoint): name: str type: str description: str - exists_in: Union[DocumentChunk, Repository] + exists_in: DocumentChunk _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/graph/utils/get_graph_from_model.py b/cognee/modules/graph/utils/get_graph_from_model.py index d2c6269ab..ec1da85e3 100644 --- a/cognee/modules/graph/utils/get_graph_from_model.py +++ 
b/cognee/modules/graph/utils/get_graph_from_model.py @@ -122,6 +122,7 @@ async def get_graph_from_model( type(data_point), include_fields = { "_metadata": (dict, data_point._metadata), + "__tablename__": data_point.__tablename__, }, exclude_fields = excluded_properties, ) diff --git a/cognee/tasks/storage/index_data_points.py b/cognee/tasks/storage/index_data_points.py index 03ad30f9d..01c2c2796 100644 --- a/cognee/tasks/storage/index_data_points.py +++ b/cognee/tasks/storage/index_data_points.py @@ -1,3 +1,4 @@ +import asyncio from cognee.infrastructure.databases.vector import get_vector_engine from cognee.infrastructure.engine import DataPoint @@ -9,8 +10,12 @@ async def index_data_points(data_points: list[DataPoint]): flat_data_points: list[DataPoint] = [] - for data_point in data_points: - flat_data_points.extend(get_data_points_from_model(data_point)) + results = await asyncio.gather(*[ + get_data_points_from_model(data_point) for data_point in data_points + ]) + + for result in results: + flat_data_points.extend(result) for data_point in flat_data_points: data_point_type = type(data_point) @@ -38,7 +43,7 @@ async def index_data_points(data_points: list[DataPoint]): return data_points -def get_data_points_from_model(data_point: DataPoint, added_data_points = None, visited_properties = None) -> list[DataPoint]: +async def get_data_points_from_model(data_point: DataPoint, added_data_points = None, visited_properties = None) -> list[DataPoint]: data_points = [] added_data_points = added_data_points or {} visited_properties = visited_properties or {} @@ -52,7 +57,7 @@ def get_data_points_from_model(data_point: DataPoint, added_data_points = None, visited_properties[property_key] = True - new_data_points = get_data_points_from_model(field_value, added_data_points, visited_properties) + new_data_points = await get_data_points_from_model(field_value, added_data_points, visited_properties) for new_point in new_data_points: if str(new_point.id) not in 
added_data_points: @@ -68,7 +73,7 @@ def get_data_points_from_model(data_point: DataPoint, added_data_points = None, visited_properties[property_key] = True - new_data_points = get_data_points_from_model(field_value_item, added_data_points, visited_properties) + new_data_points = await get_data_points_from_model(field_value_item, added_data_points, visited_properties) for new_point in new_data_points: if str(new_point.id) not in added_data_points: diff --git a/cognee/tests/test_falkordb.py b/cognee/tests/test_falkordb.py new file mode 100755 index 000000000..36c029cf7 --- /dev/null +++ b/cognee/tests/test_falkordb.py @@ -0,0 +1,83 @@ +import os +import logging +import pathlib +import cognee +from cognee.api.v1.search import SearchType +from cognee.shared.utils import render_graph + +logging.basicConfig(level = logging.DEBUG) + +async def main(): + data_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_library")).resolve()) + cognee.config.data_root_directory(data_directory_path) + cognee_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_library")).resolve()) + cognee.config.system_root_directory(cognee_directory_path) + + await cognee.prune.prune_data() + await cognee.prune.prune_system(metadata = True) + + dataset_name = "artificial_intelligence" + + ai_text_file_path = os.path.join(pathlib.Path(__file__).parent, "test_data/artificial-intelligence.pdf") + await cognee.add([ai_text_file_path], dataset_name) + + text = """A large language model (LLM) is a language model notable for its ability to achieve general-purpose language generation and other natural language processing tasks such as classification. LLMs acquire these abilities by learning statistical relationships from text documents during a computationally intensive self-supervised and semi-supervised training process. 
LLMs can be used for text generation, a form of generative AI, by taking an input text and repeatedly predicting the next token or word. + LLMs are artificial neural networks. The largest and most capable, as of March 2024, are built with a decoder-only transformer-based architecture while some recent implementations are based on other architectures, such as recurrent neural network variants and Mamba (a state space model). + Up to 2020, fine tuning was the only way a model could be adapted to be able to accomplish specific tasks. Larger sized models, such as GPT-3, however, can be prompt-engineered to achieve similar results.[6] They are thought to acquire knowledge about syntax, semantics and "ontology" inherent in human language corpora, but also inaccuracies and biases present in the corpora. + Some notable LLMs are OpenAI's GPT series of models (e.g., GPT-3.5 and GPT-4, used in ChatGPT and Microsoft Copilot), Google's PaLM and Gemini (the latter of which is currently used in the chatbot of the same name), xAI's Grok, Meta's LLaMA family of open-source models, Anthropic's Claude models, Mistral AI's open source models, and Databricks' open source DBRX. + """ + + await cognee.add([text], dataset_name) + + await cognee.cognify([dataset_name]) + + # await render_graph(None, include_labels = True, include_nodes = True) + + from cognee.infrastructure.databases.vector import get_vector_engine + vector_engine = get_vector_engine() + random_node = (await vector_engine.search("entity.name", "AI"))[0] + random_node_name = random_node.payload["text"] + + search_results = await cognee.search(SearchType.INSIGHTS, query_text = random_node_name) + assert len(search_results) != 0, "The search results list is empty." + print("\n\nExtracted sentences are:\n") + for result in search_results: + print(f"{result}\n") + + search_results = await cognee.search(SearchType.CHUNKS, query_text = random_node_name) + assert len(search_results) != 0, "The search results list is empty." 
+ print("\n\nExtracted chunks are:\n") + for result in search_results: + print(f"{result}\n") + + search_results = await cognee.search(SearchType.SUMMARIES, query_text = random_node_name) + assert len(search_results) != 0, "Query related summaries don't exist." + print("\nExtracted summaries are:\n") + for result in search_results: + print(f"{result}\n") + + history = await cognee.get_search_history() + + assert len(history) == 6, "Search history is not correct." + + # Assert local data files are cleaned properly + await cognee.prune.prune_data() + assert not os.path.isdir(data_directory_path), "Local data files are not deleted" + + # Assert relational, vector and graph databases have been cleaned properly + await cognee.prune.prune_system(metadata=True) + + connection = await vector_engine.get_connection() + collection_names = await connection.table_names() + assert len(collection_names) == 0, "LanceDB vector database is not empty" + + from cognee.infrastructure.databases.relational import get_relational_engine + assert not os.path.exists(get_relational_engine().db_path), "SQLite relational database is not empty" + + from cognee.infrastructure.databases.graph import get_graph_config + graph_config = get_graph_config() + assert not os.path.exists(graph_config.graph_file_path), "Networkx graph database is not empty" + +if __name__ == "__main__": + import asyncio + asyncio.run(main(), debug=True) From 2408fd7a01765b9e466748d0bd2a83b185469c4a Mon Sep 17 00:00:00 2001 From: Boris Arzentar Date: Thu, 28 Nov 2024 09:12:37 +0100 Subject: [PATCH 10/52] fix: falkordb adapter errors --- .../hybrid/falkordb/FalkorDBAdapter.py | 76 ++++++++++--------- .../embeddings/LiteLLMEmbeddingEngine.py | 21 +++-- .../infrastructure/engine/models/DataPoint.py | 4 + .../graph/utils/get_graph_from_model.py | 18 ++--- cognee/shared/CodeGraphEntities.py | 1 - .../repo_processor/enrich_dependency_graph.py | 25 +++--- .../repo_processor/expand_dependency_graph.py | 6 +- 
.../get_repo_file_dependencies.py | 13 ++-- cognee/tests/test_falkordb.py | 4 +- evals/eval_swe_bench.py | 11 ++- 10 files changed, 101 insertions(+), 78 deletions(-) diff --git a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py index bd6a2bc2d..32a9853c2 100644 --- a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +++ b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py @@ -1,5 +1,6 @@ import asyncio # from datetime import datetime +import json from uuid import UUID from textwrap import dedent from falkordb import FalkorDB @@ -53,28 +54,28 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): return f"'vecf32({value})'" # if type(value) is datetime: # return datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f%z") + if type(value) is dict: + return f"'{json.dumps(value)}'" return f"'{value}'" return ",".join([f"{key}:{parse_value(value)}" for key, value in properties.items()]) - async def create_data_point_query(self, data_point: DataPoint, vectorized_values: list = None): + async def create_data_point_query(self, data_point: DataPoint, vectorized_values: dict): node_label = type(data_point).__tablename__ - embeddable_fields = data_point._metadata.get("index_fields", []) + property_names = DataPoint.get_embeddable_property_names(data_point) node_properties = await self.stringify_properties({ **data_point.model_dump(), **({ - embeddable_fields[index]: vectorized_values[index] \ - for index in range(len(embeddable_fields)) \ - } if vectorized_values is not None else {}), + property_names[index]: (vectorized_values[index] if index in vectorized_values else None) \ + for index in range(len(property_names)) \ + }), }) return dedent(f""" MERGE (node:{node_label} {{id: '{str(data_point.id)}'}}) - ON CREATE SET node += ({{{node_properties}}}) - ON CREATE SET node.updated_at = timestamp() - ON MATCH SET node += ({{{node_properties}}}) - ON MATCH SET 
node.updated_at = timestamp() + ON CREATE SET node += ({{{node_properties}}}), node.updated_at = timestamp() + ON MATCH SET node += ({{{node_properties}}}), node.updated_at = timestamp() """).strip() async def create_edge_query(self, edge: tuple[str, str, str, dict]) -> str: @@ -98,31 +99,33 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): return collection_name in collections async def create_data_points(self, data_points: list[DataPoint]): - embeddable_values = [DataPoint.get_embeddable_properties(data_point) for data_point in data_points] + embeddable_values = [] + vector_map = {} - vectorized_values = await self.embed_data( - sum(embeddable_values, []) - ) + for data_point in data_points: + property_names = DataPoint.get_embeddable_property_names(data_point) + key = str(data_point.id) + vector_map[key] = {} - index = 0 - positioned_vectorized_values = [] + for property_name in property_names: + property_value = getattr(data_point, property_name, None) - for values in embeddable_values: - if len(values) > 0: - values_list = [] - for i in range(len(values)): - values_list.append(vectorized_values[index + i]) + if property_value is not None: + embeddable_values.append(property_value) + vector_map[key][property_name] = len(embeddable_values) - 1 + else: + vector_map[key][property_name] = None - positioned_vectorized_values.append(values_list) - index += len(values) - else: - positioned_vectorized_values.append(None) + vectorized_values = await self.embed_data(embeddable_values) queries = [ await self.create_data_point_query( data_point, - positioned_vectorized_values[index], - ) for index, data_point in enumerate(data_points) + [ + vectorized_values[vector_map[str(data_point.id)][property_name]] \ + for property_name in DataPoint.get_embeddable_property_names(data_point) + ], + ) for data_point in data_points ] for query in queries: @@ -182,18 +185,21 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): return [result["edge_exists"] for 
result in results] - async def retrieve(self, data_point_ids: list[str]): - return self.query( + async def retrieve(self, data_point_ids: list[UUID]): + result = self.query( f"MATCH (node) WHERE node.id IN $node_ids RETURN node", { - "node_ids": data_point_ids, + "node_ids": [str(data_point) for data_point in data_point_ids], }, ) + return result.result_set - async def extract_node(self, data_point_id: str): - return await self.retrieve([data_point_id]) + async def extract_node(self, data_point_id: UUID): + result = await self.retrieve([data_point_id]) + result = result[0][0] if len(result[0]) > 0 else None + return result.properties if result else None - async def extract_nodes(self, data_point_ids: list[str]): + async def extract_nodes(self, data_point_ids: list[UUID]): return await self.retrieve(data_point_ids) async def get_connections(self, node_id: UUID) -> list: @@ -296,11 +302,11 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): return (nodes, edges) - async def delete_data_points(self, collection_name: str, data_point_ids: list[str]): + async def delete_data_points(self, collection_name: str, data_point_ids: list[UUID]): return self.query( f"MATCH (node) WHERE node.id IN $node_ids DETACH DELETE node", { - "node_ids": data_point_ids, + "node_ids": [str(data_point) for data_point in data_point_ids], }, ) @@ -324,4 +330,4 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): print(f"Error deleting graph: {e}") async def prune(self): - self.delete_graph() + await self.delete_graph() diff --git a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py index ab1274fb8..de30640e5 100644 --- a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +++ b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py @@ -1,9 +1,10 @@ -import asyncio +import logging from typing import List, Optional import litellm 
from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine litellm.set_verbose = False +logger = logging.getLogger("LiteLLMEmbeddingEngine") class LiteLLMEmbeddingEngine(EmbeddingEngine): api_key: str @@ -28,13 +29,17 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine): async def embed_text(self, text: List[str]) -> List[List[float]]: async def get_embedding(text_): - response = await litellm.aembedding( - self.model, - input = text_, - api_key = self.api_key, - api_base = self.endpoint, - api_version = self.api_version - ) + try: + response = await litellm.aembedding( + self.model, + input = text_, + api_key = self.api_key, + api_base = self.endpoint, + api_version = self.api_version + ) + except litellm.exceptions.BadRequestError as error: + logger.error("Error embedding text: %s", str(error)) + raise error return [data["embedding"] for data in response.data] diff --git a/cognee/infrastructure/engine/models/DataPoint.py b/cognee/infrastructure/engine/models/DataPoint.py index 5d6b1d513..b76971f34 100644 --- a/cognee/infrastructure/engine/models/DataPoint.py +++ b/cognee/infrastructure/engine/models/DataPoint.py @@ -35,3 +35,7 @@ class DataPoint(BaseModel): return [getattr(data_point, field, None) for field in data_point._metadata["index_fields"]] return [] + + @classmethod + def get_embeddable_property_names(self, data_point): + return data_point._metadata["index_fields"] or [] diff --git a/cognee/modules/graph/utils/get_graph_from_model.py b/cognee/modules/graph/utils/get_graph_from_model.py index ec1da85e3..7bd300df1 100644 --- a/cognee/modules/graph/utils/get_graph_from_model.py +++ b/cognee/modules/graph/utils/get_graph_from_model.py @@ -118,17 +118,17 @@ async def get_graph_from_model( data_point_properties[field_name] = field_value - SimpleDataPointModel = copy_model( - type(data_point), - include_fields = { - "_metadata": (dict, data_point._metadata), - "__tablename__": data_point.__tablename__, - }, - exclude_fields = 
excluded_properties, - ) - if include_root: + SimpleDataPointModel = copy_model( + type(data_point), + include_fields = { + "_metadata": (dict, data_point._metadata), + "__tablename__": data_point.__tablename__, + }, + exclude_fields = excluded_properties, + ) nodes.append(SimpleDataPointModel(**data_point_properties)) + added_nodes[str(data_point.id)] = True return nodes, edges diff --git a/cognee/shared/CodeGraphEntities.py b/cognee/shared/CodeGraphEntities.py index 4811106e5..d709b8d3a 100644 --- a/cognee/shared/CodeGraphEntities.py +++ b/cognee/shared/CodeGraphEntities.py @@ -19,7 +19,6 @@ class CodeFile(DataPoint): } class CodePart(DataPoint): - type: str # part_of: Optional[CodeFile] source_code: str type: Optional[str] = "CodePart" diff --git a/cognee/tasks/repo_processor/enrich_dependency_graph.py b/cognee/tasks/repo_processor/enrich_dependency_graph.py index ba222ef3f..03db7b0bb 100644 --- a/cognee/tasks/repo_processor/enrich_dependency_graph.py +++ b/cognee/tasks/repo_processor/enrich_dependency_graph.py @@ -1,6 +1,5 @@ -import asyncio import networkx as nx -from typing import Dict, List +from typing import AsyncGenerator, Dict, List from tqdm.asyncio import tqdm from cognee.infrastructure.engine import DataPoint @@ -66,20 +65,25 @@ async def node_enrich_and_connect( if desc_id not in topological_order[:topological_rank + 1]: continue + desc = None if desc_id in data_points_map: desc = data_points_map[desc_id] else: node_data = await graph_engine.extract_node(desc_id) - desc = convert_node_to_data_point(node_data) + try: + desc = convert_node_to_data_point(node_data) + except Exception: + pass - new_connections.append(desc) + if desc is not None: + new_connections.append(desc) node.depends_directly_on = node.depends_directly_on or [] node.depends_directly_on.extend(new_connections) -async def enrich_dependency_graph(data_points: list[DataPoint]) -> list[DataPoint]: +async def enrich_dependency_graph(data_points: list[DataPoint]) -> 
AsyncGenerator[list[DataPoint], None]: """Enriches the graph with topological ranks and 'depends_on' edges.""" nodes = [] edges = [] @@ -108,17 +112,18 @@ async def enrich_dependency_graph(data_points: list[DataPoint]) -> list[DataPoin # data_points.append(node_enrich_and_connect(graph, topological_order, node)) data_points_map = {data_point.id: data_point for data_point in data_points} - data_points_futures = [] + # data_points_futures = [] for data_point in tqdm(data_points, desc = "Enriching dependency graph", unit = "data_point"): if data_point.id not in node_rank_map: continue if isinstance(data_point, CodeFile): - data_points_futures.append(node_enrich_and_connect(graph, topological_order, data_point, data_points_map)) + # data_points_futures.append(node_enrich_and_connect(graph, topological_order, data_point, data_points_map)) + await node_enrich_and_connect(graph, topological_order, data_point, data_points_map) - # yield data_point + yield data_point - await asyncio.gather(*data_points_futures) + # await asyncio.gather(*data_points_futures) - return data_points + # return data_points diff --git a/cognee/tasks/repo_processor/expand_dependency_graph.py b/cognee/tasks/repo_processor/expand_dependency_graph.py index 722bfa5c6..43a451bd6 100644 --- a/cognee/tasks/repo_processor/expand_dependency_graph.py +++ b/cognee/tasks/repo_processor/expand_dependency_graph.py @@ -1,3 +1,4 @@ +from typing import AsyncGenerator from uuid import NAMESPACE_OID, uuid5 # from tqdm import tqdm from cognee.infrastructure.engine import DataPoint @@ -53,11 +54,12 @@ def _process_single_node(code_file: CodeFile) -> None: _add_code_parts_nodes_and_edges(code_file, part_type, code_parts) -async def expand_dependency_graph(data_points: list[DataPoint]) -> list[DataPoint]: +async def expand_dependency_graph(data_points: list[DataPoint]) -> AsyncGenerator[list[DataPoint], None]: """Process Python file nodes, adding code part nodes and edges.""" # for data_point in tqdm(data_points, desc = 
"Expand dependency graph", unit = "data_point"): for data_point in data_points: if isinstance(data_point, CodeFile): _process_single_node(data_point) + yield data_point - return data_points + # return data_points diff --git a/cognee/tasks/repo_processor/get_repo_file_dependencies.py b/cognee/tasks/repo_processor/get_repo_file_dependencies.py index 58f3857a9..9ac4e9f2e 100644 --- a/cognee/tasks/repo_processor/get_repo_file_dependencies.py +++ b/cognee/tasks/repo_processor/get_repo_file_dependencies.py @@ -1,4 +1,5 @@ import os +from typing import AsyncGenerator from uuid import NAMESPACE_OID, uuid5 import aiofiles from tqdm.asyncio import tqdm @@ -44,7 +45,7 @@ def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bo return (file_path, dependency, {"relation": "depends_directly_on"}) -async def get_repo_file_dependencies(repo_path: str) -> list[DataPoint]: +async def get_repo_file_dependencies(repo_path: str) -> AsyncGenerator[list[DataPoint], None]: """Generate a dependency graph for Python files in the given repository path.""" py_files_dict = await get_py_files_dict(repo_path) @@ -53,7 +54,8 @@ async def get_repo_file_dependencies(repo_path: str) -> list[DataPoint]: path = repo_path, ) - data_points = [repo] + # data_points = [repo] + yield repo # dependency_graph = nx.DiGraph() @@ -66,7 +68,8 @@ async def get_repo_file_dependencies(repo_path: str) -> list[DataPoint]: dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) - data_points.append(CodeFile( + # data_points.append() + yield CodeFile( id = uuid5(NAMESPACE_OID, file_path), source_code = source_code, extracted_id = file_path, @@ -78,10 +81,10 @@ async def get_repo_file_dependencies(repo_path: str) -> list[DataPoint]: part_of = repo, ) for dependency in dependencies ] if len(dependencies) else None, - )) + ) # dependency_edges = [get_edge(file_path, dependency, repo_path) for dependency in dependencies] # 
dependency_graph.add_edges_from(dependency_edges) - return data_points + # return data_points # return dependency_graph diff --git a/cognee/tests/test_falkordb.py b/cognee/tests/test_falkordb.py index 36c029cf7..25fe81a75 100755 --- a/cognee/tests/test_falkordb.py +++ b/cognee/tests/test_falkordb.py @@ -8,9 +8,9 @@ from cognee.shared.utils import render_graph logging.basicConfig(level = logging.DEBUG) async def main(): - data_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_library")).resolve()) + data_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_falkordb")).resolve()) cognee.config.data_root_directory(data_directory_path) - cognee_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_library")).resolve()) + cognee_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_falkordb")).resolve()) cognee.config.system_root_directory(cognee_directory_path) await cognee.prune.prune_data() diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 0a4806e3f..1dd0e58ab 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -30,7 +30,6 @@ from evals.eval_utils import download_github_repo from evals.eval_utils import delete_repo async def generate_patch_with_cognee(instance): - await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -44,10 +43,10 @@ async def generate_patch_with_cognee(instance): tasks = [ Task(get_repo_file_dependencies), - Task(add_data_points), - Task(enrich_dependency_graph), - Task(expand_dependency_graph), - Task(add_data_points), + Task(add_data_points, task_config = { "batch_size": 50 }), + Task(enrich_dependency_graph, task_config = { "batch_size": 50 }), + Task(expand_dependency_graph, task_config = { "batch_size": 50 }), + Task(add_data_points, task_config = { "batch_size": 50 }), # Task(summarize_code, summarization_model = 
SummarizedContent), ] @@ -58,7 +57,7 @@ async def generate_patch_with_cognee(instance): print('Here we have the repo under the repo_path') - await render_graph() + await render_graph(None, include_labels = True, include_nodes = True) problem_statement = instance['problem_statement'] instructions = read_query_prompt("patch_gen_instructions.txt") From c5f3314c856a0ac41ec4200d83f23797c41ccfd9 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:10:03 +0100 Subject: [PATCH 11/52] Add Metadata table and read write delete functions --- cognee/modules/data/models/Data.py | 1 + cognee/modules/ingestion/models/metadata.py | 13 ++++++ .../ingestion/operations/delete_metadata.py | 16 ++++++++ .../ingestion/operations/get_metadata.py | 13 ++++++ .../ingestion/operations/write_metadata.py | 41 +++++++++++++++++++ .../ingestion/ingest_data_with_metadata.py | 7 +++- ...save_data_item_with_metadata_to_storage.py | 9 ++-- 7 files changed, 94 insertions(+), 6 deletions(-) create mode 100644 cognee/modules/ingestion/models/metadata.py create mode 100644 cognee/modules/ingestion/operations/delete_metadata.py create mode 100644 cognee/modules/ingestion/operations/get_metadata.py create mode 100644 cognee/modules/ingestion/operations/write_metadata.py diff --git a/cognee/modules/data/models/Data.py b/cognee/modules/data/models/Data.py index 2e9745600..6601580eb 100644 --- a/cognee/modules/data/models/Data.py +++ b/cognee/modules/data/models/Data.py @@ -15,6 +15,7 @@ class Data(Base): extension = Column(String) mime_type = Column(String) raw_data_location = Column(String) + metadata_id = Column(UUID) created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc)) updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc)) diff --git a/cognee/modules/ingestion/models/metadata.py b/cognee/modules/ingestion/models/metadata.py new file mode 100644 index 000000000..29d6cfaf2 --- /dev/null +++ 
b/cognee/modules/ingestion/models/metadata.py @@ -0,0 +1,13 @@ +from uuid import uuid4 +from datetime import datetime, timezone +from sqlalchemy import Column, DateTime, String, UUID +from cognee.infrastructure.databases.relational import Base + +class Metadata(Base): + __tablename__ = "queries" + + id = Column(UUID, primary_key = True, default = uuid4) + metadata = Column(String) + + created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc)) + updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc)) diff --git a/cognee/modules/ingestion/operations/delete_metadata.py b/cognee/modules/ingestion/operations/delete_metadata.py new file mode 100644 index 000000000..dc6bcd85f --- /dev/null +++ b/cognee/modules/ingestion/operations/delete_metadata.py @@ -0,0 +1,16 @@ +import warnings +from uuid import UUID +from sqlalchemy import select +from cognee.infrastructure.databases.relational import get_relational_engine +from ..models.Metadata import Metadata + + +async def delete_metadata(metadata_id: UUID): + db_engine = get_relational_engine() + async with db_engine.get_async_session() as session: + metadata = await session.get(Metadata, metadata_id) + if metadata is None: + warnings.warn(f"metadata for metadata_id: {metadata_id} not found") + + session.delete(metadata) + session.commit() \ No newline at end of file diff --git a/cognee/modules/ingestion/operations/get_metadata.py b/cognee/modules/ingestion/operations/get_metadata.py new file mode 100644 index 000000000..047bc4906 --- /dev/null +++ b/cognee/modules/ingestion/operations/get_metadata.py @@ -0,0 +1,13 @@ +import json +from uuid import UUID +from sqlalchemy import select +from cognee.infrastructure.databases.relational import get_relational_engine +from ..models.Metadata import Metadata + +async def get_metadata(metadata_id: UUID) -> Metadata: + db_engine = get_relational_engine() + + async with db_engine.get_async_session() as session: + 
metadata = await session.get(Metadata, metadata_id) + + return json.loads(metadata.metadata) diff --git a/cognee/modules/ingestion/operations/write_metadata.py b/cognee/modules/ingestion/operations/write_metadata.py new file mode 100644 index 000000000..b97c3cf85 --- /dev/null +++ b/cognee/modules/ingestion/operations/write_metadata.py @@ -0,0 +1,41 @@ +import json +import inspect +import warnings +import re +from typing import Any + +from uuid import UUID +from cognee.infrastructure.databases.relational import get_relational_engine +from ..models.Metadata import Metadata + +async def write_metadata(data_item: Any) -> UUID: + metadata_dict = get_metadata_dict(data_item) + db_engine = get_relational_engine() + async with db_engine.get_async_session() as session: + metadata = Metadata( + metadata = json.dumps(metadata_dict), + metadata_source = parse_type(type(data_item)) + ) + session.add(metadata) + await session.commit() + + return metadata.id + +def parse_type(type_: Any) -> str: + pattern = r".+'([\w_\.]+)'" + match = re.search(pattern, str(type_)) + if match: + return(match.group(1)) + else: + raise Exception(f"type: {type_} could not be parsed") + + +def get_metadata_dict(metadata: Any) -> dict[str, Any]: + if hasattr(metadata, "dict") and inspect.ismethod(getattr(metadata, "dict")): + return(metadata.dict()) + else: + warnings.warn(f"metadata of type {type(metadata)}: {str(metadata)[:20]}... does not have dict method. 
Defaulting to string method") + try: + return({"content": str(metadata)}) + except Exception as e: + raise Exception(f"Could not cast metadata to string: {e}") \ No newline at end of file diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index e5a50c13b..577dca57e 100644 --- a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -8,6 +8,7 @@ from cognee.modules.data.methods import create_dataset from cognee.modules.users.permissions.methods import give_permission_on_document from .get_dlt_destination import get_dlt_destination from .save_data_item_with_metadata_to_storage import save_data_item_with_metadata_to_storage +from cognee.modules.ingestion.operations.delete_metadata import delete_metadata async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): destination = get_dlt_destination() @@ -26,7 +27,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): # Process data for data_item in data: - file_path = save_data_item_with_metadata_to_storage(data_item, dataset_name) + file_path, metadata_id = await save_data_item_with_metadata_to_storage(data_item, dataset_name) # Ingest data and add metadata with open(file_path.replace("file://", ""), mode = "rb") as file: @@ -49,11 +50,12 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): )).scalar_one_or_none() if data_point is not None: + await delete_metadata(data_point.metadata_id) data_point.name = file_metadata["name"] data_point.raw_data_location = file_metadata["file_path"] data_point.extension = file_metadata["extension"] data_point.mime_type = file_metadata["mime_type"] - + data_point.metadata_id = metadata_id await session.merge(data_point) await session.commit() else: @@ -63,6 +65,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): raw_data_location = 
file_metadata["file_path"], extension = file_metadata["extension"], mime_type = file_metadata["mime_type"], + metadata_id = metadata_id ) dataset.data.append(data_point) diff --git a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py index ec29edb89..108bcd21b 100644 --- a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py +++ b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py @@ -1,14 +1,15 @@ from typing import Union, BinaryIO, Any from cognee.modules.ingestion import save_data_to_file +from cognee.modules.ingestion.operations.write_metadata import write_metadata def save_data_item_with_metadata_to_storage(data_item: Union[BinaryIO, str, Any], dataset_name: str) -> str: # Dynamic import is used because the llama_index module is optional. # For the same reason Any is accepted as a data item - from llama_index.core import Document - from .transform_data import get_data_from_llama_index + metadata_id = write_metadata(data_item) # Check if data is of type Document or any of it's subclasses - if isinstance(data_item, Document): + if str(type(data_item)).startswith("llama_index"): + from .transform_data import get_data_from_llama_index file_path = get_data_from_llama_index(data_item, dataset_name) # data is a file object coming from upload. 
@@ -25,4 +26,4 @@ def save_data_item_with_metadata_to_storage(data_item: Union[BinaryIO, str, Any] else: raise ValueError(f"Data type not supported: {type(data_item)}") - return file_path \ No newline at end of file + return file_path, metadata_id \ No newline at end of file From fd987ed61ec0d1f148e5798dce38001d5f75ebf9 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:13:08 +0100 Subject: [PATCH 12/52] Add autoformatting --- cognee/modules/data/models/Data.py | 30 +++++---- cognee/modules/ingestion/models/metadata.py | 19 ++++-- .../ingestion/operations/delete_metadata.py | 7 ++- .../ingestion/operations/get_metadata.py | 4 ++ .../ingestion/operations/write_metadata.py | 25 +++++--- .../ingestion/ingest_data_with_metadata.py | 62 +++++++++++-------- ...save_data_item_with_metadata_to_storage.py | 17 +++-- 7 files changed, 103 insertions(+), 61 deletions(-) diff --git a/cognee/modules/data/models/Data.py b/cognee/modules/data/models/Data.py index 6601580eb..55991541d 100644 --- a/cognee/modules/data/models/Data.py +++ b/cognee/modules/data/models/Data.py @@ -1,15 +1,19 @@ -from uuid import uuid4 -from typing import List from datetime import datetime, timezone -from sqlalchemy.orm import relationship, Mapped -from sqlalchemy import Column, String, DateTime, UUID +from typing import List +from uuid import uuid4 + +from sqlalchemy import UUID, Column, DateTime, String +from sqlalchemy.orm import Mapped, relationship + from cognee.infrastructure.databases.relational import Base + from .DatasetData import DatasetData + class Data(Base): __tablename__ = "data" - id = Column(UUID, primary_key = True, default = uuid4) + id = Column(UUID, primary_key=True, default=uuid4) name = Column(String) extension = Column(String) @@ -17,15 +21,19 @@ class Data(Base): raw_data_location = Column(String) metadata_id = Column(UUID) - created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc)) - updated_at = Column(DateTime(timezone = 
True), onupdate = lambda: datetime.now(timezone.utc)) + created_at = Column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc) + ) + updated_at = Column( + DateTime(timezone=True), onupdate=lambda: datetime.now(timezone.utc) + ) datasets: Mapped[List["Dataset"]] = relationship( "Dataset", - secondary = DatasetData.__tablename__, - back_populates = "data", - lazy = "noload", - cascade="all, delete" + secondary=DatasetData.__tablename__, + back_populates="data", + lazy="noload", + cascade="all, delete", ) def to_json(self) -> dict: diff --git a/cognee/modules/ingestion/models/metadata.py b/cognee/modules/ingestion/models/metadata.py index 29d6cfaf2..ab9fe1e01 100644 --- a/cognee/modules/ingestion/models/metadata.py +++ b/cognee/modules/ingestion/models/metadata.py @@ -1,13 +1,20 @@ -from uuid import uuid4 from datetime import datetime, timezone -from sqlalchemy import Column, DateTime, String, UUID +from uuid import uuid4 + +from sqlalchemy import UUID, Column, DateTime, String + from cognee.infrastructure.databases.relational import Base + class Metadata(Base): __tablename__ = "queries" - id = Column(UUID, primary_key = True, default = uuid4) + id = Column(UUID, primary_key=True, default=uuid4) metadata = Column(String) - - created_at = Column(DateTime(timezone = True), default = lambda: datetime.now(timezone.utc)) - updated_at = Column(DateTime(timezone = True), onupdate = lambda: datetime.now(timezone.utc)) + + created_at = Column( + DateTime(timezone=True), default=lambda: datetime.now(timezone.utc) + ) + updated_at = Column( + DateTime(timezone=True), onupdate=lambda: datetime.now(timezone.utc) + ) diff --git a/cognee/modules/ingestion/operations/delete_metadata.py b/cognee/modules/ingestion/operations/delete_metadata.py index dc6bcd85f..df94f52ed 100644 --- a/cognee/modules/ingestion/operations/delete_metadata.py +++ b/cognee/modules/ingestion/operations/delete_metadata.py @@ -1,7 +1,10 @@ import warnings from uuid import UUID + from 
sqlalchemy import select + from cognee.infrastructure.databases.relational import get_relational_engine + from ..models.Metadata import Metadata @@ -11,6 +14,6 @@ async def delete_metadata(metadata_id: UUID): metadata = await session.get(Metadata, metadata_id) if metadata is None: warnings.warn(f"metadata for metadata_id: {metadata_id} not found") - + session.delete(metadata) - session.commit() \ No newline at end of file + session.commit() diff --git a/cognee/modules/ingestion/operations/get_metadata.py b/cognee/modules/ingestion/operations/get_metadata.py index 047bc4906..9034b327d 100644 --- a/cognee/modules/ingestion/operations/get_metadata.py +++ b/cognee/modules/ingestion/operations/get_metadata.py @@ -1,9 +1,13 @@ import json from uuid import UUID + from sqlalchemy import select + from cognee.infrastructure.databases.relational import get_relational_engine + from ..models.Metadata import Metadata + async def get_metadata(metadata_id: UUID) -> Metadata: db_engine = get_relational_engine() diff --git a/cognee/modules/ingestion/operations/write_metadata.py b/cognee/modules/ingestion/operations/write_metadata.py index b97c3cf85..cefed21de 100644 --- a/cognee/modules/ingestion/operations/write_metadata.py +++ b/cognee/modules/ingestion/operations/write_metadata.py @@ -1,41 +1,46 @@ -import json import inspect -import warnings +import json import re +import warnings from typing import Any - from uuid import UUID + from cognee.infrastructure.databases.relational import get_relational_engine + from ..models.Metadata import Metadata + async def write_metadata(data_item: Any) -> UUID: metadata_dict = get_metadata_dict(data_item) db_engine = get_relational_engine() async with db_engine.get_async_session() as session: metadata = Metadata( - metadata = json.dumps(metadata_dict), - metadata_source = parse_type(type(data_item)) + metadata=json.dumps(metadata_dict), + metadata_source=parse_type(type(data_item)), ) session.add(metadata) await session.commit() return 
metadata.id + def parse_type(type_: Any) -> str: pattern = r".+'([\w_\.]+)'" match = re.search(pattern, str(type_)) if match: - return(match.group(1)) + return match.group(1) else: raise Exception(f"type: {type_} could not be parsed") def get_metadata_dict(metadata: Any) -> dict[str, Any]: if hasattr(metadata, "dict") and inspect.ismethod(getattr(metadata, "dict")): - return(metadata.dict()) + return metadata.dict() else: - warnings.warn(f"metadata of type {type(metadata)}: {str(metadata)[:20]}... does not have dict method. Defaulting to string method") + warnings.warn( + f"metadata of type {type(metadata)}: {str(metadata)[:20]}... does not have dict method. Defaulting to string method" + ) try: - return({"content": str(metadata)}) + return {"content": str(metadata)} except Exception as e: - raise Exception(f"Could not cast metadata to string: {e}") \ No newline at end of file + raise Exception(f"Could not cast metadata to string: {e}") diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index 577dca57e..07f5c8115 100644 --- a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -1,24 +1,30 @@ -import dlt -import cognee.modules.ingestion as ingestion from typing import Any -from cognee.shared.utils import send_telemetry -from cognee.modules.users.models import User + +import dlt + +import cognee.modules.ingestion as ingestion from cognee.infrastructure.databases.relational import get_relational_engine from cognee.modules.data.methods import create_dataset -from cognee.modules.users.permissions.methods import give_permission_on_document -from .get_dlt_destination import get_dlt_destination -from .save_data_item_with_metadata_to_storage import save_data_item_with_metadata_to_storage from cognee.modules.ingestion.operations.delete_metadata import delete_metadata +from cognee.modules.users.models import User +from 
cognee.modules.users.permissions.methods import give_permission_on_document +from cognee.shared.utils import send_telemetry + +from .get_dlt_destination import get_dlt_destination +from .save_data_item_with_metadata_to_storage import ( + save_data_item_with_metadata_to_storage, +) + async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): destination = get_dlt_destination() pipeline = dlt.pipeline( - pipeline_name = "file_load_from_filesystem", - destination = destination, + pipeline_name="file_load_from_filesystem", + destination=destination, ) - @dlt.resource(standalone = True, merge_key = "id") + @dlt.resource(standalone=True, merge_key="id") async def data_resources(data: Any, user: User): if not isinstance(data, list): # Convert data to a list as we work with lists further down. @@ -27,10 +33,12 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): # Process data for data_item in data: - file_path, metadata_id = await save_data_item_with_metadata_to_storage(data_item, dataset_name) + file_path, metadata_id = await save_data_item_with_metadata_to_storage( + data_item, dataset_name + ) # Ingest data and add metadata - with open(file_path.replace("file://", ""), mode = "rb") as file: + with open(file_path.replace("file://", ""), mode="rb") as file: classified_data = ingestion.classify(file) data_id = ingestion.identify(classified_data) @@ -38,6 +46,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): file_metadata = classified_data.get_metadata() from sqlalchemy import select + from cognee.modules.data.models import Data db_engine = get_relational_engine() @@ -45,9 +54,9 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): async with db_engine.get_async_session() as session: dataset = await create_dataset(dataset_name, user.id, session) - data_point = (await session.execute( - select(Data).filter(Data.id == data_id) - )).scalar_one_or_none() + data_point 
= ( + await session.execute(select(Data).filter(Data.id == data_id)) + ).scalar_one_or_none() if data_point is not None: await delete_metadata(data_point.metadata_id) @@ -60,12 +69,12 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): await session.commit() else: data_point = Data( - id = data_id, - name = file_metadata["name"], - raw_data_location = file_metadata["file_path"], - extension = file_metadata["extension"], - mime_type = file_metadata["mime_type"], - metadata_id = metadata_id + id=data_id, + name=file_metadata["name"], + raw_data_location=file_metadata["file_path"], + extension=file_metadata["extension"], + mime_type=file_metadata["mime_type"], + metadata_id=metadata_id, ) dataset.data.append(data_point) @@ -82,14 +91,13 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): await give_permission_on_document(user, data_id, "read") await give_permission_on_document(user, data_id, "write") - - send_telemetry("cognee.add EXECUTION STARTED", user_id = user.id) + send_telemetry("cognee.add EXECUTION STARTED", user_id=user.id) run_info = pipeline.run( data_resources(data, user), - table_name = "file_metadata", - dataset_name = dataset_name, - write_disposition = "merge", + table_name="file_metadata", + dataset_name=dataset_name, + write_disposition="merge", ) - send_telemetry("cognee.add EXECUTION COMPLETED", user_id = user.id) + send_telemetry("cognee.add EXECUTION COMPLETED", user_id=user.id) return run_info diff --git a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py index 108bcd21b..9acf0d413 100644 --- a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py +++ b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py @@ -1,20 +1,27 @@ -from typing import Union, BinaryIO, Any +from typing import Any, BinaryIO, Union + from cognee.modules.ingestion import save_data_to_file from 
cognee.modules.ingestion.operations.write_metadata import write_metadata -def save_data_item_with_metadata_to_storage(data_item: Union[BinaryIO, str, Any], dataset_name: str) -> str: - # Dynamic import is used because the llama_index module is optional. + +def save_data_item_with_metadata_to_storage( + data_item: Union[BinaryIO, str, Any], dataset_name: str +) -> str: + # Dynamic import is used because the llama_index module is optional. # For the same reason Any is accepted as a data item metadata_id = write_metadata(data_item) # Check if data is of type Document or any of it's subclasses if str(type(data_item)).startswith("llama_index"): from .transform_data import get_data_from_llama_index + file_path = get_data_from_llama_index(data_item, dataset_name) # data is a file object coming from upload. elif hasattr(data_item, "file"): - file_path = save_data_to_file(data_item.file, dataset_name, filename=data_item.filename) + file_path = save_data_to_file( + data_item.file, dataset_name, filename=data_item.filename + ) elif isinstance(data_item, str): # data is a file path @@ -26,4 +33,4 @@ def save_data_item_with_metadata_to_storage(data_item: Union[BinaryIO, str, Any] else: raise ValueError(f"Data type not supported: {type(data_item)}") - return file_path, metadata_id \ No newline at end of file + return file_path, metadata_id From 7324564655c70dc4a3a038959283f3d697893f7e Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:30:25 +0100 Subject: [PATCH 13/52] Add metadata_id attribute to Document and DocumentChunk, make ingest_with_metadata default --- cognee/api/v1/add/add_v2.py | 5 ++--- cognee/modules/chunking/TextChunker.py | 3 +++ cognee/modules/data/processing/document_types/Document.py | 2 ++ cognee/tasks/documents/classify_documents.py | 1 + 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/cognee/api/v1/add/add_v2.py b/cognee/api/v1/add/add_v2.py index 9d6e33012..631d963e5 100644 --- a/cognee/api/v1/add/add_v2.py +++ 
b/cognee/api/v1/add/add_v2.py @@ -2,7 +2,7 @@ from typing import Union, BinaryIO from cognee.modules.users.models import User from cognee.modules.users.methods import get_default_user from cognee.modules.pipelines import run_tasks, Task -from cognee.tasks.ingestion import save_data_to_storage, ingest_data +from cognee.tasks.ingestion import ingest_data_with_metadata from cognee.infrastructure.databases.relational import create_db_and_tables as create_relational_db_and_tables from cognee.infrastructure.databases.vector.pgvector import create_db_and_tables as create_pgvector_db_and_tables @@ -14,8 +14,7 @@ async def add(data: Union[BinaryIO, list[BinaryIO], str, list[str]], dataset_nam user = await get_default_user() tasks = [ - Task(save_data_to_storage, dataset_name), - Task(ingest_data, dataset_name, user) + Task(ingest_data_with_metadata, dataset_name, user) ] pipeline = run_tasks(tasks, data, "add_pipeline") diff --git a/cognee/modules/chunking/TextChunker.py b/cognee/modules/chunking/TextChunker.py index f0a72b58a..24ed0b236 100644 --- a/cognee/modules/chunking/TextChunker.py +++ b/cognee/modules/chunking/TextChunker.py @@ -35,6 +35,7 @@ class TextChunker(): is_part_of = self.document, chunk_index = self.chunk_index, cut_type = chunk_data["cut_type"], + metadata_id = self.document.metadata_id ) paragraph_chunks = [] self.chunk_size = 0 @@ -48,6 +49,7 @@ class TextChunker(): is_part_of = self.document, chunk_index = self.chunk_index, cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], + metadata_id = self.document.metadata_id ) except Exception as e: print(e) @@ -65,6 +67,7 @@ class TextChunker(): is_part_of = self.document, chunk_index = self.chunk_index, cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], + metadata_id = self.document.metadata_id ) except Exception as e: print(e) diff --git a/cognee/modules/data/processing/document_types/Document.py b/cognee/modules/data/processing/document_types/Document.py index 
7d5545cfc..773fc30c8 100644 --- a/cognee/modules/data/processing/document_types/Document.py +++ b/cognee/modules/data/processing/document_types/Document.py @@ -1,9 +1,11 @@ from cognee.infrastructure.engine import DataPoint +from uuid import UUID class Document(DataPoint): type: str name: str raw_data_location: str + metadata_id: UUID def read(self, chunk_size: int) -> str: pass diff --git a/cognee/tasks/documents/classify_documents.py b/cognee/tasks/documents/classify_documents.py index 8ee87bcad..599b74e17 100644 --- a/cognee/tasks/documents/classify_documents.py +++ b/cognee/tasks/documents/classify_documents.py @@ -45,6 +45,7 @@ def classify_documents(data_documents: list[Data]) -> list[Document]: title=f"{data_item.name}.{data_item.extension}", raw_data_location=data_item.raw_data_location, name=data_item.name, + metadata_id=data_item.metadata_id ) for data_item in data_documents ] From cc0127a90e0ea7cbf0cf16df62dbbfbb425e6bc4 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:34:38 +0100 Subject: [PATCH 14/52] Fix Metadata file name --- cognee/modules/ingestion/models/{metadata.py => Metadata.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename cognee/modules/ingestion/models/{metadata.py => Metadata.py} (100%) diff --git a/cognee/modules/ingestion/models/metadata.py b/cognee/modules/ingestion/models/Metadata.py similarity index 100% rename from cognee/modules/ingestion/models/metadata.py rename to cognee/modules/ingestion/models/Metadata.py From 899275c25edb0c3814368a4d8560abe8e096d968 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:38:24 +0100 Subject: [PATCH 15/52] Rename metadata field to metadata_repr --- cognee/modules/ingestion/models/Metadata.py | 5 +++-- cognee/modules/ingestion/operations/write_metadata.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cognee/modules/ingestion/models/Metadata.py b/cognee/modules/ingestion/models/Metadata.py index ab9fe1e01..5d6333bc3 100644 
--- a/cognee/modules/ingestion/models/Metadata.py +++ b/cognee/modules/ingestion/models/Metadata.py @@ -7,10 +7,11 @@ from cognee.infrastructure.databases.relational import Base class Metadata(Base): - __tablename__ = "queries" + __tablename__ = "metadata_table" id = Column(UUID, primary_key=True, default=uuid4) - metadata = Column(String) + metadata_repr = Column(String) + metadata_source = Column(String) created_at = Column( DateTime(timezone=True), default=lambda: datetime.now(timezone.utc) diff --git a/cognee/modules/ingestion/operations/write_metadata.py b/cognee/modules/ingestion/operations/write_metadata.py index cefed21de..b1b40a3d4 100644 --- a/cognee/modules/ingestion/operations/write_metadata.py +++ b/cognee/modules/ingestion/operations/write_metadata.py @@ -15,7 +15,7 @@ async def write_metadata(data_item: Any) -> UUID: db_engine = get_relational_engine() async with db_engine.get_async_session() as session: metadata = Metadata( - metadata=json.dumps(metadata_dict), + metadata_repr=json.dumps(metadata_dict), metadata_source=parse_type(type(data_item)), ) session.add(metadata) From 20d721f5ca07468a40d173b9f588b413c520e80f Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 16:47:15 +0100 Subject: [PATCH 16/52] Add metadata_id field to documents in integration tests --- cognee/tests/integration/documents/AudioDocument_test.py | 2 +- cognee/tests/integration/documents/ImageDocument_test.py | 2 +- cognee/tests/integration/documents/PdfDocument_test.py | 2 +- cognee/tests/integration/documents/TextDocument_test.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cognee/tests/integration/documents/AudioDocument_test.py b/cognee/tests/integration/documents/AudioDocument_test.py index f133ef811..a35e3892b 100644 --- a/cognee/tests/integration/documents/AudioDocument_test.py +++ b/cognee/tests/integration/documents/AudioDocument_test.py @@ -27,7 +27,7 @@ TEST_TEXT = """ def test_AudioDocument(): document = AudioDocument( - 
id=uuid.uuid4(), name="audio-dummy-test", raw_data_location="" + id=uuid.uuid4(), name="audio-dummy-test", raw_data_location="", metadata_id=uuid.uuid4() ) with patch.object(AudioDocument, "create_transcript", return_value=TEST_TEXT): for ground_truth, paragraph_data in zip( diff --git a/cognee/tests/integration/documents/ImageDocument_test.py b/cognee/tests/integration/documents/ImageDocument_test.py index e9caf3634..9f5952c40 100644 --- a/cognee/tests/integration/documents/ImageDocument_test.py +++ b/cognee/tests/integration/documents/ImageDocument_test.py @@ -16,7 +16,7 @@ The commotion has attracted an audience: a murder of crows has gathered in the l def test_ImageDocument(): document = ImageDocument( - id=uuid.uuid4(), name="image-dummy-test", raw_data_location="" + id=uuid.uuid4(), name="image-dummy-test", raw_data_location="", metadata_id=uuid.uuid4() ) with patch.object(ImageDocument, "transcribe_image", return_value=TEST_TEXT): diff --git a/cognee/tests/integration/documents/PdfDocument_test.py b/cognee/tests/integration/documents/PdfDocument_test.py index d8ddbe23c..fbfe236db 100644 --- a/cognee/tests/integration/documents/PdfDocument_test.py +++ b/cognee/tests/integration/documents/PdfDocument_test.py @@ -17,7 +17,7 @@ def test_PdfDocument(): "artificial-intelligence.pdf", ) document = PdfDocument( - id=uuid.uuid4(), name="Test document.pdf", raw_data_location=test_file_path + id=uuid.uuid4(), name="Test document.pdf", raw_data_location=test_file_path, metadata_id=uuid.uuid4() ) for ground_truth, paragraph_data in zip( diff --git a/cognee/tests/integration/documents/TextDocument_test.py b/cognee/tests/integration/documents/TextDocument_test.py index ef7d42272..46adee094 100644 --- a/cognee/tests/integration/documents/TextDocument_test.py +++ b/cognee/tests/integration/documents/TextDocument_test.py @@ -29,7 +29,7 @@ def test_TextDocument(input_file, chunk_size): input_file, ) document = TextDocument( - id=uuid.uuid4(), name=input_file, 
raw_data_location=test_file_path + id=uuid.uuid4(), name=input_file, raw_data_location=test_file_path, metadata_id=uuid.uuid4() ) for ground_truth, paragraph_data in zip( From 5b5c1ea5c6b4be3ec5853d9434564fdfc8254e6f Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 17:53:43 +0100 Subject: [PATCH 17/52] Fix module import error --- cognee/tasks/ingestion/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cognee/tasks/ingestion/__init__.py b/cognee/tasks/ingestion/__init__.py index 56cab2756..f569267a1 100644 --- a/cognee/tasks/ingestion/__init__.py +++ b/cognee/tasks/ingestion/__init__.py @@ -2,3 +2,4 @@ from .ingest_data import ingest_data from .save_data_to_storage import save_data_to_storage from .save_data_item_to_storage import save_data_item_to_storage from .save_data_item_with_metadata_to_storage import save_data_item_with_metadata_to_storage +from .ingest_data_with_metadata import ingest_data_with_metadata From 9e93ea07946487d7b2f45177ea22ef964b87e1a1 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 09:20:44 +0100 Subject: [PATCH 18/52] Make save_data_item_with_metadata_to_storage async --- .../ingestion/save_data_item_with_metadata_to_storage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py index 9acf0d413..6695153d0 100644 --- a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py +++ b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py @@ -4,12 +4,12 @@ from cognee.modules.ingestion import save_data_to_file from cognee.modules.ingestion.operations.write_metadata import write_metadata -def save_data_item_with_metadata_to_storage( +async def save_data_item_with_metadata_to_storage( data_item: Union[BinaryIO, str, Any], dataset_name: str ) -> str: # Dynamic import is used because the llama_index module is optional. 
# For the same reason Any is accepted as a data item - metadata_id = write_metadata(data_item) + metadata_id = await write_metadata(data_item) # Check if data is of type Document or any of it's subclasses if str(type(data_item)).startswith("llama_index"): From 159985b5012d6d1a7381c6bde51fb090e6c0468e Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 11:27:17 +0100 Subject: [PATCH 19/52] Remove line in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ed4489fbf..1bb232b66 100644 --- a/README.md +++ b/README.md @@ -325,4 +325,4 @@ pip install cognee | Neo4j | Graph | Stable ✅ | | | NetworkX | Graph | Stable ✅ | | | FalkorDB | Vector/Graph | Unstable ❌ | | -| PGVector | Vector | Unstable ❌ | Postgres DB returns the Timeout error | +| PGVector | Vector | Unstable ❌ | Postgres DB returns the Timeout error | \ No newline at end of file From 80517f5117f7b66c12634d7326e814918c9600e3 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 11:27:55 +0100 Subject: [PATCH 20/52] Revert README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1bb232b66..ed4489fbf 100644 --- a/README.md +++ b/README.md @@ -325,4 +325,4 @@ pip install cognee | Neo4j | Graph | Stable ✅ | | | NetworkX | Graph | Stable ✅ | | | FalkorDB | Vector/Graph | Unstable ❌ | | -| PGVector | Vector | Unstable ❌ | Postgres DB returns the Timeout error | \ No newline at end of file +| PGVector | Vector | Unstable ❌ | Postgres DB returns the Timeout error | From aacba555c92132ed4d781062614fe7d7b3d66657 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 12:22:28 +0100 Subject: [PATCH 21/52] Remove passing of metadata_id to DocumentChunk --- cognee/modules/chunking/TextChunker.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/cognee/modules/chunking/TextChunker.py b/cognee/modules/chunking/TextChunker.py index 
24ed0b236..a98d36961 100644 --- a/cognee/modules/chunking/TextChunker.py +++ b/cognee/modules/chunking/TextChunker.py @@ -34,8 +34,7 @@ class TextChunker(): word_count = chunk_data["word_count"], is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = chunk_data["cut_type"], - metadata_id = self.document.metadata_id + cut_type = chunk_data["cut_type"] ) paragraph_chunks = [] self.chunk_size = 0 @@ -48,8 +47,7 @@ class TextChunker(): word_count = self.chunk_size, is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], - metadata_id = self.document.metadata_id + cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"] ) except Exception as e: print(e) @@ -66,8 +64,7 @@ class TextChunker(): word_count = self.chunk_size, is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], - metadata_id = self.document.metadata_id + cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"] ) except Exception as e: print(e) From 3d5cb7644a21d8e032dddd7dfb628fb2bccba573 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 12:50:39 +0100 Subject: [PATCH 22/52] Pass DocumentChunk metadata_id to _metadata field --- cognee/modules/chunking/TextChunker.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/cognee/modules/chunking/TextChunker.py b/cognee/modules/chunking/TextChunker.py index a98d36961..f38058b0e 100644 --- a/cognee/modules/chunking/TextChunker.py +++ b/cognee/modules/chunking/TextChunker.py @@ -34,7 +34,11 @@ class TextChunker(): word_count = chunk_data["word_count"], is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = chunk_data["cut_type"] + cut_type = chunk_data["cut_type"], + _metadata = { + "index_fields": ["text"], + "metadata_id": self.document.metadata_id + } ) paragraph_chunks = [] self.chunk_size = 0 @@ -47,7 
+51,11 @@ class TextChunker(): word_count = self.chunk_size, is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"] + cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], + _metadata = { + "index_fields": ["text"], + "metadata_id": self.document.metadata_id + } ) except Exception as e: print(e) @@ -64,7 +72,11 @@ class TextChunker(): word_count = self.chunk_size, is_part_of = self.document, chunk_index = self.chunk_index, - cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"] + cut_type = paragraph_chunks[len(paragraph_chunks) - 1]["cut_type"], + _metadata = { + "index_fields": ["text"], + "metadata_id": self.document.metadata_id + } ) except Exception as e: print(e) From 1679c746a3ea249562ce57dd6a0b47d7611e6268 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 14:15:03 +0100 Subject: [PATCH 23/52] Move class and functions to data.models --- cognee/modules/data/models/Data.py | 13 +++++++++++-- .../modules/{ingestion => data}/models/Metadata.py | 5 ++++- .../operations/delete_metadata.py | 0 .../{ingestion => data}/operations/get_metadata.py | 0 .../operations/write_metadata.py | 0 cognee/tasks/ingestion/ingest_data_with_metadata.py | 2 +- .../save_data_item_with_metadata_to_storage.py | 2 +- 7 files changed, 17 insertions(+), 5 deletions(-) rename cognee/modules/{ingestion => data}/models/Metadata.py (66%) rename cognee/modules/{ingestion => data}/operations/delete_metadata.py (100%) rename cognee/modules/{ingestion => data}/operations/get_metadata.py (100%) rename cognee/modules/{ingestion => data}/operations/write_metadata.py (100%) diff --git a/cognee/modules/data/models/Data.py b/cognee/modules/data/models/Data.py index 55991541d..7899b017f 100644 --- a/cognee/modules/data/models/Data.py +++ b/cognee/modules/data/models/Data.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Mapped, relationship from cognee.infrastructure.databases.relational 
import Base from .DatasetData import DatasetData - +from .Metadata import Metadata class Data(Base): __tablename__ = "data" @@ -28,7 +28,7 @@ class Data(Base): DateTime(timezone=True), onupdate=lambda: datetime.now(timezone.utc) ) - datasets: Mapped[List["Dataset"]] = relationship( + datasets = relationship( "Dataset", secondary=DatasetData.__tablename__, back_populates="data", @@ -36,6 +36,15 @@ class Data(Base): cascade="all, delete", ) + metadata = relationship( + "Metadata", + secondary=Metadata.__tablename__, + back_populates="data", + lazy="noload", + cascade="all, delete", + ) + + def to_json(self) -> dict: return { "id": str(self.id), diff --git a/cognee/modules/ingestion/models/Metadata.py b/cognee/modules/data/models/Metadata.py similarity index 66% rename from cognee/modules/ingestion/models/Metadata.py rename to cognee/modules/data/models/Metadata.py index 5d6333bc3..adf254941 100644 --- a/cognee/modules/ingestion/models/Metadata.py +++ b/cognee/modules/data/models/Metadata.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from uuid import uuid4 -from sqlalchemy import UUID, Column, DateTime, String +from sqlalchemy import UUID, Column, DateTime, String, ForeignKey, relationship from cognee.infrastructure.databases.relational import Base @@ -19,3 +19,6 @@ class Metadata(Base): updated_at = Column( DateTime(timezone=True), onupdate=lambda: datetime.now(timezone.utc) ) + + dataset_id = Column(UUID, ForeignKey("datasets.id", ondelete="CASCADE"), primary_key = True) + data_id = Column(UUID, ForeignKey("data.id", ondelete="CASCADE"), primary_key = True) \ No newline at end of file diff --git a/cognee/modules/ingestion/operations/delete_metadata.py b/cognee/modules/data/operations/delete_metadata.py similarity index 100% rename from cognee/modules/ingestion/operations/delete_metadata.py rename to cognee/modules/data/operations/delete_metadata.py diff --git a/cognee/modules/ingestion/operations/get_metadata.py 
b/cognee/modules/data/operations/get_metadata.py similarity index 100% rename from cognee/modules/ingestion/operations/get_metadata.py rename to cognee/modules/data/operations/get_metadata.py diff --git a/cognee/modules/ingestion/operations/write_metadata.py b/cognee/modules/data/operations/write_metadata.py similarity index 100% rename from cognee/modules/ingestion/operations/write_metadata.py rename to cognee/modules/data/operations/write_metadata.py diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index 07f5c8115..e7068a008 100644 --- a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -5,7 +5,7 @@ import dlt import cognee.modules.ingestion as ingestion from cognee.infrastructure.databases.relational import get_relational_engine from cognee.modules.data.methods import create_dataset -from cognee.modules.ingestion.operations.delete_metadata import delete_metadata +from cognee.modules.data.operations.delete_metadata import delete_metadata from cognee.modules.users.models import User from cognee.modules.users.permissions.methods import give_permission_on_document from cognee.shared.utils import send_telemetry diff --git a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py index 6695153d0..11512a1c4 100644 --- a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py +++ b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py @@ -1,7 +1,7 @@ from typing import Any, BinaryIO, Union from cognee.modules.ingestion import save_data_to_file -from cognee.modules.ingestion.operations.write_metadata import write_metadata +from cognee.modules.data.operations.write_metadata import write_metadata async def save_data_item_with_metadata_to_storage( From cd0e505ac00bf3432a0a23c6b0ee50415f6ed4ea Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: 
Wed, 27 Nov 2024 15:35:23 +0100 Subject: [PATCH 24/52] WIP --- cognee/modules/data/models/Data.py | 5 +---- cognee/modules/data/models/Metadata.py | 8 +++++--- cognee/modules/data/operations/write_metadata.py | 4 ++-- cognee/tasks/documents/classify_documents.py | 9 +++++---- .../tasks/ingestion/ingest_data_with_metadata.py | 15 +++++++-------- .../save_data_item_with_metadata_to_storage.py | 5 +---- 6 files changed, 21 insertions(+), 25 deletions(-) diff --git a/cognee/modules/data/models/Data.py b/cognee/modules/data/models/Data.py index 7899b017f..f1b033dd0 100644 --- a/cognee/modules/data/models/Data.py +++ b/cognee/modules/data/models/Data.py @@ -19,8 +19,6 @@ class Data(Base): extension = Column(String) mime_type = Column(String) raw_data_location = Column(String) - metadata_id = Column(UUID) - created_at = Column( DateTime(timezone=True), default=lambda: datetime.now(timezone.utc) ) @@ -36,9 +34,8 @@ class Data(Base): cascade="all, delete", ) - metadata = relationship( + metadata_relationship = relationship( "Metadata", - secondary=Metadata.__tablename__, back_populates="data", lazy="noload", cascade="all, delete", diff --git a/cognee/modules/data/models/Metadata.py b/cognee/modules/data/models/Metadata.py index adf254941..3ab30b38d 100644 --- a/cognee/modules/data/models/Metadata.py +++ b/cognee/modules/data/models/Metadata.py @@ -1,7 +1,8 @@ from datetime import datetime, timezone from uuid import uuid4 -from sqlalchemy import UUID, Column, DateTime, String, ForeignKey, relationship +from sqlalchemy import UUID, Column, DateTime, String, ForeignKey +from sqlalchemy.orm import relationship from cognee.infrastructure.databases.relational import Base @@ -20,5 +21,6 @@ class Metadata(Base): DateTime(timezone=True), onupdate=lambda: datetime.now(timezone.utc) ) - dataset_id = Column(UUID, ForeignKey("datasets.id", ondelete="CASCADE"), primary_key = True) - data_id = Column(UUID, ForeignKey("data.id", ondelete="CASCADE"), primary_key = True) \ No newline at end 
of file + data_id = Column(UUID, ForeignKey("data.id", ondelete="CASCADE"), primary_key = False) + data = relationship("Data", back_populates="metadata_relationship") + diff --git a/cognee/modules/data/operations/write_metadata.py b/cognee/modules/data/operations/write_metadata.py index b1b40a3d4..749aed831 100644 --- a/cognee/modules/data/operations/write_metadata.py +++ b/cognee/modules/data/operations/write_metadata.py @@ -10,18 +10,18 @@ from cognee.infrastructure.databases.relational import get_relational_engine from ..models.Metadata import Metadata -async def write_metadata(data_item: Any) -> UUID: +async def write_metadata(data_item: Any, data_id: UUID) -> UUID: metadata_dict = get_metadata_dict(data_item) db_engine = get_relational_engine() async with db_engine.get_async_session() as session: metadata = Metadata( metadata_repr=json.dumps(metadata_dict), metadata_source=parse_type(type(data_item)), + data_id=data_id ) session.add(metadata) await session.commit() - return metadata.id def parse_type(type_: Any) -> str: diff --git a/cognee/tasks/documents/classify_documents.py b/cognee/tasks/documents/classify_documents.py index 599b74e17..79ad8245f 100644 --- a/cognee/tasks/documents/classify_documents.py +++ b/cognee/tasks/documents/classify_documents.py @@ -39,14 +39,15 @@ EXTENSION_TO_DOCUMENT_CLASS = { def classify_documents(data_documents: list[Data]) -> list[Document]: - documents = [ - EXTENSION_TO_DOCUMENT_CLASS[data_item.extension]( + documents = [] + for data_item in data_documents: + document = EXTENSION_TO_DOCUMENT_CLASS[data_item.extension]( id=data_item.id, title=f"{data_item.name}.{data_item.extension}", raw_data_location=data_item.raw_data_location, name=data_item.name, metadata_id=data_item.metadata_id ) - for data_item in data_documents - ] + documents.append(document) + return documents diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index e7068a008..d2c91e607 100644 --- 
a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -9,13 +9,14 @@ from cognee.modules.data.operations.delete_metadata import delete_metadata from cognee.modules.users.models import User from cognee.modules.users.permissions.methods import give_permission_on_document from cognee.shared.utils import send_telemetry - +from cognee.modules.data.operations.write_metadata import write_metadata from .get_dlt_destination import get_dlt_destination from .save_data_item_with_metadata_to_storage import ( save_data_item_with_metadata_to_storage, ) + async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): destination = get_dlt_destination() @@ -32,8 +33,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): # Process data for data_item in data: - - file_path, metadata_id = await save_data_item_with_metadata_to_storage( + file_path = await save_data_item_with_metadata_to_storage( data_item, dataset_name ) @@ -64,21 +64,20 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): data_point.raw_data_location = file_metadata["file_path"] data_point.extension = file_metadata["extension"] data_point.mime_type = file_metadata["mime_type"] - data_point.metadata_id = metadata_id await session.merge(data_point) - await session.commit() else: data_point = Data( id=data_id, name=file_metadata["name"], raw_data_location=file_metadata["file_path"], extension=file_metadata["extension"], - mime_type=file_metadata["mime_type"], - metadata_id=metadata_id, + mime_type=file_metadata["mime_type"] ) dataset.data.append(data_point) - await session.commit() + await session.commit() + await write_metadata(data_item, data_point.id) + yield { "id": data_id, diff --git a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py index 11512a1c4..c07327238 100644 --- 
a/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py +++ b/cognee/tasks/ingestion/save_data_item_with_metadata_to_storage.py @@ -1,7 +1,6 @@ from typing import Any, BinaryIO, Union from cognee.modules.ingestion import save_data_to_file -from cognee.modules.data.operations.write_metadata import write_metadata async def save_data_item_with_metadata_to_storage( @@ -9,8 +8,6 @@ async def save_data_item_with_metadata_to_storage( ) -> str: # Dynamic import is used because the llama_index module is optional. # For the same reason Any is accepted as a data item - metadata_id = await write_metadata(data_item) - # Check if data is of type Document or any of it's subclasses if str(type(data_item)).startswith("llama_index"): from .transform_data import get_data_from_llama_index @@ -33,4 +30,4 @@ async def save_data_item_with_metadata_to_storage( else: raise ValueError(f"Data type not supported: {type(data_item)}") - return file_path, metadata_id + return file_path From 15802237e973711db1f08b5fbd831c24360e0c2b Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 16:27:55 +0100 Subject: [PATCH 25/52] Get metadata from metadata table --- cognee/modules/data/operations/get_metadata.py | 4 +++- cognee/modules/data/operations/write_metadata.py | 1 + cognee/tasks/documents/classify_documents.py | 6 ++++-- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/cognee/modules/data/operations/get_metadata.py b/cognee/modules/data/operations/get_metadata.py index 9034b327d..26637e383 100644 --- a/cognee/modules/data/operations/get_metadata.py +++ b/cognee/modules/data/operations/get_metadata.py @@ -14,4 +14,6 @@ async def get_metadata(metadata_id: UUID) -> Metadata: async with db_engine.get_async_session() as session: metadata = await session.get(Metadata, metadata_id) - return json.parse(metadata) + return metadata + + diff --git a/cognee/modules/data/operations/write_metadata.py b/cognee/modules/data/operations/write_metadata.py index 
749aed831..4b550a6bf 100644 --- a/cognee/modules/data/operations/write_metadata.py +++ b/cognee/modules/data/operations/write_metadata.py @@ -15,6 +15,7 @@ async def write_metadata(data_item: Any, data_id: UUID) -> UUID: db_engine = get_relational_engine() async with db_engine.get_async_session() as session: metadata = Metadata( + id=data_id, metadata_repr=json.dumps(metadata_dict), metadata_source=parse_type(type(data_item)), data_id=data_id diff --git a/cognee/tasks/documents/classify_documents.py b/cognee/tasks/documents/classify_documents.py index 79ad8245f..41ffc45bd 100644 --- a/cognee/tasks/documents/classify_documents.py +++ b/cognee/tasks/documents/classify_documents.py @@ -6,6 +6,7 @@ from cognee.modules.data.processing.document_types import ( ImageDocument, TextDocument, ) +from cognee.modules.data.operations.get_metadata import get_metadata EXTENSION_TO_DOCUMENT_CLASS = { "pdf": PdfDocument, # Text documents @@ -38,15 +39,16 @@ EXTENSION_TO_DOCUMENT_CLASS = { } -def classify_documents(data_documents: list[Data]) -> list[Document]: +async def classify_documents(data_documents: list[Data]) -> list[Document]: documents = [] for data_item in data_documents: + metadata = await get_metadata(data_item.id) document = EXTENSION_TO_DOCUMENT_CLASS[data_item.extension]( id=data_item.id, title=f"{data_item.name}.{data_item.extension}", raw_data_location=data_item.raw_data_location, name=data_item.name, - metadata_id=data_item.metadata_id + metadata_id=metadata.id ) documents.append(document) From d4e77636b5428591ec35560f316fba5264020f9a Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 16:53:53 +0100 Subject: [PATCH 26/52] Revert spaces around args --- .../ingestion/ingest_data_with_metadata.py | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index d2c91e607..573e2c3c1 100644 --- 
a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -25,7 +25,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): destination=destination, ) - @dlt.resource(standalone=True, merge_key="id") + @dlt.resource(standalone = True, merge_key = "id") async def data_resources(data: Any, user: User): if not isinstance(data, list): # Convert data to a list as we work with lists further down. @@ -38,7 +38,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): ) # Ingest data and add metadata - with open(file_path.replace("file://", ""), mode="rb") as file: + with open(file_path.replace("file://", ""), mode = "rb") as file: classified_data = ingestion.classify(file) data_id = ingestion.identify(classified_data) @@ -67,11 +67,11 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): await session.merge(data_point) else: data_point = Data( - id=data_id, - name=file_metadata["name"], - raw_data_location=file_metadata["file_path"], - extension=file_metadata["extension"], - mime_type=file_metadata["mime_type"] + id = data_id, + name = file_metadata["name"], + raw_data_location = file_metadata["file_path"], + extension = file_metadata["extension"], + mime_type = file_metadata["mime_type"] ) dataset.data.append(data_point) @@ -93,9 +93,9 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): send_telemetry("cognee.add EXECUTION STARTED", user_id=user.id) run_info = pipeline.run( data_resources(data, user), - table_name="file_metadata", - dataset_name=dataset_name, - write_disposition="merge", + table_name = "file_metadata", + dataset_name = dataset_name, + write_disposition = "merge", ) send_telemetry("cognee.add EXECUTION COMPLETED", user_id=user.id) From a2ff42332ed411512178ddc5f6ed798d5018db74 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Thu, 28 Nov 2024 11:49:28 +0100 Subject: [PATCH 27/52] 
DummyLLMAdapter WIP --- .../infrastructure/llm/anthropic/adapter.py | 15 ------ .../llm/generic_llm_api/adapter.py | 11 ----- cognee/infrastructure/llm/llm_interface.py | 14 +++++- cognee/infrastructure/llm/openai/adapter.py | 12 ----- .../llm/prompts/generate_cog_layers.txt | 14 ------ .../extraction/extract_cognitive_layers.py | 11 ----- profiling/util/DummyLLMAdapter.py | 47 +++++++++++++++++++ 7 files changed, 59 insertions(+), 65 deletions(-) delete mode 100644 cognee/infrastructure/llm/prompts/generate_cog_layers.txt delete mode 100644 cognee/modules/data/extraction/extract_cognitive_layers.py create mode 100644 profiling/util/DummyLLMAdapter.py diff --git a/cognee/infrastructure/llm/anthropic/adapter.py b/cognee/infrastructure/llm/anthropic/adapter.py index 8df59e3e5..7577bc12f 100644 --- a/cognee/infrastructure/llm/anthropic/adapter.py +++ b/cognee/infrastructure/llm/anthropic/adapter.py @@ -4,7 +4,6 @@ import instructor from tenacity import retry, stop_after_attempt import anthropic from cognee.infrastructure.llm.llm_interface import LLMInterface -from cognee.infrastructure.llm.prompts import read_query_prompt class AnthropicAdapter(LLMInterface): @@ -38,17 +37,3 @@ class AnthropicAdapter(LLMInterface): }], response_model = response_model, ) - - def show_prompt(self, text_input: str, system_prompt: str) -> str: - """Format and display the prompt for a user query.""" - - if not text_input: - text_input = "No user input provided." 
- if not system_prompt: - raise ValueError("No system prompt path provided.") - - system_prompt = read_query_prompt(system_prompt) - - formatted_prompt = f"""System Prompt:\n{system_prompt}\n\nUser Input:\n{text_input}\n""" if system_prompt else None - - return formatted_prompt diff --git a/cognee/infrastructure/llm/generic_llm_api/adapter.py b/cognee/infrastructure/llm/generic_llm_api/adapter.py index f65d559d5..5d1436ba0 100644 --- a/cognee/infrastructure/llm/generic_llm_api/adapter.py +++ b/cognee/infrastructure/llm/generic_llm_api/adapter.py @@ -6,7 +6,6 @@ import instructor from tenacity import retry, stop_after_attempt import openai from cognee.infrastructure.llm.llm_interface import LLMInterface -from cognee.infrastructure.llm.prompts import read_query_prompt from cognee.shared.data_models import MonitoringTool from cognee.base_config import get_base_config from cognee.infrastructure.llm.config import get_llm_config @@ -123,13 +122,3 @@ class GenericAPIAdapter(LLMInterface): response_model = response_model, ) - def show_prompt(self, text_input: str, system_prompt: str) -> str: - """Format and display the prompt for a user query.""" - if not text_input: - text_input = "No user input provided." 
- if not system_prompt: - raise ValueError("No system prompt path provided.") - system_prompt = read_query_prompt(system_prompt) - - formatted_prompt = f"""System Prompt:\n{system_prompt}\n\nUser Input:\n{text_input}\n""" if system_prompt else None - return formatted_prompt diff --git a/cognee/infrastructure/llm/llm_interface.py b/cognee/infrastructure/llm/llm_interface.py index f0c6db133..069efb22d 100644 --- a/cognee/infrastructure/llm/llm_interface.py +++ b/cognee/infrastructure/llm/llm_interface.py @@ -3,6 +3,8 @@ from typing import Type, Protocol from abc import abstractmethod from pydantic import BaseModel +from cognee.infrastructure.llm.prompts import read_query_prompt + class LLMInterface(Protocol): """ LLM Interface """ @@ -16,5 +18,13 @@ class LLMInterface(Protocol): @abstractmethod def show_prompt(self, text_input: str, system_prompt: str) -> str: - """To get structured output, import/call this function""" - raise NotImplementedError + """Format and display the prompt for a user query.""" + if not text_input: + text_input = "No user input provided." 
+ if not system_prompt: + raise ValueError("No system prompt path provided.") + system_prompt = read_query_prompt(system_prompt) + + formatted_prompt = f"""System Prompt:\n{system_prompt}\n\nUser Input:\n{text_input}\n""" + + return formatted_prompt diff --git a/cognee/infrastructure/llm/openai/adapter.py b/cognee/infrastructure/llm/openai/adapter.py index 1dc9b70f5..e74cbbd33 100644 --- a/cognee/infrastructure/llm/openai/adapter.py +++ b/cognee/infrastructure/llm/openai/adapter.py @@ -8,7 +8,6 @@ import instructor from pydantic import BaseModel from cognee.infrastructure.llm.llm_interface import LLMInterface -from cognee.infrastructure.llm.prompts import read_query_prompt class OpenAIAdapter(LLMInterface): name = "OpenAI" @@ -121,14 +120,3 @@ class OpenAIAdapter(LLMInterface): max_tokens = 300, max_retries = 5, ) - - def show_prompt(self, text_input: str, system_prompt: str) -> str: - """Format and display the prompt for a user query.""" - if not text_input: - text_input = "No user input provided." - if not system_prompt: - raise ValueError("No system prompt path provided.") - system_prompt = read_query_prompt(system_prompt) - - formatted_prompt = f"""System Prompt:\n{system_prompt}\n\nUser Input:\n{text_input}\n""" if system_prompt else None - return formatted_prompt diff --git a/cognee/infrastructure/llm/prompts/generate_cog_layers.txt b/cognee/infrastructure/llm/prompts/generate_cog_layers.txt deleted file mode 100644 index 925588189..000000000 --- a/cognee/infrastructure/llm/prompts/generate_cog_layers.txt +++ /dev/null @@ -1,14 +0,0 @@ -You are tasked with analyzing `{{ data_type }}` files, especially in a multilayer network context for tasks such as analysis, categorization, and feature extraction. Various layers can be incorporated to capture the depth and breadth of information contained within the {{ data_type }}. - -These layers can help in understanding the content, context, and characteristics of the `{{ data_type }}`. 
- -Your objective is to extract meaningful layers of information that will contribute to constructing a detailed multilayer network or knowledge graph. - -Approach this task by considering the unique characteristics and inherent properties of the data at hand. - -VERY IMPORTANT: The context you are working in is `{{ category_name }}` and the specific domain you are extracting data on is `{{ category_name }}`. - -Guidelines for Layer Extraction: -Take into account: The content type, in this case, is: `{{ category_name }}`, should play a major role in how you decompose into layers. - -Based on your analysis, define and describe the layers you've identified, explaining their relevance and contribution to understanding the dataset. Your independent identification of layers will enable a nuanced and multifaceted representation of the data, enhancing applications in knowledge discovery, content analysis, and information retrieval. diff --git a/cognee/modules/data/extraction/extract_cognitive_layers.py b/cognee/modules/data/extraction/extract_cognitive_layers.py deleted file mode 100644 index 82e9e8a94..000000000 --- a/cognee/modules/data/extraction/extract_cognitive_layers.py +++ /dev/null @@ -1,11 +0,0 @@ -from typing import Type, Dict -from pydantic import BaseModel -from cognee.infrastructure.llm.prompts import render_prompt -from cognee.infrastructure.llm.get_llm_client import get_llm_client - -async def extract_cognitive_layers(content: str, category: Dict, response_model: Type[BaseModel]): - llm_client = get_llm_client() - - system_prompt = render_prompt("generate_cog_layers.txt", category) - - return await llm_client.acreate_structured_output(content, system_prompt, response_model) diff --git a/profiling/util/DummyLLMAdapter.py b/profiling/util/DummyLLMAdapter.py new file mode 100644 index 000000000..40698b938 --- /dev/null +++ b/profiling/util/DummyLLMAdapter.py @@ -0,0 +1,47 @@ +import spacy +import textacy +from typing import Type +from uuid import uuid4 +from 
pydantic import BaseModel +from cognee.infrastructure.llm.llm_interface import LLMInterface +from cognee.shared.data_models import SummarizedContent +from cognee.shared.data_models import KnowledgeGraph, Node, Edge + + +class DummyLLMAdapter(LLMInterface): + nlp = spacy.load('en_core_web_sm') + async def acreate_structured_output(self, + text_input: str, + system_prompt: str, + response_model: Type[BaseModel]) -> BaseModel: + + if isinstance(response_model, SummarizedContent): + return(dummy_summarize_content(text_input)) + elif isinstance(response_model, KnowledgeGraph): + return(dummy_extract_knowledge_graph(text_input, nlp)) + else: + raise Exception("Currently dummy acreate_structured_input is only implemented for SummarizedContent and KnowledgeGraph") + + +def dummy_extract_knowledge_graph(text, nlp): + doc = nlp(text) + triples = list(textacy.extract.subject_verb_object_triples(doc)) + + nodes = {} + edges = [] + for triple in triples: + source = "_".join([str(e) for e in triple.subject]) + target = "_".join([str(e) for e in triple.object]) + nodes[source] = nodes.get(source, Node(id=str(uuid4()), name=source, type="object", description="") ) + nodes[target] = nodes.get(target, Node(id=str(uuid4()), name=target, type="object", description="") ) + edge_type = "_".join([str(e) for e in triple.verb]) + edges.append(Edge(source_node_id=nodes[source].id, target_node_id=nodes[target].id, relationship_name=edge_type)) + return(KnowledgeGraph(nodes=list(nodes.keys()), edges=edges)) + + +def dummy_summarize_content(text): + words = [(word, len(word)) for word in set(text.split(" "))] + words = sorted(words, key=lambda x: x[1], reverse=True) + summary = " ".join([word for word, _ in words[:100]]) + description = " ".join([word for word, _ in words[:10]]) + return(SummarizedContent(summary=summary, description=description)) \ No newline at end of file From c094898d15d7a4b57f8d1e2e255ecdb2821b0bb0 Mon Sep 17 00:00:00 2001 From: hajdul88 
<52442977+hajdul88@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:12:36 +0100 Subject: [PATCH 28/52] fix: deletes duplicated retriever instances --- cognee/pipelines/__init__.py | 0 cognee/pipelines/retriever/__init__.py | 0 .../retriever/diffusion_retriever.py | 25 ---- cognee/pipelines/retriever/g_retriever.py | 25 ---- .../retriever/two_steps_retriever.py | 119 ------------------ 5 files changed, 169 deletions(-) delete mode 100644 cognee/pipelines/__init__.py delete mode 100644 cognee/pipelines/retriever/__init__.py delete mode 100644 cognee/pipelines/retriever/diffusion_retriever.py delete mode 100644 cognee/pipelines/retriever/g_retriever.py delete mode 100644 cognee/pipelines/retriever/two_steps_retriever.py diff --git a/cognee/pipelines/__init__.py b/cognee/pipelines/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/cognee/pipelines/retriever/__init__.py b/cognee/pipelines/retriever/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/cognee/pipelines/retriever/diffusion_retriever.py b/cognee/pipelines/retriever/diffusion_retriever.py deleted file mode 100644 index a6b79310e..000000000 --- a/cognee/pipelines/retriever/diffusion_retriever.py +++ /dev/null @@ -1,25 +0,0 @@ -from uuid import UUID -from enum import Enum -from typing import Callable, Dict -from cognee.shared.utils import send_telemetry -from cognee.modules.users.models import User -from cognee.modules.users.methods import get_default_user -from cognee.modules.users.permissions.methods import get_document_ids_for_user - -async def two_step_retriever(query: Dict[str, str], user: User = None) -> list: - if user is None: - user = await get_default_user() - - if user is None: - raise PermissionError("No user found in the system. 
Please create a user.") - - own_document_ids = await get_document_ids_for_user(user.id) - retrieved_results = await diffusion_retriever(query, user) - - filtered_search_results = [] - - - return retrieved_results - -async def diffusion_retriever(query: str, user, community_filter = []) -> list: - raise(NotImplementedError) \ No newline at end of file diff --git a/cognee/pipelines/retriever/g_retriever.py b/cognee/pipelines/retriever/g_retriever.py deleted file mode 100644 index 4b319acd9..000000000 --- a/cognee/pipelines/retriever/g_retriever.py +++ /dev/null @@ -1,25 +0,0 @@ -from uuid import UUID -from enum import Enum -from typing import Callable, Dict -from cognee.shared.utils import send_telemetry -from cognee.modules.users.models import User -from cognee.modules.users.methods import get_default_user -from cognee.modules.users.permissions.methods import get_document_ids_for_user - -async def two_step_retriever(query: Dict[str, str], user: User = None) -> list: - if user is None: - user = await get_default_user() - - if user is None: - raise PermissionError("No user found in the system. 
Please create a user.") - - own_document_ids = await get_document_ids_for_user(user.id) - retrieved_results = await g_retriever(query, user) - - filtered_search_results = [] - - - return retrieved_results - -async def g_retriever(query: str, user, community_filter = []) -> list: - raise(NotImplementedError) \ No newline at end of file diff --git a/cognee/pipelines/retriever/two_steps_retriever.py b/cognee/pipelines/retriever/two_steps_retriever.py deleted file mode 100644 index 92ef2be2e..000000000 --- a/cognee/pipelines/retriever/two_steps_retriever.py +++ /dev/null @@ -1,119 +0,0 @@ -import asyncio -from uuid import UUID -from enum import Enum -from typing import Callable, Dict -from cognee.shared.utils import send_telemetry -from cognee.modules.users.models import User -from cognee.modules.users.methods import get_default_user -from cognee.modules.users.permissions.methods import get_document_ids_for_user -from cognee.modules.graph.cognee_graph.CogneeGraph import CogneeGraph -from cognee.infrastructure.databases.vector import get_vector_engine -from cognee.infrastructure.databases.graph import get_graph_engine - - -def format_triplets(edges): - print("\n\n\n") - def filter_attributes(obj, attributes): - """Helper function to filter out non-None properties, including nested dicts.""" - result = {} - for attr in attributes: - value = getattr(obj, attr, None) - if value is not None: - # If the value is a dict, extract relevant keys from it - if isinstance(value, dict): - nested_values = {k: v for k, v in value.items() if k in attributes and v is not None} - result[attr] = nested_values - else: - result[attr] = value - return result - - triplets = [] - for edge in edges: - node1 = edge.node1 - node2 = edge.node2 - edge_attributes = edge.attributes - node1_attributes = node1.attributes - node2_attributes = node2.attributes - - # Filter only non-None properties - node1_info = {key: value for key, value in node1_attributes.items() if value is not None} - node2_info = 
{key: value for key, value in node2_attributes.items() if value is not None} - edge_info = {key: value for key, value in edge_attributes.items() if value is not None} - - # Create the formatted triplet - triplet = ( - f"Node1: {node1_info}\n" - f"Edge: {edge_info}\n" - f"Node2: {node2_info}\n\n\n" # Add three blank lines for separation - ) - triplets.append(triplet) - - return "".join(triplets) - - -async def two_step_retriever(query: Dict[str, str], user: User = None) -> list: - if user is None: - user = await get_default_user() - - if user is None: - raise PermissionError("No user found in the system. Please create a user.") - - own_document_ids = await get_document_ids_for_user(user.id) - retrieved_results = await run_two_step_retriever(query, user) - - filtered_search_results = [] - - return retrieved_results - - -def delete_duplicated_vector_db_elements(collections, results): #:TODO: This is just for now to fix vector db duplicates - results_dict = {} - for collection, results in zip(collections, results): - seen_ids = set() - unique_results = [] - for result in results: - if result.id not in seen_ids: - unique_results.append(result) - seen_ids.add(result.id) - else: - print(f"Duplicate found in collection '{collection}': {result.id}") - results_dict[collection] = unique_results - - return results_dict - - -async def run_two_step_retriever(query: str, user, community_filter = []) -> list: - vector_engine = get_vector_engine() - graph_engine = await get_graph_engine() - - collections = ["Entity_name", "TextSummary_text", 'EntityType_name', 'DocumentChunk_text'] - results = await asyncio.gather( - *[vector_engine.get_distances_of_collection(collection, query_text=query) for collection in collections] - ) - - ############################################# This part is a quick fix til we don't fix the vector db inconsistency - node_distances = delete_duplicated_vector_db_elements(collections, results)# :TODO: Change when vector db is fixed - # results_dict = 
{collection: result for collection, result in zip(collections, results)} - ############################################## - - memory_fragment = CogneeGraph() - - await memory_fragment.project_graph_from_db(graph_engine, - node_properties_to_project=['id', - 'description', - 'name', - 'type', - 'text'], - edge_properties_to_project=['id', - 'relationship_name']) - - await memory_fragment.map_vector_distances_to_graph_nodes(node_distances=node_distances) - - await memory_fragment.map_vector_distances_to_graph_edges(vector_engine, query)# :TODO: This should be coming from vector db - - results = await memory_fragment.calculate_top_triplet_importances(k=5) - - print(format_triplets(results)) - print(f'Query was the following:{query}' ) - - return results From 72a8bc43a1b6a1331a9c5cbee147fe2563133462 Mon Sep 17 00:00:00 2001 From: hajdul88 <52442977+hajdul88@users.noreply.github.com> Date: Thu, 28 Nov 2024 12:19:08 +0100 Subject: [PATCH 29/52] Deleting code_graph_pipeline not working entrypoint From now on eval_swe_bench contains and rung the updated version of the pipeline --- examples/python/code_graph_pipeline.py | 36 -------------------------- 1 file changed, 36 deletions(-) delete mode 100644 examples/python/code_graph_pipeline.py diff --git a/examples/python/code_graph_pipeline.py b/examples/python/code_graph_pipeline.py deleted file mode 100644 index 52c1e0474..000000000 --- a/examples/python/code_graph_pipeline.py +++ /dev/null @@ -1,36 +0,0 @@ -import asyncio -from cognee.modules.pipelines import Task, run_tasks -from cognee.tasks.repo_processor import ( - enrich_dependency_graph, - expand_dependency_graph, - get_repo_file_dependencies, -) -from cognee.tasks.storage import add_data_points -from cognee.tasks.summarization import summarize_code - - -async def print_results(pipeline): - async for result in pipeline: - print(result) - -if __name__ == "__main__": - ''' - parser = argparse.ArgumentParser(description="Process a file path") - 
parser.add_argument("path", help="Path to the file") - - args = parser.parse_args() - abspath = os.path.abspath(args.path or ".") - ''' - - abspath = '/Users/laszlohajdu/Documents/Github/RAW_GIT_REPOS/astropy__astropy-12907' - tasks = [ - Task(get_repo_file_dependencies), - Task(add_data_points), - Task(enrich_dependency_graph), - Task(expand_dependency_graph), - Task(add_data_points), - # Task(summarize_code, summarization_model = SummarizedContent), - ] - pipeline = run_tasks(tasks, abspath, "cognify_code_pipeline") - - asyncio.run(print_results(pipeline)) From 5c9fd44680730be365438131b9224b0d2423ac15 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Thu, 28 Nov 2024 12:26:01 +0100 Subject: [PATCH 30/52] Fix DummyLLMAdapter --- cognee/infrastructure/llm/llm_interface.py | 1 - profiling/util/DummyLLMAdapter.py | 66 ++++++++++++++-------- 2 files changed, 42 insertions(+), 25 deletions(-) diff --git a/cognee/infrastructure/llm/llm_interface.py b/cognee/infrastructure/llm/llm_interface.py index 069efb22d..4a5bd79b6 100644 --- a/cognee/infrastructure/llm/llm_interface.py +++ b/cognee/infrastructure/llm/llm_interface.py @@ -16,7 +16,6 @@ class LLMInterface(Protocol): """To get structured output, import/call this function""" raise NotImplementedError - @abstractmethod def show_prompt(self, text_input: str, system_prompt: str) -> str: """Format and display the prompt for a user query.""" if not text_input: diff --git a/profiling/util/DummyLLMAdapter.py b/profiling/util/DummyLLMAdapter.py index 40698b938..df81ce123 100644 --- a/profiling/util/DummyLLMAdapter.py +++ b/profiling/util/DummyLLMAdapter.py @@ -1,26 +1,34 @@ -import spacy -import textacy from typing import Type from uuid import uuid4 + +import spacy +import textacy from pydantic import BaseModel + from cognee.infrastructure.llm.llm_interface import LLMInterface -from cognee.shared.data_models import SummarizedContent -from cognee.shared.data_models import KnowledgeGraph, Node, Edge +from 
cognee.shared.data_models import Edge, KnowledgeGraph, Node, SummarizedContent class DummyLLMAdapter(LLMInterface): - nlp = spacy.load('en_core_web_sm') - async def acreate_structured_output(self, - text_input: str, - system_prompt: str, - response_model: Type[BaseModel]) -> BaseModel: - - if isinstance(response_model, SummarizedContent): - return(dummy_summarize_content(text_input)) - elif isinstance(response_model, KnowledgeGraph): - return(dummy_extract_knowledge_graph(text_input, nlp)) + nlp = spacy.load("en_core_web_sm") + + async def acreate_structured_output( + self, text_input: str, system_prompt: str, response_model: Type[BaseModel] + ) -> BaseModel: + + if ( + str(response_model) + == "" + ): + return dummy_summarize_content(text_input) + elif ( + str(response_model) == "" + ): + return dummy_extract_knowledge_graph(text_input, self.nlp) else: - raise Exception("Currently dummy acreate_structured_input is only implemented for SummarizedContent and KnowledgeGraph") + raise Exception( + "Currently dummy acreate_structured_input is only implemented for SummarizedContent and KnowledgeGraph" + ) def dummy_extract_knowledge_graph(text, nlp): @@ -31,17 +39,27 @@ def dummy_extract_knowledge_graph(text, nlp): edges = [] for triple in triples: source = "_".join([str(e) for e in triple.subject]) - target = "_".join([str(e) for e in triple.object]) - nodes[source] = nodes.get(source, Node(id=str(uuid4()), name=source, type="object", description="") ) - nodes[target] = nodes.get(target, Node(id=str(uuid4()), name=target, type="object", description="") ) + target = "_".join([str(e) for e in triple.object]) + nodes[source] = nodes.get( + source, Node(id=str(uuid4()), name=source, type="object", description="") + ) + nodes[target] = nodes.get( + target, Node(id=str(uuid4()), name=target, type="object", description="") + ) edge_type = "_".join([str(e) for e in triple.verb]) - edges.append(Edge(source_node_id=nodes[source].id, target_node_id=nodes[target].id, 
relationship_name=edge_type)) - return(KnowledgeGraph(nodes=list(nodes.keys()), edges=edges)) - + edges.append( + Edge( + source_node_id=nodes[source].id, + target_node_id=nodes[target].id, + relationship_name=edge_type, + ) + ) + return KnowledgeGraph(nodes=list(nodes.values()), edges=edges) + def dummy_summarize_content(text): words = [(word, len(word)) for word in set(text.split(" "))] words = sorted(words, key=lambda x: x[1], reverse=True) - summary = " ".join([word for word, _ in words[:100]]) - description = " ".join([word for word, _ in words[:10]]) - return(SummarizedContent(summary=summary, description=description)) \ No newline at end of file + summary = " ".join([word for word, _ in words[:50]]) + description = " ".join([word for word, _ in words[:10]]) + return SummarizedContent(summary=summary, description=description) From 3e1949d895f1450a3b8a436eb9491c11e60daad8 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Thu, 28 Nov 2024 15:42:20 +0100 Subject: [PATCH 31/52] Remove unnecessary nesting in embed_text and add DummyEmbeddingEngine --- .../embeddings/LiteLLMEmbeddingEngine.py | 29 ++++++++----------- profiling/util/DummyEmbeddingEngine.py | 9 ++++++ 2 files changed, 21 insertions(+), 17 deletions(-) create mode 100644 profiling/util/DummyEmbeddingEngine.py diff --git a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py index de30640e5..edc8eb57f 100644 --- a/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py +++ b/cognee/infrastructure/databases/vector/embeddings/LiteLLMEmbeddingEngine.py @@ -28,24 +28,19 @@ class LiteLLMEmbeddingEngine(EmbeddingEngine): self.dimensions = dimensions async def embed_text(self, text: List[str]) -> List[List[float]]: - async def get_embedding(text_): - try: - response = await litellm.aembedding( - self.model, - input = text_, - api_key = self.api_key, - api_base = self.endpoint, - 
api_version = self.api_version - ) - except litellm.exceptions.BadRequestError as error: - logger.error("Error embedding text: %s", str(error)) - raise error + try: + response = await litellm.aembedding( + self.model, + input = text, + api_key = self.api_key, + api_base = self.endpoint, + api_version = self.api_version + ) + except litellm.exceptions.BadRequestError as error: + logger.error("Error embedding text: %s", str(error)) + raise error - return [data["embedding"] for data in response.data] - - # tasks = [get_embedding(text_) for text_ in text] - result = await get_embedding(text) - return result + return [data["embedding"] for data in response.data] def get_vector_size(self) -> int: return self.dimensions diff --git a/profiling/util/DummyEmbeddingEngine.py b/profiling/util/DummyEmbeddingEngine.py new file mode 100644 index 000000000..7f5b3e847 --- /dev/null +++ b/profiling/util/DummyEmbeddingEngine.py @@ -0,0 +1,9 @@ +import numpy as np +from cognee.infrastructure.databases.vector.embeddings.EmbeddingEngine import EmbeddingEngine + +class DummyEmbeddingEngine(EmbeddingEngine): + async def embed_text(self, text: list[str]) -> list[list[float]]: + return(list(list(np.random.randn(3072)))) + + def get_vector_size(self) -> int: + return(3072) From 8edfe7c5a4e27c4b499ce98c164ce84342057bc0 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Thu, 28 Nov 2024 16:52:54 +0100 Subject: [PATCH 32/52] feat/connect code graph pipeline to benchmarking --- evals/eval_swe_bench.py | 69 ++++++++++++++++++++++------------------- 1 file changed, 37 insertions(+), 32 deletions(-) diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 1dd0e58ab..c16e821fa 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -8,26 +8,35 @@ from swebench.harness.utils import load_swebench_dataset from swebench.inference.make_datasets.create_instance import PATCH_EXAMPLE import cognee - -from cognee.shared.data_models import SummarizedContent -from cognee.shared.utils 
import render_graph -from cognee.tasks.repo_processor import ( - enrich_dependency_graph, - expand_dependency_graph, - get_repo_file_dependencies, -) -from cognee.tasks.storage import add_data_points -from cognee.tasks.summarization import summarize_code -from cognee.modules.pipelines import Task, run_tasks from cognee.api.v1.cognify.code_graph_pipeline import code_graph_pipeline from cognee.api.v1.search import SearchType from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.prompts import read_query_prompt -from evals.eval_utils import download_instances -from evals.eval_utils import ingest_repos -from evals.eval_utils import download_github_repo -from evals.eval_utils import delete_repo +from cognee.modules.pipelines import Task, run_tasks +from cognee.modules.retrieval.brute_force_triplet_search import \ + brute_force_triplet_search +from cognee.shared.data_models import SummarizedContent +from cognee.shared.utils import render_graph +from cognee.tasks.repo_processor import (enrich_dependency_graph, + expand_dependency_graph, + get_repo_file_dependencies) +from cognee.tasks.storage import add_data_points +from cognee.tasks.summarization import summarize_code +from evals.eval_utils import (delete_repo, download_github_repo, + download_instances, ingest_repos) + + +def node_to_string(node): + text = node.attributes["text"] + return f"Node({node.id}, {text})" +def retrieved_edges_to_string(retrieved_edges): + edge_strings = [] + for edge in retrieved_edges: + relationship_type = edge.attributes["relationship_type"] + edge_str = f"{node_to_string(edge.node1)} {relationship_type} {node_to_string(edge.node2)}" + edge_strings.append(edge_str) + return "\n".join(edge_strings) async def generate_patch_with_cognee(instance): await cognee.prune.prune_data() @@ -39,19 +48,18 @@ async def generate_patch_with_cognee(instance): # repo_path = 
download_github_repo(instance, '../RAW_GIT_REPOS') - repo_path = '/Users/borisarzentar/Projects/graphrag' - + repo_path = '../minimal_repo' tasks = [ Task(get_repo_file_dependencies), Task(add_data_points, task_config = { "batch_size": 50 }), Task(enrich_dependency_graph, task_config = { "batch_size": 50 }), Task(expand_dependency_graph, task_config = { "batch_size": 50 }), Task(add_data_points, task_config = { "batch_size": 50 }), - # Task(summarize_code, summarization_model = SummarizedContent), + Task(summarize_code, summarization_model = SummarizedContent), ] pipeline = run_tasks(tasks, repo_path, "cognify_code_pipeline") - + async for result in pipeline: print(result) @@ -62,29 +70,27 @@ async def generate_patch_with_cognee(instance): problem_statement = instance['problem_statement'] instructions = read_query_prompt("patch_gen_instructions.txt") - graph_str = 'HERE WE SHOULD PASS THE TRIPLETS FROM GRAPHRAG' + retrieved_edges = await brute_force_triplet_search(problem_statement, top_k = 3) + + retrieved_edges_str = retrieved_edges_to_string(retrieved_edges) prompt = "\n".join([ - instructions, "", PATCH_EXAMPLE, "", - "This is the knowledge graph:", - graph_str + "These are the retrieved edges:", + retrieved_edges_str ]) - return 0 - - ''' :TODO: We have to find out how do we do the generation llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( - text_input=problem_statement, - system_prompt=prompt, + text_input=prompt, + system_prompt=instructions, response_model=str, ) return answer_prediction - ''' + async def generate_patch_without_cognee(instance): problem_statement = instance['problem_statement'] @@ -111,12 +117,11 @@ async def get_preds(dataset, with_cognee=True): for instance in dataset: await pred_func(instance) - ''' preds = [{"instance_id": instance["instance_id"], "model_patch": await pred_func(instance), "model_name_or_path": model_name} for instance in dataset] - ''' - return 0 + + return preds async def 
main(): From 996b3a658b3570e76fcdf64b903a9c7a8d31b78e Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Thu, 28 Nov 2024 16:53:33 +0100 Subject: [PATCH 33/52] add custom metric implementation --- evals/deepeval_metrics.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 evals/deepeval_metrics.py diff --git a/evals/deepeval_metrics.py b/evals/deepeval_metrics.py new file mode 100644 index 000000000..03f9f6dba --- /dev/null +++ b/evals/deepeval_metrics.py @@ -0,0 +1,14 @@ +from deepeval.metrics import GEval +from deepeval.test_case import LLMTestCaseParams + +correctness_metric = GEval( + name="Correctness", + model="gpt-4o-mini", + evaluation_params=[ + LLMTestCaseParams.ACTUAL_OUTPUT, + LLMTestCaseParams.EXPECTED_OUTPUT + ], + evaluation_steps=[ + "Determine whether the actual output is factually correct based on the expected output." + ] + ) From 1bfa3a0ea317c79f36fe817adcf96abf0a24f88e Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Fri, 29 Nov 2024 11:30:30 +0100 Subject: [PATCH 34/52] Rebase onto code-graph --- evals/EC2_README.md | 34 +++++++ evals/cloud/setup_ubuntu_instance.sh | 43 +++++++++ evals/eval_swe_bench.py | 127 ++++++++++++++++++--------- 3 files changed, 163 insertions(+), 41 deletions(-) create mode 100644 evals/EC2_README.md create mode 100644 evals/cloud/setup_ubuntu_instance.sh diff --git a/evals/EC2_README.md b/evals/EC2_README.md new file mode 100644 index 000000000..50a92bc27 --- /dev/null +++ b/evals/EC2_README.md @@ -0,0 +1,34 @@ +Create an EC2 Instance with the + +`Ubuntu Image` + +Many instance types will work, we used: + +`m7a.2xlarge` # more than 8 parallel processes doesn't seem to speed up overall process. Maybe to do with docker parallelism? 
+ +DON'T FORGET TO ADD + +`500 GB storage` + +Or the evaluation run will run out of space + +-------------------------------------------------------- + +Then ssh into the instance, run + +source evals/cloud/setup_ubuntu_instance.sh + +sudo usermod -aG docker $USER + +disconnect, and reconnect. + +Then enter a `screen` and activate the virtual env + +screen +source venv/bin/activate + +then, from cognee, you can run swe_bench: + +python evals/eval_swe_bench --cognee_off --max_workers=N_CPUS + +Building the environment images takes roughly 17 minutes \ No newline at end of file diff --git a/evals/cloud/setup_ubuntu_instance.sh b/evals/cloud/setup_ubuntu_instance.sh new file mode 100644 index 000000000..e5386c372 --- /dev/null +++ b/evals/cloud/setup_ubuntu_instance.sh @@ -0,0 +1,43 @@ + +sudo apt-get update +sudo apt-get install ca-certificates curl +sudo install -m 0755 -d /etc/apt/keyrings +sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc +sudo chmod a+r /etc/apt/keyrings/docker.asc + +# Add the repository to Apt sources: +echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. 
/etc/os-release && echo "$VERSION_CODENAME") stable" | \ + sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +sudo apt-get update + +sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin + +sudo docker run hello-world + +sudo apt install unzip + +sudo apt-get install python3-virtualenv + +sudo add-apt-repository ppa:deadsnakes/ppa +sudo apt update + +sudo apt install python3.11 + +virtualenv venv --python=python3.11 + +source venv/bin/activate + +pip install poetry + +poetry install + +pip install swebench transformers sentencepiece + +groups | grep docker + +python evals/eval_swe_bench.py --cognee_off + +sudo usermod -aG docker $USER + diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 1dd0e58ab..5cbea58ee 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -1,6 +1,7 @@ import argparse import json import subprocess +import sys from pathlib import Path from datasets import Dataset @@ -29,7 +30,28 @@ from evals.eval_utils import ingest_repos from evals.eval_utils import download_github_repo from evals.eval_utils import delete_repo -async def generate_patch_with_cognee(instance): + +def check_install_package(package_name): + """ + Check if a pip package is installed and install it if not. + Returns True if package is/was installed successfully, False otherwise. 
+ """ + try: + __import__(package_name) + return True + except ImportError: + try: + subprocess.check_call( + [sys.executable, "-m", "pip", "install", package_name] + ) + return True + except subprocess.CalledProcessError: + return False + + + +async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): + await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -59,23 +81,22 @@ async def generate_patch_with_cognee(instance): await render_graph(None, include_labels = True, include_nodes = True) - problem_statement = instance['problem_statement'] + problem_statement = instance["problem_statement"] instructions = read_query_prompt("patch_gen_instructions.txt") graph_str = 'HERE WE SHOULD PASS THE TRIPLETS FROM GRAPHRAG' - prompt = "\n".join([ - instructions, - "", - PATCH_EXAMPLE, - "", - "This is the knowledge graph:", - graph_str - ]) + prompt = "\n".join( + [ + instructions, + "", + PATCH_EXAMPLE, + "", + "This is the knowledge graph:", + graph_str, + ] + ) - return 0 - - ''' :TODO: We have to find out how do we do the generation llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( text_input=problem_statement, @@ -84,13 +105,11 @@ async def generate_patch_with_cognee(instance): ) return answer_prediction - ''' -async def generate_patch_without_cognee(instance): - problem_statement = instance['problem_statement'] +async def generate_patch_without_cognee(instance, llm_client): + problem_statement = instance["problem_statement"] prompt = instance["text"] - llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( text_input=problem_statement, system_prompt=prompt, @@ -100,43 +119,56 @@ async def generate_patch_without_cognee(instance): async def get_preds(dataset, with_cognee=True): + llm_client = get_llm_client() + if with_cognee: model_name = "with_cognee" - pred_func = generate_patch_with_cognee + futures = [ + (instance["instance_id"], 
generate_patch_with_cognee(instance)) + for instance in dataset + ] else: model_name = "without_cognee" - pred_func = generate_patch_without_cognee + futures = [ + (instance["instance_id"], generate_patch_without_cognee(instance, llm_client)) + for instance in dataset + ] + model_patches = await asyncio.gather(*[x[1] for x in futures]) + preds = [ + { + "instance_id": instance_id, + "model_patch": model_patch, + "model_name_or_path": model_name, + } + for (instance_id, _), model_patch in zip(futures, model_patches) + ] - for instance in dataset: - await pred_func(instance) - - ''' - preds = [{"instance_id": instance["instance_id"], - "model_patch": await pred_func(instance), - "model_name_or_path": model_name} for instance in dataset] - ''' - return 0 + return preds async def main(): parser = argparse.ArgumentParser( - description="Run LLM predictions on SWE-bench dataset") - parser.add_argument('--cognee_off', action='store_true') + description="Run LLM predictions on SWE-bench dataset" + ) + parser.add_argument("--cognee_off", action="store_true") + parser.add_argument("--max_workers", type=int, required=True) args = parser.parse_args() + for dependency in ["transformers", "sentencepiece", "swebench"]: + check_install_package(dependency) + if args.cognee_off: - dataset_name = 'princeton-nlp/SWE-bench_Lite_bm25_13K' - dataset = load_swebench_dataset(dataset_name, split='test') + dataset_name = "princeton-nlp/SWE-bench_Lite_bm25_13K" + dataset = load_swebench_dataset(dataset_name, split="test") predictions_path = "preds_nocognee.json" if not Path(predictions_path).exists(): preds = await get_preds(dataset, with_cognee=False) with open(predictions_path, "w") as file: json.dump(preds, file) else: - dataset_name = 'princeton-nlp/SWE-bench_Lite' - swe_dataset = load_swebench_dataset( - dataset_name, split='test')[:1] + dataset_name = "princeton-nlp/SWE-bench_Lite" + swe_dataset = load_swebench_dataset(dataset_name, split="test")[:1] filepath = 
Path("SWE-bench_testsample") if filepath.exists(): dataset = Dataset.load_from_disk(filepath) @@ -147,12 +179,25 @@ async def main(): with open(predictions_path, "w") as file: json.dump(preds, file) - subprocess.run(["python", "-m", "swebench.harness.run_evaluation", - "--dataset_name", dataset_name, - "--split", "test", - "--predictions_path", predictions_path, - "--max_workers", "1", - "--run_id", "test_run"]) + + subprocess.run( + [ + "python", + "-m", + "swebench.harness.run_evaluation", + "--dataset_name", + dataset_name, + "--split", + "test", + "--predictions_path", + predictions_path, + "--max_workers", + str(args.max_workers), + "--run_id", + "test_run", + ] + ) + if __name__ == "__main__": import asyncio From 5036f3a85f22b84bf2dd9c3ee0aa4ba56de0ff32 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 11:18:36 +0100 Subject: [PATCH 35/52] Add -y to setup_ubuntu_instance.sh commands and update EC2_README --- evals/EC2_README.md | 44 +++++++++++++++++++++++----- evals/cloud/setup_ubuntu_instance.sh | 38 +++++++++--------------- 2 files changed, 51 insertions(+), 31 deletions(-) diff --git a/evals/EC2_README.md b/evals/EC2_README.md index 50a92bc27..8e3dccb11 100644 --- a/evals/EC2_README.md +++ b/evals/EC2_README.md @@ -1,3 +1,5 @@ +## Creating the EC2 Instance + Create an EC2 Instance with the `Ubuntu Image` @@ -12,23 +14,51 @@ DON'T FORGET TO ADD Or the evaluation run will run out of space --------------------------------------------------------- +Add a key pair login where you have access to the corresponding key file (*.pem) -Then ssh into the instance, run +## Accessing your instance and setup -source evals/cloud/setup_ubuntu_instance.sh +To ssh into the instance, you have to save your key pair file (*.pem) to an appropriate location, such as ~/.aws. After launching the instance, you can access the Instance Summary, and retrieve "Public IPv4 DNS" address. 
Then run + +`ssh -i PATH_TO_KEY ubuntu@IPv4ADDRESS` + +to gain command line access to the instance. + +To copy your current state of cognee, go to the folder that contains "cognee" on your local machine, zip it to cognee.zip and run: + +`zip -r cognee.zip cognee` +`scp -i PATH_TO_KEY cognee.zip ubuntu@IPv4ADDRESS:cognee.zip` + +And unzip cognee.zip in your SSH session: + +`sudo apt install unzip` +`unzip cognee.zip` + +Then run: +`cd cognee` +`source evals/cloud/setup_ubuntu_instance.sh` sudo usermod -aG docker $USER disconnect, and reconnect. +Confirm that `ubuntu` has been added to the docker user group with + +`groups | grep docker` + +## Running SWE-bench + Then enter a `screen` and activate the virtual env -screen -source venv/bin/activate +`screen` +`source venv/bin/activate` then, from cognee, you can run swe_bench: -python evals/eval_swe_bench --cognee_off --max_workers=N_CPUS +`cd cognee` -Building the environment images takes roughly 17 minutes \ No newline at end of file +`python evals/eval_swe_bench.py --cognee_off --max_workers=N_CPUS` + +Building the environment images should take roughly 17 minutes + +If the virtual env wasn't set up correctly for some reason, just run the last few lines of `setup_ubuntu_instance.sh` manually \ No newline at end of file diff --git a/evals/cloud/setup_ubuntu_instance.sh b/evals/cloud/setup_ubuntu_instance.sh index e5386c372..e05b761e2 100644 --- a/evals/cloud/setup_ubuntu_instance.sh +++ b/evals/cloud/setup_ubuntu_instance.sh @@ -1,43 +1,33 @@ - -sudo apt-get update -sudo apt-get install ca-certificates curl +sudo apt-get update -y +sudo apt-get install -y ca-certificates curl sudo install -m 0755 -d /etc/apt/keyrings sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc sudo chmod a+r /etc/apt/keyrings/docker.asc # Add the repository to Apt sources: echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ 
- $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ - sudo tee /etc/apt/sources.list.d/docker.list > /dev/null -sudo apt-get update +"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ + $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \ +sudo tee /etc/apt/sources.list.d/docker.list > /dev/null +sudo apt-get update -y -sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin +sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin sudo docker run hello-world -sudo apt install unzip +sudo apt install -y unzip -sudo apt-get install python3-virtualenv +sudo apt-get install -y python3-virtualenv -sudo add-apt-repository ppa:deadsnakes/ppa -sudo apt update +sudo add-apt-repository -y ppa:deadsnakes/ppa -sudo apt install python3.11 +sudo apt update -y + +sudo apt install -y python3.11 virtualenv venv --python=python3.11 source venv/bin/activate - pip install poetry - poetry install - -pip install swebench transformers sentencepiece - -groups | grep docker - -python evals/eval_swe_bench.py --cognee_off - -sudo usermod -aG docker $USER - +pip install swebench transformers sentencepiece datasets tiktoken protobuf From 618d476c301e1c30bf0f7b505b117d1c918e1473 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Tue, 26 Nov 2024 11:20:07 +0100 Subject: [PATCH 36/52] Add code formating to usermod command --- evals/EC2_README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/evals/EC2_README.md b/evals/EC2_README.md index 8e3dccb11..d6a937ed7 100644 --- a/evals/EC2_README.md +++ b/evals/EC2_README.md @@ -38,7 +38,7 @@ Then run: `cd cognee` `source evals/cloud/setup_ubuntu_instance.sh` -sudo usermod -aG docker $USER +`sudo usermod -aG docker $USER` disconnect, and reconnect. 
From b46af5a6f6d049fde2ffc66992ec9e9a530d8fff Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Wed, 27 Nov 2024 12:12:12 +0100 Subject: [PATCH 37/52] Update eval_swe_bench --- evals/eval_swe_bench.py | 41 ++++++++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 5cbea58ee..980be4bd2 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -26,9 +26,6 @@ from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.prompts import read_query_prompt from evals.eval_utils import download_instances -from evals.eval_utils import ingest_repos -from evals.eval_utils import download_github_repo -from evals.eval_utils import delete_repo def check_install_package(package_name): @@ -49,8 +46,14 @@ def check_install_package(package_name): return False +<<<<<<< HEAD async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): +======= +async def generate_patch_with_cognee( + instance, search_type=SearchType.CHUNKS +): +>>>>>>> c4e3634 (Update eval_swe_bench) await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -81,7 +84,7 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): await render_graph(None, include_labels = True, include_nodes = True) - problem_statement = instance["problem_statement"] + problem_statement = instance['problem_statement'] instructions = read_query_prompt("patch_gen_instructions.txt") graph_str = 'HERE WE SHOULD PASS THE TRIPLETS FROM GRAPHRAG' @@ -97,7 +100,6 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): ] ) - llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( text_input=problem_statement, system_prompt=prompt, @@ -106,8 +108,9 @@ async def generate_patch_with_cognee(instance, 
search_type=SearchType.CHUNKS): return answer_prediction + async def generate_patch_without_cognee(instance, llm_client): - problem_statement = instance["problem_statement"] + problem_statement = instance['problem_statement'] prompt = instance["text"] answer_prediction = await llm_client.acreate_structured_output( @@ -134,7 +137,10 @@ async def get_preds(dataset, with_cognee=True): for instance in dataset ] model_patches = await asyncio.gather(*[x[1] for x in futures]) +<<<<<<< HEAD +======= +>>>>>>> c4e3634 (Update eval_swe_bench) preds = [ { "instance_id": instance_id, @@ -149,9 +155,8 @@ async def get_preds(dataset, with_cognee=True): async def main(): parser = argparse.ArgumentParser( - description="Run LLM predictions on SWE-bench dataset" - ) - parser.add_argument("--cognee_off", action="store_true") + description="Run LLM predictions on SWE-bench dataset") + parser.add_argument('--cognee_off', action='store_true') parser.add_argument("--max_workers", type=int, required=True) args = parser.parse_args() @@ -159,16 +164,17 @@ async def main(): check_install_package(dependency) if args.cognee_off: - dataset_name = "princeton-nlp/SWE-bench_Lite_bm25_13K" - dataset = load_swebench_dataset(dataset_name, split="test") + dataset_name = 'princeton-nlp/SWE-bench_Lite_bm25_13K' + dataset = load_swebench_dataset(dataset_name, split='test') predictions_path = "preds_nocognee.json" if not Path(predictions_path).exists(): preds = await get_preds(dataset, with_cognee=False) with open(predictions_path, "w") as file: json.dump(preds, file) else: - dataset_name = "princeton-nlp/SWE-bench_Lite" - swe_dataset = load_swebench_dataset(dataset_name, split="test")[:1] + dataset_name = 'princeton-nlp/SWE-bench_Lite' + swe_dataset = load_swebench_dataset( + dataset_name, split='test')[:1] filepath = Path("SWE-bench_testsample") if filepath.exists(): dataset = Dataset.load_from_disk(filepath) @@ -179,6 +185,7 @@ async def main(): with open(predictions_path, "w") as file: 
json.dump(preds, file) +<<<<<<< HEAD subprocess.run( [ @@ -198,6 +205,14 @@ async def main(): ] ) +======= + subprocess.run(["python", "-m", "swebench.harness.run_evaluation", + "--dataset_name", dataset_name, + "--split", "test", + "--predictions_path", predictions_path, + "--max_workers", "1", + "--run_id", "test_run"]) +>>>>>>> c4e3634 (Update eval_swe_bench) if __name__ == "__main__": import asyncio From d9fc740ec0a8f9d84d1e9a3fabc5fa3edfa84e40 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Fri, 29 Nov 2024 11:33:05 +0100 Subject: [PATCH 38/52] Fix merge conflicts --- evals/eval_swe_bench.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 980be4bd2..45b83970a 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -45,15 +45,7 @@ def check_install_package(package_name): except subprocess.CalledProcessError: return False - -<<<<<<< HEAD - async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): -======= -async def generate_patch_with_cognee( - instance, search_type=SearchType.CHUNKS -): ->>>>>>> c4e3634 (Update eval_swe_bench) await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -137,10 +129,7 @@ async def get_preds(dataset, with_cognee=True): for instance in dataset ] model_patches = await asyncio.gather(*[x[1] for x in futures]) -<<<<<<< HEAD -======= ->>>>>>> c4e3634 (Update eval_swe_bench) preds = [ { "instance_id": instance_id, @@ -185,7 +174,6 @@ async def main(): with open(predictions_path, "w") as file: json.dump(preds, file) -<<<<<<< HEAD subprocess.run( [ @@ -205,14 +193,6 @@ async def main(): ] ) -======= - subprocess.run(["python", "-m", "swebench.harness.run_evaluation", - "--dataset_name", dataset_name, - "--split", "test", - "--predictions_path", predictions_path, - "--max_workers", "1", - "--run_id", "test_run"]) ->>>>>>> c4e3634 (Update eval_swe_bench) if __name__ == "__main__": import asyncio From 
a5ae9185cd397f9526627023bf7f98453b35df8f Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Fri, 29 Nov 2024 11:40:51 +0100 Subject: [PATCH 39/52] Replicate PR 33 --- .../llm/prompts/patch_gen_instructions.txt | 3 +- .../llm/prompts/patch_gen_kg_instructions.txt | 3 ++ evals/eval_swe_bench.py | 32 +++++++++---------- 3 files changed, 19 insertions(+), 19 deletions(-) create mode 100644 cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt diff --git a/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt b/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt index 1553753ab..5e7e48dda 100644 --- a/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt +++ b/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt @@ -1,3 +1,2 @@ -I need you to solve this issue by looking at the provided knowledge graph and -generating a single patch file that I can apply directly to this repository using git apply. +I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. \ No newline at end of file diff --git a/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt b/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt new file mode 100644 index 000000000..1553753ab --- /dev/null +++ b/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt @@ -0,0 +1,3 @@ +I need you to solve this issue by looking at the provided knowledge graph and +generating a single patch file that I can apply directly to this repository using git apply. +Please respond with a single patch file in the following format. 
\ No newline at end of file diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 45b83970a..4a59457e1 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -45,7 +45,7 @@ def check_install_package(package_name): except subprocess.CalledProcessError: return False -async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): +async def generate_patch_with_cognee(instance, llm_client, search_type=SearchType.CHUNKS): await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -77,13 +77,13 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): await render_graph(None, include_labels = True, include_nodes = True) problem_statement = instance['problem_statement'] - instructions = read_query_prompt("patch_gen_instructions.txt") + instructions = read_query_prompt("patch_gen_kg_instructions.txt") graph_str = 'HERE WE SHOULD PASS THE TRIPLETS FROM GRAPHRAG' prompt = "\n".join( [ - instructions, + problem_statement, "", PATCH_EXAMPLE, "", @@ -93,8 +93,8 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): ) answer_prediction = await llm_client.acreate_structured_output( - text_input=problem_statement, - system_prompt=prompt, + text_input=prompt, + system_prompt=instructions, response_model=str, ) @@ -102,12 +102,11 @@ async def generate_patch_with_cognee(instance, search_type=SearchType.CHUNKS): async def generate_patch_without_cognee(instance, llm_client): - problem_statement = instance['problem_statement'] - prompt = instance["text"] + instructions = read_query_prompt("patch_gen_instructions.txt") answer_prediction = await llm_client.acreate_structured_output( - text_input=problem_statement, - system_prompt=prompt, + text_input=instance["text"], + system_prompt=instructions, response_model=str, ) return answer_prediction @@ -118,16 +117,15 @@ async def get_preds(dataset, with_cognee=True): if with_cognee: model_name = "with_cognee" - futures = [ - 
(instance["instance_id"], generate_patch_with_cognee(instance)) - for instance in dataset - ] + pred_func = generate_patch_with_cognee else: model_name = "without_cognee" - futures = [ - (instance["instance_id"], generate_patch_without_cognee(instance, llm_client)) - for instance in dataset - ] + pred_func = generate_patch_without_cognee + + futures = [ + (instance["instance_id"], pred_func(instance, llm_client)) + for instance in dataset + ] model_patches = await asyncio.gather(*[x[1] for x in futures]) preds = [ From 8f241fa6c51d7fe9db3a7b6d84cf0b45aa2938a1 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Fri, 29 Nov 2024 12:01:01 +0100 Subject: [PATCH 40/52] convert edge to string --- cognee/infrastructure/llm/prompts/patch_gen_instructions.txt | 4 ++-- evals/eval_swe_bench.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt b/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt index 1553753ab..ebbb03f75 100644 --- a/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt +++ b/cognee/infrastructure/llm/prompts/patch_gen_instructions.txt @@ -1,3 +1,3 @@ -I need you to solve this issue by looking at the provided knowledge graph and -generating a single patch file that I can apply directly to this repository using git apply. +I need you to solve this issue by looking at the provided edges retrieved from a knowledge graph and +generate a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. 
\ No newline at end of file diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index c16e821fa..80fa35623 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -29,7 +29,8 @@ from evals.eval_utils import (delete_repo, download_github_repo, def node_to_string(node): text = node.attributes["text"] - return f"Node({node.id}, {text})" + type = node.attributes["type"] + return f"Node(id: {node.id}, type: {type}, description: {text})" def retrieved_edges_to_string(retrieved_edges): edge_strings = [] for edge in retrieved_edges: From bc82430fb50726cec4464074c9d06fe00bea2535 Mon Sep 17 00:00:00 2001 From: Leon Luithlen Date: Fri, 29 Nov 2024 14:36:03 +0100 Subject: [PATCH 41/52] Merge latest COG-519 --- .../files/utils/get_file_metadata.py | 1 + .../modules/data/operations/write_metadata.py | 21 ++++++++++++------- .../ingestion/ingest_data_with_metadata.py | 3 +-- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/cognee/infrastructure/files/utils/get_file_metadata.py b/cognee/infrastructure/files/utils/get_file_metadata.py index 4aea9560e..a114ef48f 100644 --- a/cognee/infrastructure/files/utils/get_file_metadata.py +++ b/cognee/infrastructure/files/utils/get_file_metadata.py @@ -4,6 +4,7 @@ from .guess_file_type import guess_file_type class FileMetadata(TypedDict): name: str + file_path: str mime_type: str extension: str diff --git a/cognee/modules/data/operations/write_metadata.py b/cognee/modules/data/operations/write_metadata.py index 4b550a6bf..a2ea644ac 100644 --- a/cognee/modules/data/operations/write_metadata.py +++ b/cognee/modules/data/operations/write_metadata.py @@ -4,14 +4,15 @@ import re import warnings from typing import Any from uuid import UUID +from typing import Any, BinaryIO, Union from cognee.infrastructure.databases.relational import get_relational_engine - +from cognee.infrastructure.files.utils.get_file_metadata import FileMetadata from ..models.Metadata import Metadata -async def write_metadata(data_item: 
Any, data_id: UUID) -> UUID: - metadata_dict = get_metadata_dict(data_item) +async def write_metadata(data_item: Union[BinaryIO, str, Any], data_id: UUID, file_metadata: FileMetadata) -> UUID: + metadata_dict = get_metadata_dict(data_item, file_metadata) db_engine = get_relational_engine() async with db_engine.get_async_session() as session: metadata = Metadata( @@ -34,14 +35,18 @@ def parse_type(type_: Any) -> str: raise Exception(f"type: {type_} could not be parsed") -def get_metadata_dict(metadata: Any) -> dict[str, Any]: - if hasattr(metadata, "dict") and inspect.ismethod(getattr(metadata, "dict")): - return metadata.dict() +def get_metadata_dict(data_item: Union[BinaryIO, str, Any], file_metadata: FileMetadata) -> dict[str, Any]: + if isinstance(data_item, str): + return(file_metadata) + elif isinstance(data_item, BinaryIO): + return(file_metadata) + elif hasattr(data_item, "dict") and inspect.ismethod(getattr(data_item, "dict")): + return {**file_metadata, **data_item.dict()} else: warnings.warn( - f"metadata of type {type(metadata)}: {str(metadata)[:20]}... does not have dict method. Defaulting to string method" + f"metadata of type {type(data_item)}: {str(data_item)[:20]}... does not have dict method. 
Defaulting to string method" ) try: - return {"content": str(metadata)} + return {**dict(file_metadata), "content": str(data_item)} except Exception as e: raise Exception(f"Could not cast metadata to string: {e}") diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index 573e2c3c1..0c17b71f5 100644 --- a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -1,7 +1,6 @@ from typing import Any import dlt - import cognee.modules.ingestion as ingestion from cognee.infrastructure.databases.relational import get_relational_engine from cognee.modules.data.methods import create_dataset @@ -76,7 +75,7 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): dataset.data.append(data_point) await session.commit() - await write_metadata(data_item, data_point.id) + await write_metadata(data_item, data_point.id, file_metadata) yield { From a4c56f118d77d0ddc44c8f4071d2560ad6b19cb6 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Fri, 29 Nov 2024 15:24:49 +0100 Subject: [PATCH 42/52] Connect code graph pipeline + retriever + benchmarking --- .gitignore | 2 +- .../llm/prompts/patch_gen_kg_instructions.txt | 4 +- .../retrieval/brute_force_triplet_search.py | 16 +-- evals/eval_swe_bench.py | 58 +++------ evals/eval_utils.py | 119 +++--------------- 5 files changed, 45 insertions(+), 154 deletions(-) diff --git a/.gitignore b/.gitignore index 47fa54130..edaa94cd4 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,7 @@ __pycache__/ *$py.class full_run.ipynb -evals/ +logs/ # C extensions *.so diff --git a/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt b/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt index 1553753ab..ebbb03f75 100644 --- a/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt +++ b/cognee/infrastructure/llm/prompts/patch_gen_kg_instructions.txt @@ -1,3 +1,3 @@ -I need you 
to solve this issue by looking at the provided knowledge graph and -generating a single patch file that I can apply directly to this repository using git apply. +I need you to solve this issue by looking at the provided edges retrieved from a knowledge graph and +generate a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format. \ No newline at end of file diff --git a/cognee/modules/retrieval/brute_force_triplet_search.py b/cognee/modules/retrieval/brute_force_triplet_search.py index 0a4e9dea5..b5ee5b612 100644 --- a/cognee/modules/retrieval/brute_force_triplet_search.py +++ b/cognee/modules/retrieval/brute_force_triplet_search.py @@ -1,13 +1,15 @@ import asyncio import logging from typing import List -from cognee.modules.users.models import User -from cognee.modules.users.methods import get_default_user -from cognee.modules.graph.cognee_graph.CogneeGraph import CogneeGraph -from cognee.infrastructure.databases.vector import get_vector_engine + from cognee.infrastructure.databases.graph import get_graph_engine +from cognee.infrastructure.databases.vector import get_vector_engine +from cognee.modules.graph.cognee_graph.CogneeGraph import CogneeGraph +from cognee.modules.users.methods import get_default_user +from cognee.modules.users.models import User from cognee.shared.utils import send_telemetry + def format_triplets(edges): print("\n\n\n") def filter_attributes(obj, attributes): @@ -48,16 +50,14 @@ def format_triplets(edges): return "".join(triplets) -async def brute_force_triplet_search(query: str, user: User = None, top_k = 5) -> list: +async def brute_force_triplet_search(query: str, user: User = None, top_k = 5, collections = None) -> list: if user is None: user = await get_default_user() if user is None: raise PermissionError("No user found in the system. 
Please create a user.") - retrieved_results = await brute_force_search(query, user, top_k) - - + retrieved_results = await brute_force_search(query, user, top_k, collections=collections) return retrieved_results diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 694d5c8ea..8e6cfec8e 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -4,17 +4,24 @@ import subprocess import sys from pathlib import Path -from datasets import Dataset from swebench.harness.utils import load_swebench_dataset from swebench.inference.make_datasets.create_instance import PATCH_EXAMPLE import cognee -from cognee.api.v1.cognify.code_graph_pipeline import code_graph_pipeline from cognee.api.v1.search import SearchType -from cognee.infrastructure.databases.graph import get_graph_engine from cognee.infrastructure.llm.get_llm_client import get_llm_client from cognee.infrastructure.llm.prompts import read_query_prompt -from evals.eval_utils import download_instances +from cognee.modules.pipelines import Task, run_tasks +from cognee.modules.retrieval.brute_force_triplet_search import \ + brute_force_triplet_search +from cognee.shared.data_models import SummarizedContent +from cognee.shared.utils import render_graph +from cognee.tasks.repo_processor import (enrich_dependency_graph, + expand_dependency_graph, + get_repo_file_dependencies) +from cognee.tasks.storage import add_data_points +from cognee.tasks.summarization import summarize_code +from evals.eval_utils import download_github_repo, retrieved_edges_to_string def check_install_package(package_name): @@ -33,44 +40,17 @@ def check_install_package(package_name): return True except subprocess.CalledProcessError: return False -from cognee.modules.pipelines import Task, run_tasks -from cognee.modules.retrieval.brute_force_triplet_search import \ - brute_force_triplet_search -from cognee.shared.data_models import SummarizedContent -from cognee.shared.utils import render_graph -from cognee.tasks.repo_processor 
import (enrich_dependency_graph, - expand_dependency_graph, - get_repo_file_dependencies) -from cognee.tasks.storage import add_data_points -from cognee.tasks.summarization import summarize_code -from evals.eval_utils import (delete_repo, download_github_repo, - download_instances, ingest_repos) -def node_to_string(node): - text = node.attributes["text"] - type = node.attributes["type"] - return f"Node(id: {node.id}, type: {type}, description: {text})" -def retrieved_edges_to_string(retrieved_edges): - edge_strings = [] - for edge in retrieved_edges: - relationship_type = edge.attributes["relationship_type"] - edge_str = f"{node_to_string(edge.node1)} {relationship_type} {node_to_string(edge.node2)}" - edge_strings.append(edge_str) - return "\n".join(edge_strings) - async def generate_patch_with_cognee(instance, llm_client, search_type=SearchType.CHUNKS): await cognee.prune.prune_data() await cognee.prune.prune_system() - #dataset_name = "SWE_test_data" - - #await cognee.add('', dataset_name = dataset_name) - # repo_path = download_github_repo(instance, '../RAW_GIT_REPOS') - - repo_path = '../minimal_repo' + + repo_path = '/Users/borisarzentar/Projects/graphrag' + tasks = [ Task(get_repo_file_dependencies), Task(add_data_points, task_config = { "batch_size": 50 }), @@ -92,11 +72,12 @@ async def generate_patch_with_cognee(instance, llm_client, search_type=SearchTyp problem_statement = instance['problem_statement'] instructions = read_query_prompt("patch_gen_kg_instructions.txt") - retrieved_edges = await brute_force_triplet_search(problem_statement, top_k = 3) + retrieved_edges = await brute_force_triplet_search(problem_statement, top_k = 3, collections = ["data_point_source_code", "data_point_text"]) retrieved_edges_str = retrieved_edges_to_string(retrieved_edges) prompt = "\n".join([ + problem_statement, "", PATCH_EXAMPLE, "", @@ -175,13 +156,8 @@ async def main(): dataset_name = 'princeton-nlp/SWE-bench_Lite' swe_dataset = load_swebench_dataset( dataset_name, 
split='test')[:1] - filepath = Path("SWE-bench_testsample") - if filepath.exists(): - dataset = Dataset.load_from_disk(filepath) - else: - dataset = download_instances(swe_dataset, filepath) predictions_path = "preds.json" - preds = await get_preds(dataset, with_cognee=not args.cognee_off) + preds = await get_preds(swe_dataset, with_cognee=not args.cognee_off) with open(predictions_path, "w") as file: json.dump(preds, file) diff --git a/evals/eval_utils.py b/evals/eval_utils.py index 3192127dc..26c4ec2b8 100644 --- a/evals/eval_utils.py +++ b/evals/eval_utils.py @@ -1,107 +1,7 @@ import os -from copy import deepcopy -from pathlib import Path -from tempfile import TemporaryDirectory - -from datasets import Dataset -from swebench.inference.make_datasets.create_instance import make_code_text -from swebench.inference.make_datasets.utils import (AutoContextManager, - ingest_directory_contents) -from tqdm.auto import tqdm -from git import Repo import shutil -def ingest_files(filenames): - files_dict = dict() - for filename in filenames: - with open(filename) as f: - content = f.read() - files_dict[filename] = content - return files_dict - - -def ingest_repos(input_instances): - orig_dir = os.getcwd() - with TemporaryDirectory( - dir="/scratch" if os.path.exists("/scratch") else "/tmp" - ) as root_dir: - for instance in tqdm( - input_instances.values(), - total=len(input_instances), - desc="Downloading repos on specific commits", - ): - try: - with AutoContextManager( - instance, root_dir - ) as cm: - readmes = cm.get_readme_files() - instance["readmes"] = ingest_files(readmes) - instance["file_contents"] = ingest_directory_contents( - cm.repo_path - ) - finally: - # if AutoContextManager fails to exit properly future exits will return the wrong directory - os.chdir(orig_dir) - - return input_instances - - -def extract_fields(instance): - readmes_text = make_code_text(instance["readmes"]) - code_text = make_code_text( - instance["file_contents"], add_line_numbers=False) - 
- text_inputs = "\n".join([readmes_text, code_text]) - text_inputs = text_inputs.strip() + "\n\n" - # text_inputs = code_text - patch = "\n".join(["", instance["patch"], ""]) - return {**instance, "text": text_inputs, "patch": patch} - - -def create_dataset(input_instances): - columns = [ - "instance_id", - "text", - "repo", - "base_commit", - "problem_statement", - "hints_text", - "created_at", - "patch", - "test_patch", - "version", - "FAIL_TO_PASS", - "PASS_TO_PASS", - "environment_setup_commit", - ] - - data_table = {key: list() for key in columns} - for instance in input_instances.values(): - datum = extract_fields(instance) - for key in columns: - data_table[key].append(datum[key] if key in datum else "") - dataset = Dataset.from_dict(data_table) - - return dataset - - -def download_instances( - input_data, - path=Path("SWE-bench_testsample"), - verbose=False, -): - """Downloads code from github. - - Args: - - input_data: dictionary with unprocessed input instances. - - verbose: set ContextManager verbose to True - """ - input_instances = {x["instance_id"]: x for x in input_data} - input_instances_copy = deepcopy(input_instances) - input_instances_with_text = ingest_repos(input_instances_copy) - dataset = create_dataset(input_instances_with_text) - dataset.save_to_disk(path) - return dataset +from git import Repo def download_github_repo(instance, output_dir): @@ -154,4 +54,19 @@ def delete_repo(repo_path): else: print(f"Repository path {repo_path} does not exist. 
Nothing to delete.") except Exception as e: - print(f"Error deleting repository at {repo_path}: {e}") \ No newline at end of file + print(f"Error deleting repository at {repo_path}: {e}") + + +def node_to_string(node): + text = node.attributes["text"] + type = node.attributes["type"] + return f"Node(id: {node.id}, type: {type}, description: {text})" + + +def retrieved_edges_to_string(retrieved_edges): + edge_strings = [] + for edge in retrieved_edges: + relationship_type = edge.attributes["relationship_type"] + edge_str = f"{node_to_string(edge.node1)} {relationship_type} {node_to_string(edge.node2)}" + edge_strings.append(edge_str) + return "\n".join(edge_strings) \ No newline at end of file From 198f71b9be16c8fb89d538ecce06cb62773e72b1 Mon Sep 17 00:00:00 2001 From: hajdul88 <52442977+hajdul88@users.noreply.github.com> Date: Sun, 1 Dec 2024 11:51:04 +0100 Subject: [PATCH 43/52] feat: Implements multiprocessing for get_repo_file_dependencies task (#43) --- .../get_repo_file_dependencies.py | 74 ++++++++++--------- 1 file changed, 41 insertions(+), 33 deletions(-) diff --git a/cognee/tasks/repo_processor/get_repo_file_dependencies.py b/cognee/tasks/repo_processor/get_repo_file_dependencies.py index 9ac4e9f2e..746721f1f 100644 --- a/cognee/tasks/repo_processor/get_repo_file_dependencies.py +++ b/cognee/tasks/repo_processor/get_repo_file_dependencies.py @@ -2,9 +2,9 @@ import os from typing import AsyncGenerator from uuid import NAMESPACE_OID, uuid5 import aiofiles -from tqdm.asyncio import tqdm +from concurrent.futures import ProcessPoolExecutor +import asyncio -from cognee.infrastructure.engine import DataPoint from cognee.shared.CodeGraphEntities import CodeFile, Repository from cognee.tasks.repo_processor.get_local_dependencies import get_local_script_dependencies @@ -45,46 +45,54 @@ def get_edge(file_path: str, dependency: str, repo_path: str, relative_paths: bo return (file_path, dependency, {"relation": "depends_directly_on"}) -async def 
get_repo_file_dependencies(repo_path: str) -> AsyncGenerator[list[DataPoint], None]: +def run_coroutine(coroutine_func, *args, **kwargs): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + result = loop.run_until_complete(coroutine_func(*args, **kwargs)) + loop.close() + return result + +async def get_repo_file_dependencies(repo_path: str) -> AsyncGenerator[list, None]: """Generate a dependency graph for Python files in the given repository path.""" py_files_dict = await get_py_files_dict(repo_path) repo = Repository( - id = uuid5(NAMESPACE_OID, repo_path), - path = repo_path, + id=uuid5(NAMESPACE_OID, repo_path), + path=repo_path, ) - # data_points = [repo] yield repo - # dependency_graph = nx.DiGraph() + with ProcessPoolExecutor(max_workers=12) as executor: + loop = asyncio.get_event_loop() - # dependency_graph.add_nodes_from(py_files_dict.items()) + tasks = [ + loop.run_in_executor( + executor, + run_coroutine, + get_local_script_dependencies, + os.path.join(repo_path, file_path), + repo_path + ) + for file_path, metadata in py_files_dict.items() + if metadata.get("source_code") is not None + ] - async for file_path, metadata in tqdm(py_files_dict.items(), desc="Repo dependency graph", unit="file"): - source_code = metadata.get("source_code") - if source_code is None: - continue + results = await asyncio.gather(*tasks) - dependencies = await get_local_script_dependencies(os.path.join(repo_path, file_path), repo_path) + for (file_path, metadata), dependencies in zip(py_files_dict.items(), results): + source_code = metadata.get("source_code") - # data_points.append() - yield CodeFile( - id = uuid5(NAMESPACE_OID, file_path), - source_code = source_code, - extracted_id = file_path, - part_of = repo, - depends_on = [ - CodeFile( - id = uuid5(NAMESPACE_OID, dependency), - extracted_id = dependency, - part_of = repo, - ) for dependency in dependencies - ] if len(dependencies) else None, - ) - # dependency_edges = [get_edge(file_path, dependency, 
repo_path) for dependency in dependencies] - - # dependency_graph.add_edges_from(dependency_edges) - - # return data_points - # return dependency_graph + yield CodeFile( + id=uuid5(NAMESPACE_OID, file_path), + source_code=source_code, + extracted_id=file_path, + part_of=repo, + depends_on=[ + CodeFile( + id=uuid5(NAMESPACE_OID, dependency), + extracted_id=dependency, + part_of=repo, + ) for dependency in dependencies + ] if dependencies else None, + ) From 11acabdb6a220ef2adc0df0a64496038e08f3c0d Mon Sep 17 00:00:00 2001 From: Boris Arzentar Date: Mon, 2 Dec 2024 10:10:18 +0100 Subject: [PATCH 44/52] fix: remove duplicate nodes and edges before saving; Fix FalkorDB vector index; --- .../hybrid/falkordb/FalkorDBAdapter.py | 11 +- cognee/modules/graph/utils/__init__.py | 1 + .../utils/deduplicate_nodes_and_edges.py | 19 +++ .../graph/utils/get_graph_from_model.py | 116 +++++++----------- cognee/shared/CodeGraphEntities.py | 3 + .../get_repo_file_dependencies.py | 23 ++-- cognee/tasks/storage/add_data_points.py | 6 +- cognee/tasks/storage/index_data_points.py | 11 +- .../get_graph_from_model_generative_test.py | 4 +- .../graph/get_graph_from_model_test.py | 20 +-- ...del_instance_from_graph_generative_test.py | 4 +- .../get_model_instance_from_graph_test.py | 4 +- evals/eval_swe_bench.py | 5 +- .../profile_graph_pydantic_conversion.py | 11 +- 14 files changed, 118 insertions(+), 120 deletions(-) create mode 100644 cognee/modules/graph/utils/deduplicate_nodes_and_edges.py diff --git a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py index 32a9853c2..1509bb428 100644 --- a/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py +++ b/cognee/infrastructure/databases/hybrid/falkordb/FalkorDBAdapter.py @@ -67,8 +67,9 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): node_properties = await self.stringify_properties({ **data_point.model_dump(), **({ - 
property_names[index]: (vectorized_values[index] if index in vectorized_values else None) \ - for index in range(len(property_names)) \ + property_names[index]: (vectorized_values[index] \ + if index < len(vectorized_values) else getattr(data_point, property_name, None)) \ + for index, property_name in enumerate(property_names) }), }) @@ -111,8 +112,8 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): property_value = getattr(data_point, property_name, None) if property_value is not None: + vector_map[key][property_name] = len(embeddable_values) embeddable_values.append(property_value) - vector_map[key][property_name] = len(embeddable_values) - 1 else: vector_map[key][property_name] = None @@ -123,7 +124,9 @@ class FalkorDBAdapter(VectorDBInterface, GraphDBInterface): data_point, [ vectorized_values[vector_map[str(data_point.id)][property_name]] \ - for property_name in DataPoint.get_embeddable_property_names(data_point) + if vector_map[str(data_point.id)][property_name] is not None \ + else None \ + for property_name in DataPoint.get_embeddable_property_names(data_point) ], ) for data_point in data_points ] diff --git a/cognee/modules/graph/utils/__init__.py b/cognee/modules/graph/utils/__init__.py index c4fa0d654..d1cda2d83 100644 --- a/cognee/modules/graph/utils/__init__.py +++ b/cognee/modules/graph/utils/__init__.py @@ -3,3 +3,4 @@ from .get_graph_from_model import get_graph_from_model from .get_model_instance_from_graph import get_model_instance_from_graph from .retrieve_existing_edges import retrieve_existing_edges from .convert_node_to_data_point import convert_node_to_data_point +from .deduplicate_nodes_and_edges import deduplicate_nodes_and_edges diff --git a/cognee/modules/graph/utils/deduplicate_nodes_and_edges.py b/cognee/modules/graph/utils/deduplicate_nodes_and_edges.py new file mode 100644 index 000000000..e863960ea --- /dev/null +++ b/cognee/modules/graph/utils/deduplicate_nodes_and_edges.py @@ -0,0 +1,19 @@ +from 
cognee.infrastructure.engine import DataPoint + +def deduplicate_nodes_and_edges(nodes: list[DataPoint], edges: list[dict]): + added_entities = {} + final_nodes = [] + final_edges = [] + + for node in nodes: + if str(node.id) not in added_entities: + final_nodes.append(node) + added_entities[str(node.id)] = True + + for edge in edges: + edge_key = str(edge[0]) + str(edge[2]) + str(edge[1]) + if edge_key not in added_entities: + final_edges.append(edge) + added_entities[edge_key] = True + + return final_nodes, final_edges diff --git a/cognee/modules/graph/utils/get_graph_from_model.py b/cognee/modules/graph/utils/get_graph_from_model.py index ccfba1adf..d49908354 100644 --- a/cognee/modules/graph/utils/get_graph_from_model.py +++ b/cognee/modules/graph/utils/get_graph_from_model.py @@ -8,18 +8,20 @@ async def get_graph_from_model( include_root = True, added_nodes = None, added_edges = None, + visited_properties = None, ): - if data_point.id in added_nodes: - return [], [] - nodes = [] edges = [] added_nodes = added_nodes or {} added_edges = added_edges or {} + visited_properties = visited_properties or {} data_point_properties = {} excluded_properties = set() + if include_root: + added_nodes[str(data_point.id)] = True + for field_name, field_value in data_point: if field_name == "_metadata": continue @@ -30,7 +32,15 @@ async def get_graph_from_model( if isinstance(field_value, DataPoint): excluded_properties.add(field_name) - nodes, edges, added_nodes, added_edges = add_nodes_and_edges( + + property_key = f"{str(data_point.id)}{field_name}{str(field_value.id)}" + + if property_key in visited_properties: + continue + + visited_properties[property_key] = True + + nodes, edges = await add_nodes_and_edges( data_point, field_name, field_value, @@ -38,77 +48,33 @@ async def get_graph_from_model( edges, added_nodes, added_edges, + visited_properties, ) - property_nodes, property_edges = await get_graph_from_model( - field_value, - True, - added_nodes, - added_edges, - ) - 
- for node in property_nodes: - if str(node.id) not in added_nodes: - nodes.append(node) - added_nodes[str(node.id)] = True - - for edge in property_edges: - edge_key = str(edge[0]) + str(edge[1]) + edge[2] - - if str(edge_key) not in added_edges: - edges.append(edge) - added_edges[str(edge_key)] = True - - for property_node in get_own_properties(property_nodes, property_edges): - edge_key = str(data_point.id) + str(property_node.id) + field_name - - if str(edge_key) not in added_edges: - edges.append((data_point.id, property_node.id, field_name, { - "source_node_id": data_point.id, - "target_node_id": property_node.id, - "relationship_name": field_name, - "updated_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"), - })) - added_edges[str(edge_key)] = True continue if isinstance(field_value, list) and len(field_value) > 0 and isinstance(field_value[0], DataPoint): excluded_properties.add(field_name) - for item in field_value: - property_nodes, property_edges = await get_graph_from_model( - item, - True, + for field_value_item in field_value: + property_key = f"{str(data_point.id)}{field_name}{str(field_value_item.id)}" + + if property_key in visited_properties: + continue + + visited_properties[property_key] = True + + nodes, edges = await add_nodes_and_edges( + data_point, + field_name, + field_value_item, + nodes, + edges, added_nodes, added_edges, + visited_properties, ) - for node in property_nodes: - if str(node.id) not in added_nodes: - nodes.append(node) - added_nodes[str(node.id)] = True - - for edge in property_edges: - edge_key = str(edge[0]) + str(edge[1]) + edge[2] - - if str(edge_key) not in added_edges: - edges.append(edge) - added_edges[edge_key] = True - - for property_node in get_own_properties(property_nodes, property_edges): - edge_key = str(data_point.id) + str(property_node.id) + field_name - - if str(edge_key) not in added_edges: - edges.append((data_point.id, property_node.id, field_name, { - "source_node_id": data_point.id, - 
"target_node_id": property_node.id, - "relationship_name": field_name, - "updated_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S"), - "metadata": { - "type": "list" - }, - })) - added_edges[edge_key] = True continue data_point_properties[field_name] = field_value @@ -128,12 +94,22 @@ async def get_graph_from_model( return nodes, edges -def add_nodes_and_edges( - data_point, field_name, field_value, nodes, edges, added_nodes, added_edges +async def add_nodes_and_edges( + data_point, + field_name, + field_value, + nodes, + edges, + added_nodes, + added_edges, + visited_properties, ): - - property_nodes, property_edges = get_graph_from_model( - field_value, dict(added_nodes), dict(added_edges) + property_nodes, property_edges = await get_graph_from_model( + field_value, + True, + added_nodes, + added_edges, + visited_properties, ) for node in property_nodes: @@ -169,7 +145,7 @@ def add_nodes_and_edges( ) added_edges[str(edge_key)] = True - return (nodes, edges, added_nodes, added_edges) + return (nodes, edges) def get_own_properties(property_nodes, property_edges): diff --git a/cognee/shared/CodeGraphEntities.py b/cognee/shared/CodeGraphEntities.py index d709b8d3a..87c37bcc9 100644 --- a/cognee/shared/CodeGraphEntities.py +++ b/cognee/shared/CodeGraphEntities.py @@ -2,10 +2,12 @@ from typing import List, Optional from cognee.infrastructure.engine import DataPoint class Repository(DataPoint): + __tablename__ = "Repository" path: str type: Optional[str] = "Repository" class CodeFile(DataPoint): + __tablename__ = "CodeFile" extracted_id: str # actually file path type: Optional[str] = "CodeFile" source_code: Optional[str] = None @@ -19,6 +21,7 @@ class CodeFile(DataPoint): } class CodePart(DataPoint): + __tablename__ = "CodePart" # part_of: Optional[CodeFile] source_code: str type: Optional[str] = "CodePart" diff --git a/cognee/tasks/repo_processor/get_repo_file_dependencies.py b/cognee/tasks/repo_processor/get_repo_file_dependencies.py index 
746721f1f..352cbe19b 100644 --- a/cognee/tasks/repo_processor/get_repo_file_dependencies.py +++ b/cognee/tasks/repo_processor/get_repo_file_dependencies.py @@ -57,13 +57,13 @@ async def get_repo_file_dependencies(repo_path: str) -> AsyncGenerator[list, Non py_files_dict = await get_py_files_dict(repo_path) repo = Repository( - id=uuid5(NAMESPACE_OID, repo_path), - path=repo_path, + id = uuid5(NAMESPACE_OID, repo_path), + path = repo_path, ) yield repo - with ProcessPoolExecutor(max_workers=12) as executor: + with ProcessPoolExecutor(max_workers = 12) as executor: loop = asyncio.get_event_loop() tasks = [ @@ -84,15 +84,16 @@ async def get_repo_file_dependencies(repo_path: str) -> AsyncGenerator[list, Non source_code = metadata.get("source_code") yield CodeFile( - id=uuid5(NAMESPACE_OID, file_path), - source_code=source_code, - extracted_id=file_path, - part_of=repo, - depends_on=[ + id = uuid5(NAMESPACE_OID, file_path), + source_code = source_code, + extracted_id = file_path, + part_of = repo, + depends_on = [ CodeFile( - id=uuid5(NAMESPACE_OID, dependency), - extracted_id=dependency, - part_of=repo, + id = uuid5(NAMESPACE_OID, dependency), + extracted_id = dependency, + part_of = repo, + source_code = py_files_dict.get(dependency, {}).get("source_code"), ) for dependency in dependencies ] if dependencies else None, ) diff --git a/cognee/tasks/storage/add_data_points.py b/cognee/tasks/storage/add_data_points.py index 33f9d7a70..c7af36007 100644 --- a/cognee/tasks/storage/add_data_points.py +++ b/cognee/tasks/storage/add_data_points.py @@ -1,7 +1,7 @@ import asyncio from cognee.infrastructure.engine import DataPoint from cognee.infrastructure.databases.graph import get_graph_engine -from cognee.modules.graph.utils import get_graph_from_model +from cognee.modules.graph.utils import deduplicate_nodes_and_edges, get_graph_from_model from .index_data_points import index_data_points @@ -17,9 +17,11 @@ async def add_data_points(data_points: list[DataPoint]): 
nodes.extend(result_nodes) edges.extend(result_edges) + nodes, edges = deduplicate_nodes_and_edges(nodes, edges) + graph_engine = await get_graph_engine() - await index_data_points(data_points) + await index_data_points(nodes) await graph_engine.add_nodes(nodes) await graph_engine.add_edges(edges) diff --git a/cognee/tasks/storage/index_data_points.py b/cognee/tasks/storage/index_data_points.py index 01c2c2796..58e4f096d 100644 --- a/cognee/tasks/storage/index_data_points.py +++ b/cognee/tasks/storage/index_data_points.py @@ -8,16 +8,7 @@ async def index_data_points(data_points: list[DataPoint]): vector_engine = get_vector_engine() - flat_data_points: list[DataPoint] = [] - - results = await asyncio.gather(*[ - get_data_points_from_model(data_point) for data_point in data_points - ]) - - for result in results: - flat_data_points.extend(result) - - for data_point in flat_data_points: + for data_point in data_points: data_point_type = type(data_point) for field_name in data_point._metadata["index_fields"]: diff --git a/cognee/tests/unit/interfaces/graph/get_graph_from_model_generative_test.py b/cognee/tests/unit/interfaces/graph/get_graph_from_model_generative_test.py index dec751f89..73aa5972b 100644 --- a/cognee/tests/unit/interfaces/graph/get_graph_from_model_generative_test.py +++ b/cognee/tests/unit/interfaces/graph/get_graph_from_model_generative_test.py @@ -11,7 +11,7 @@ from cognee.tests.unit.interfaces.graph.util import ( @pytest.mark.parametrize("recursive_depth", [1, 2, 3]) -def test_society_nodes_and_edges(recursive_depth): +async def test_society_nodes_and_edges(recursive_depth): import sys if sys.version_info[0] == 3 and sys.version_info[1] >= 11: @@ -22,7 +22,7 @@ def test_society_nodes_and_edges(recursive_depth): n_organizations, n_persons = count_society(society) society_counts_total = n_organizations + n_persons - nodes, edges = get_graph_from_model(society) + nodes, edges = await get_graph_from_model(society) assert ( len(nodes) == 
society_counts_total diff --git a/cognee/tests/unit/interfaces/graph/get_graph_from_model_test.py b/cognee/tests/unit/interfaces/graph/get_graph_from_model_test.py index e56a2dff2..bed476254 100644 --- a/cognee/tests/unit/interfaces/graph/get_graph_from_model_test.py +++ b/cognee/tests/unit/interfaces/graph/get_graph_from_model_test.py @@ -48,29 +48,29 @@ PERSON_GROUND_TRUTH = { } -def test_extracted_car_type(boris): - nodes, _ = get_graph_from_model(boris) +async def test_extracted_car_type(boris): + nodes, _ = await get_graph_from_model(boris) assert len(nodes) == 3 car_type = nodes[0] run_test_against_ground_truth("car_type", car_type, CAR_TYPE_GROUND_TRUTH) -def test_extracted_car(boris): - nodes, _ = get_graph_from_model(boris) +async def test_extracted_car(boris): + nodes, _ = await get_graph_from_model(boris) assert len(nodes) == 3 car = nodes[1] run_test_against_ground_truth("car", car, CAR_GROUND_TRUTH) -def test_extracted_person(boris): - nodes, _ = get_graph_from_model(boris) +async def test_extracted_person(boris): + nodes, _ = await get_graph_from_model(boris) assert len(nodes) == 3 person = nodes[2] run_test_against_ground_truth("person", person, PERSON_GROUND_TRUTH) -def test_extracted_car_sedan_edge(boris): - _, edges = get_graph_from_model(boris) +async def test_extracted_car_sedan_edge(boris): + _, edges = await get_graph_from_model(boris) edge = edges[0] assert CAR_SEDAN_EDGE[:3] == edge[:3], f"{CAR_SEDAN_EDGE[:3] = } != {edge[:3] = }" @@ -78,8 +78,8 @@ def test_extracted_car_sedan_edge(boris): assert ground_truth == edge[3][key], f"{ground_truth = } != {edge[3][key] = }" -def test_extracted_boris_car_edge(boris): - _, edges = get_graph_from_model(boris) +async def test_extracted_boris_car_edge(boris): + _, edges = await get_graph_from_model(boris) edge = edges[1] assert ( diff --git a/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_generative_test.py 
b/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_generative_test.py index dd5e19469..9f7462c85 100644 --- a/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_generative_test.py +++ b/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_generative_test.py @@ -14,14 +14,14 @@ from cognee.tests.unit.interfaces.graph.util import ( @pytest.mark.parametrize("recursive_depth", [1, 2, 3]) -def test_society_nodes_and_edges(recursive_depth): +async def test_society_nodes_and_edges(recursive_depth): import sys if sys.version_info[0] == 3 and sys.version_info[1] >= 11: society = create_organization_recursive( "society", "Society", PERSON_NAMES, recursive_depth ) - nodes, edges = get_graph_from_model(society) + nodes, edges = await get_graph_from_model(society) parsed_society = get_model_instance_from_graph(nodes, edges, "society") assert str(society) == (str(parsed_society)), show_first_difference( diff --git a/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_test.py b/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_test.py index f1aa7736d..6bdaedcaf 100644 --- a/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_test.py +++ b/cognee/tests/unit/interfaces/graph/get_model_instance_from_graph_test.py @@ -25,8 +25,8 @@ CAR_GROUND_TRUTH = { } -def test_parsed_person(boris): - nodes, edges = get_graph_from_model(boris) +async def test_parsed_person(boris): + nodes, edges = await get_graph_from_model(boris) parsed_person = get_model_instance_from_graph(nodes, edges, "boris") run_test_against_ground_truth( diff --git a/evals/eval_swe_bench.py b/evals/eval_swe_bench.py index 8e6cfec8e..9cd679429 100644 --- a/evals/eval_swe_bench.py +++ b/evals/eval_swe_bench.py @@ -43,7 +43,6 @@ def check_install_package(package_name): async def generate_patch_with_cognee(instance, llm_client, search_type=SearchType.CHUNKS): - await cognee.prune.prune_data() await cognee.prune.prune_system() @@ -57,11 +56,11 @@ 
async def generate_patch_with_cognee(instance, llm_client, search_type=SearchTyp Task(enrich_dependency_graph, task_config = { "batch_size": 50 }), Task(expand_dependency_graph, task_config = { "batch_size": 50 }), Task(add_data_points, task_config = { "batch_size": 50 }), - Task(summarize_code, summarization_model = SummarizedContent), + # Task(summarize_code, summarization_model = SummarizedContent), ] pipeline = run_tasks(tasks, repo_path, "cognify_code_pipeline") - + async for result in pipeline: print(result) diff --git a/profiling/graph_pydantic_conversion/profile_graph_pydantic_conversion.py b/profiling/graph_pydantic_conversion/profile_graph_pydantic_conversion.py index 664186c28..48d5352a6 100644 --- a/profiling/graph_pydantic_conversion/profile_graph_pydantic_conversion.py +++ b/profiling/graph_pydantic_conversion/profile_graph_pydantic_conversion.py @@ -1,7 +1,7 @@ import argparse -import time +import asyncio -from benchmark_function import benchmark_function +from .benchmark_function import benchmark_function from cognee.modules.graph.utils import get_graph_from_model from cognee.tests.unit.interfaces.graph.util import ( @@ -28,9 +28,12 @@ if __name__ == "__main__": society = create_organization_recursive( "society", "Society", PERSON_NAMES, args.recursive_depth ) - nodes, edges = get_graph_from_model(society) + nodes, edges = asyncio.run(get_graph_from_model(society)) - results = benchmark_function(get_graph_from_model, society, num_runs=args.runs) + def get_graph_from_model_sync(model): + return asyncio.run(get_graph_from_model(model)) + + results = benchmark_function(get_graph_from_model_sync, society, num_runs=args.runs) print("\nBenchmark Results:") print( f"N nodes: {len(nodes)}, N edges: {len(edges)}, Recursion depth: {args.recursive_depth}" From f966f099fc0064a7b7fcaf236255b069d64a35d9 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Mon, 2 Dec 2024 12:18:00 +0100 Subject: [PATCH 45/52] Prompt renaming to more specific names. 
Minor code changes. --- ...uestion.txt => answer_hotpot_question.txt} | 0 ... => answer_hotpot_using_cognee_search.txt} | 0 evals/llm_as_a_judge.py | 21 +++++++++---------- 3 files changed, 10 insertions(+), 11 deletions(-) rename cognee/infrastructure/llm/prompts/{answer_question.txt => answer_hotpot_question.txt} (100%) rename cognee/infrastructure/llm/prompts/{answer_question_kg.txt => answer_hotpot_using_cognee_search.txt} (100%) diff --git a/cognee/infrastructure/llm/prompts/answer_question.txt b/cognee/infrastructure/llm/prompts/answer_hotpot_question.txt similarity index 100% rename from cognee/infrastructure/llm/prompts/answer_question.txt rename to cognee/infrastructure/llm/prompts/answer_hotpot_question.txt diff --git a/cognee/infrastructure/llm/prompts/answer_question_kg.txt b/cognee/infrastructure/llm/prompts/answer_hotpot_using_cognee_search.txt similarity index 100% rename from cognee/infrastructure/llm/prompts/answer_question_kg.txt rename to cognee/infrastructure/llm/prompts/answer_hotpot_using_cognee_search.txt diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py index 8dd1518a7..0398f9422 100644 --- a/evals/llm_as_a_judge.py +++ b/evals/llm_as_a_judge.py @@ -63,37 +63,36 @@ async def answer_with_cognee(instance): async def eval_answers(instances, answers, eval_metric): test_cases = [] - for i in range(len(answers)): - instance = instances[i] - answer = answers[i] + for instance, answer in zip(instances, answers): test_case = LLMTestCase( input=instance["question"], actual_output=answer, expected_output=instance["answer"] ) test_cases.append(test_case) - evalset = EvaluationDataset(test_cases) - evalresults = evalset.evaluate([eval_metric]) - return evalresults + eval_set = EvaluationDataset(test_cases) + eval_results = eval_set.evaluate([eval_metric]) + return eval_results async def eval_on_hotpotQA(answer_provider, num_samples, eval_metric): base_config = get_base_config() data_root_dir = base_config.data_root_directory + if not 
Path(data_root_dir).exists(): + data_root_dir.mkdir() filepath = data_root_dir / Path("hotpot_dev_fullwiki_v1.json") if not filepath.exists(): url = 'http://curtis.ml.cmu.edu/datasets/hotpot/hotpot_dev_fullwiki_v1.json' wget.download(url, out=data_root_dir) with open(filepath, "r") as file: dataset = json.load(file) - if not num_samples: - num_samples = len(dataset) - instances = dataset[:num_samples] + + instances = dataset if not num_samples else dataset[:num_samples] answers = [] for instance in tqdm(instances, desc="Getting answers"): answer = await answer_provider(instance) answers.append(answer) - evalresults = await eval_answers(instances, answers, eval_metric) - avg_score = statistics.mean([result.metrics_data[0].score for result in evalresults.test_results]) + eval_results = await eval_answers(instances, answers, eval_metric) + avg_score = statistics.mean([result.metrics_data[0].score for result in eval_results.test_results]) return avg_score if __name__ == "__main__": From 0eb40d07617e244d43552853588d465f47bac57c Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Mon, 2 Dec 2024 13:31:39 +0100 Subject: [PATCH 46/52] updating dependencies with deepeval as optional --- poetry.lock | 2068 +++++++++++++++++++++++++++--------------------- pyproject.toml | 1 + 2 files changed, 1146 insertions(+), 923 deletions(-) diff --git a/poetry.lock b/poetry.lock index 96f9aec27..0450b1b12 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "aiohappyeyeballs" -version = "2.4.3" +version = "2.4.4" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, - {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, + {file = "aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = 
"sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8"}, + {file = "aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745"}, ] [[package]] @@ -346,21 +346,18 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "asttokens" -version = "2.4.1" +version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "astunparse" @@ -528,38 +525,36 @@ files = [ [[package]] name = "bcrypt" -version = "4.2.0" +version = "4.2.1" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, - {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, - {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, - {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, - {file = 
"bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, - {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, - {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, - {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, + {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"}, + {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, + {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, + {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, + {file = 
"bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, + {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, + {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, + {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, ] [package.extras] @@ -606,17 +601,17 @@ css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "boto3" -version = "1.35.57" +version = "1.35.72" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.57-py3-none-any.whl", hash = "sha256:9edf49640c79a05b0a72f4c2d1e24dfc164344b680535a645f455ac624dc3680"}, - {file = "boto3-1.35.57.tar.gz", hash = "sha256:db58348849a5af061f0f5ec9c3b699da5221ca83354059fdccb798e3ddb6b62a"}, + {file = "boto3-1.35.72-py3-none-any.whl", hash = "sha256:410bb4ec676c57ee9c3c7824b7b1a3721584f18f8ee8ccc8e8ecdf285136b77f"}, + {file = "boto3-1.35.72.tar.gz", hash = "sha256:f9fc94413a959c388b1654c6687a5193293f3c69f8d0af3b86fd48b4096a23f3"}, ] 
[package.dependencies] -botocore = ">=1.35.57,<1.36.0" +botocore = ">=1.35.72,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -625,13 +620,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.57" +version = "1.35.72" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.57-py3-none-any.whl", hash = "sha256:92ddd02469213766872cb2399269dd20948f90348b42bf08379881d5e946cc34"}, - {file = "botocore-1.35.57.tar.gz", hash = "sha256:d96306558085baf0bcb3b022d7a8c39c93494f031edb376694d2b2dcd0e81327"}, + {file = "botocore-1.35.72-py3-none-any.whl", hash = "sha256:7412877c3f766a1bfd09236e225ce1f0dc2c35e47949ae423e56e2093c8fa23a"}, + {file = "botocore-1.35.72.tar.gz", hash = "sha256:6b5fac38ef7cfdbc7781a751e0f78833ccb9149ba815bc238b1dbb75c90fbae5"}, ] [package.dependencies] @@ -988,73 +983,73 @@ test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist" [[package]] name = "coverage" -version = "7.6.4" +version = "7.6.8" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, - {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, - {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, - {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, - {file = 
"coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, - {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, - {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, - {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, - {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, - 
{file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, - {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, - {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, - {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, - {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, - {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, - {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, - {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, - {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, - {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, + {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, + {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, + {file = 
"coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, + {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, + {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, + {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, + {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, + 
{file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, + {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, + {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, + {file = 
"coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, + {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, + {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, + {file = 
"coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, + {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, + {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, + {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, + {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, ] [package.extras] @@ -1138,7 +1133,7 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] name = "dataclasses-json" version = "0.6.7" description = "Easily serialize dataclasses to and from JSON." -optional = true +optional = false python-versions = "<4.0,>=3.7" files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, @@ -1233,6 +1228,45 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "deepeval" +version = "2.0.1" +description = "The open-source LLMs evaluation framework." 
+optional = false +python-versions = "<3.13,>=3.9" +files = [ + {file = "deepeval-2.0.1-py3-none-any.whl", hash = "sha256:e06134a36f6b2a9173c92bdebe1e42325f4e27ded77bbf4b5323fa955682b6ea"}, + {file = "deepeval-2.0.1.tar.gz", hash = "sha256:63360c4e0101d7ad29419bc754fba239d0d02d969bf5c070d387cf8b973cab01"}, +] + +[package.dependencies] +docx2txt = ">=0.8,<1.0" +grpcio = "1.60.1" +importlib-metadata = ">=6.0.2" +langchain = "*" +langchain-community = "*" +langchain-core = "*" +langchain_openai = "*" +opentelemetry-api = ">=1.24.0,<2.0.0" +opentelemetry-exporter-otlp-proto-grpc = ">=1.24.0,<2.0.0" +opentelemetry-sdk = ">=1.24.0,<2.0.0" +portalocker = "*" +protobuf = "*" +pydantic = "*" +pytest = "*" +pytest-repeat = "*" +pytest-xdist = "*" +requests = "*" +rich = "*" +sentry-sdk = "*" +tabulate = "*" +tenacity = ">=8.4.1,<8.5.0" +tqdm = "*" +typer = "*" + +[package.extras] +dev = ["black"] + [[package]] name = "defusedxml" version = "0.7.1" @@ -1246,20 +1280,20 @@ files = [ [[package]] name = "deprecated" -version = "1.2.14" +version = "1.2.15" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, ] [package.dependencies] wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] [[package]] name = "deprecation" @@ -1340,16 +1374,17 @@ files = [ [[package]] name = "dlt" -version = "1.3.0" +version = "1.4.0" description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run." 
optional = false python-versions = "<3.13,>=3.8.1" files = [ - {file = "dlt-1.3.0-py3-none-any.whl", hash = "sha256:e2583ed0ad4a0d9941b8f9cb0e078f4443bcbeb0e1cf1cce586cf35107ccf266"}, - {file = "dlt-1.3.0.tar.gz", hash = "sha256:57eecee99ace25b6d37027a78f59f8c735d1913cc81f1101e1b47bf96fc544b8"}, + {file = "dlt-1.4.0-py3-none-any.whl", hash = "sha256:c3a69e4067581bf0335796bec62d58058ff1f11249f16b699d6657544b126247"}, + {file = "dlt-1.4.0.tar.gz", hash = "sha256:75208448dc11dd501cf15d76742368816fef8e1b22fb07417f69d5ceb720b324"}, ] [package.dependencies] +aiohttp = ">=3.9" alembic = {version = ">1.10.0", optional = true, markers = "extra == \"sqlalchemy\""} astunparse = ">=1.6.3" click = ">=7.1" @@ -1386,7 +1421,7 @@ bigquery = ["db-dtypes (>=1.2.0)", "gcsfs (>=2022.4.0)", "google-cloud-bigquery cli = ["cron-descriptor (>=1.2.32)", "pipdeptree (>=2.9.0,<2.10)"] clickhouse = ["adlfs (>=2022.4.0)", "clickhouse-connect (>=0.7.7)", "clickhouse-driver (>=0.2.7)", "gcsfs (>=2022.4.0)", "pyarrow (>=12.0.0)", "s3fs (>=2022.4.0)"] databricks = ["databricks-sql-connector (>=2.9.3)"] -deltalake = ["deltalake (>=0.19.0)", "pyarrow (>=12.0.0)"] +deltalake = ["deltalake (>=0.21.0)", "pyarrow (>=12.0.0)"] dremio = ["pyarrow (>=12.0.0)"] duckdb = ["duckdb (>=0.9)"] filesystem = ["botocore (>=1.28)", "s3fs (>=2022.4.0)", "sqlglot (>=20.0.0)"] @@ -1438,6 +1473,16 @@ files = [ {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, ] +[[package]] +name = "docx2txt" +version = "0.8" +description = "A pure python-based utility to extract text and images from docx files." 
+optional = false +python-versions = "*" +files = [ + {file = "docx2txt-0.8.tar.gz", hash = "sha256:2c06d98d7cfe2d3947e5760a57d924e3ff07745b379c8737723922e7009236e5"}, +] + [[package]] name = "email-validator" version = "2.2.0" @@ -1467,6 +1512,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.1.1" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, + {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "executing" version = "2.1.0" @@ -1556,13 +1615,13 @@ sqlalchemy = {version = ">=2.0.0,<2.1.0", extras = ["asyncio"]} [[package]] name = "fastjsonschema" -version = "2.20.0" +version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, ] [package.extras] @@ -1597,59 +1656,61 @@ files = [ [[package]] name = "fonttools" -version = "4.54.1" +version = "4.55.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, - {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, - {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, - {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, - {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, - {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, - {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, - {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, - {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, - {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, - {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, - {file = 
"fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, - {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, - {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, - {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, - {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, - {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, - {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, - {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, - {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, - {file = 
"fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, - {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, - {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, - {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, - {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, - {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, - {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, - {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, - {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, - {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, - {file = 
"fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, - {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, - {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, - {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, - {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, - {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, - {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, + {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:51c029d4c0608a21a3d3d169dfc3fb776fde38f00b35ca11fdab63ba10a16f61"}, + {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca35b4e411362feab28e576ea10f11268b1aeed883b9f22ed05675b1e06ac69"}, + {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ce4ba6981e10f7e0ccff6348e9775ce25ffadbee70c9fd1a3737e3e9f5fa74f"}, + {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31d00f9852a6051dac23294a4cf2df80ced85d1d173a61ba90a3d8f5abc63c60"}, + {file = 
"fonttools-4.55.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e198e494ca6e11f254bac37a680473a311a88cd40e58f9cc4dc4911dfb686ec6"}, + {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7208856f61770895e79732e1dcbe49d77bd5783adf73ae35f87fcc267df9db81"}, + {file = "fonttools-4.55.0-cp310-cp310-win32.whl", hash = "sha256:e7e6a352ff9e46e8ef8a3b1fe2c4478f8a553e1b5a479f2e899f9dc5f2055880"}, + {file = "fonttools-4.55.0-cp310-cp310-win_amd64.whl", hash = "sha256:636caaeefe586d7c84b5ee0734c1a5ab2dae619dc21c5cf336f304ddb8f6001b"}, + {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fa34aa175c91477485c44ddfbb51827d470011e558dfd5c7309eb31bef19ec51"}, + {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:37dbb3fdc2ef7302d3199fb12468481cbebaee849e4b04bc55b77c24e3c49189"}, + {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5263d8e7ef3c0ae87fbce7f3ec2f546dc898d44a337e95695af2cd5ea21a967"}, + {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f307f6b5bf9e86891213b293e538d292cd1677e06d9faaa4bf9c086ad5f132f6"}, + {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f0a4b52238e7b54f998d6a56b46a2c56b59c74d4f8a6747fb9d4042190f37cd3"}, + {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3e569711464f777a5d4ef522e781dc33f8095ab5efd7548958b36079a9f2f88c"}, + {file = "fonttools-4.55.0-cp311-cp311-win32.whl", hash = "sha256:2b3ab90ec0f7b76c983950ac601b58949f47aca14c3f21eed858b38d7ec42b05"}, + {file = "fonttools-4.55.0-cp311-cp311-win_amd64.whl", hash = "sha256:aa046f6a63bb2ad521004b2769095d4c9480c02c1efa7d7796b37826508980b6"}, + {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:838d2d8870f84fc785528a692e724f2379d5abd3fc9dad4d32f91cf99b41e4a7"}, + {file = 
"fonttools-4.55.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f46b863d74bab7bb0d395f3b68d3f52a03444964e67ce5c43ce43a75efce9246"}, + {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33b52a9cfe4e658e21b1f669f7309b4067910321757fec53802ca8f6eae96a5a"}, + {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732a9a63d6ea4a81b1b25a1f2e5e143761b40c2e1b79bb2b68e4893f45139a40"}, + {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7dd91ac3fcb4c491bb4763b820bcab6c41c784111c24172616f02f4bc227c17d"}, + {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f0e115281a32ff532118aa851ef497a1b7cda617f4621c1cdf81ace3e36fb0c"}, + {file = "fonttools-4.55.0-cp312-cp312-win32.whl", hash = "sha256:6c99b5205844f48a05cb58d4a8110a44d3038c67ed1d79eb733c4953c628b0f6"}, + {file = "fonttools-4.55.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c8c76037d05652510ae45be1cd8fb5dd2fd9afec92a25374ac82255993d57c"}, + {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8118dc571921dc9e4b288d9cb423ceaf886d195a2e5329cc427df82bba872cd9"}, + {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01124f2ca6c29fad4132d930da69158d3f49b2350e4a779e1efbe0e82bd63f6c"}, + {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ffd58d2691f11f7c8438796e9f21c374828805d33e83ff4b76e4635633674c"}, + {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5435e5f1eb893c35c2bc2b9cd3c9596b0fcb0a59e7a14121562986dd4c47b8dd"}, + {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d12081729280c39d001edd0f4f06d696014c26e6e9a0a55488fabc37c28945e4"}, + {file = 
"fonttools-4.55.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7ad1f1b98ab6cb927ab924a38a8649f1ffd7525c75fe5b594f5dab17af70e18"}, + {file = "fonttools-4.55.0-cp313-cp313-win32.whl", hash = "sha256:abe62987c37630dca69a104266277216de1023cf570c1643bb3a19a9509e7a1b"}, + {file = "fonttools-4.55.0-cp313-cp313-win_amd64.whl", hash = "sha256:2863555ba90b573e4201feaf87a7e71ca3b97c05aa4d63548a4b69ea16c9e998"}, + {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:00f7cf55ad58a57ba421b6a40945b85ac7cc73094fb4949c41171d3619a3a47e"}, + {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f27526042efd6f67bfb0cc2f1610fa20364396f8b1fc5edb9f45bb815fb090b2"}, + {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e67974326af6a8879dc2a4ec63ab2910a1c1a9680ccd63e4a690950fceddbe"}, + {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61dc0a13451143c5e987dec5254d9d428f3c2789a549a7cf4f815b63b310c1cc"}, + {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e526b325a903868c62155a6a7e24df53f6ce4c5c3160214d8fe1be2c41b478"}, + {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b7ef9068a1297714e6fefe5932c33b058aa1d45a2b8be32a4c6dee602ae22b5c"}, + {file = "fonttools-4.55.0-cp38-cp38-win32.whl", hash = "sha256:55718e8071be35dff098976bc249fc243b58efa263768c611be17fe55975d40a"}, + {file = "fonttools-4.55.0-cp38-cp38-win_amd64.whl", hash = "sha256:553bd4f8cc327f310c20158e345e8174c8eed49937fb047a8bda51daf2c353c8"}, + {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f901cef813f7c318b77d1c5c14cf7403bae5cb977cede023e22ba4316f0a8f6"}, + {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c9679fc0dd7e8a5351d321d8d29a498255e69387590a86b596a45659a39eb0d"}, + {file = 
"fonttools-4.55.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2820a8b632f3307ebb0bf57948511c2208e34a4939cf978333bc0a3f11f838"}, + {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23bbbb49bec613a32ed1b43df0f2b172313cee690c2509f1af8fdedcf0a17438"}, + {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a656652e1f5d55b9728937a7e7d509b73d23109cddd4e89ee4f49bde03b736c6"}, + {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f50a1f455902208486fbca47ce33054208a4e437b38da49d6721ce2fef732fcf"}, + {file = "fonttools-4.55.0-cp39-cp39-win32.whl", hash = "sha256:161d1ac54c73d82a3cded44202d0218ab007fde8cf194a23d3dd83f7177a2f03"}, + {file = "fonttools-4.55.0-cp39-cp39-win_amd64.whl", hash = "sha256:ca7fd6987c68414fece41c96836e945e1f320cda56fc96ffdc16e54a44ec57a2"}, + {file = "fonttools-4.55.0-py3-none-any.whl", hash = "sha256:12db5888cd4dd3fcc9f0ee60c6edd3c7e1fd44b7dd0f31381ea03df68f8a153f"}, + {file = "fonttools-4.55.0.tar.gz", hash = "sha256:7636acc6ab733572d5e7eec922b254ead611f1cdad17be3f0be7418e8bfaca71"}, ] [package.extras] @@ -1880,6 +1941,23 @@ files = [ {file = "giturlparse-0.12.0.tar.gz", hash = "sha256:c0fff7c21acc435491b1779566e038757a205c1ffdcb47e4f81ea52ad8c3859a"}, ] +[[package]] +name = "googleapis-common-protos" +version = "1.66.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" + +[package.extras] +grpc = ["grpcio 
(>=1.44.0,<2.0.0.dev0)"] + [[package]] name = "graphistry" version = "0.33.9" @@ -2039,153 +2117,151 @@ typing-extensions = ">=4.7,<5" [[package]] name = "grpcio" -version = "1.67.1" +version = "1.60.1" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"}, - {file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0"}, - {file = "grpcio-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa"}, - {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292"}, - {file = "grpcio-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311"}, - {file = "grpcio-1.67.1-cp310-cp310-win32.whl", hash = "sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed"}, - {file = "grpcio-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e"}, - {file = "grpcio-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb"}, - {file = "grpcio-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e"}, - {file = 
"grpcio-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f"}, - {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc"}, - {file = "grpcio-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96"}, - {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f"}, - {file = "grpcio-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970"}, - {file = "grpcio-1.67.1-cp311-cp311-win32.whl", hash = "sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744"}, - {file = "grpcio-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5"}, - {file = "grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953"}, - {file = "grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af"}, - {file = "grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e"}, - {file = "grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75"}, - {file = 
"grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38"}, - {file = "grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78"}, - {file = "grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc"}, - {file = "grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b"}, - {file = "grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955"}, - {file = "grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8"}, - {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62"}, - {file = "grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb"}, - {file = "grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121"}, - {file = "grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba"}, - {file = "grpcio-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65"}, - {file = "grpcio-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085"}, - {file = "grpcio-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3"}, - {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8"}, - {file = "grpcio-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce"}, - {file = "grpcio-1.67.1-cp38-cp38-win32.whl", hash = "sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46"}, - {file = "grpcio-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771"}, - {file = "grpcio-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335"}, - {file = "grpcio-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d"}, - {file = "grpcio-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04"}, - {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8"}, - {file = "grpcio-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f"}, - {file = "grpcio-1.67.1-cp39-cp39-win32.whl", hash = "sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e"}, - {file = "grpcio-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98"}, - {file = "grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732"}, + {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, + {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, + {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, + {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, + {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, + {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, + {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, + {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, + {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, + {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, + {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, + {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, + {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, + {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, + {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, + {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, + {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, + {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, + {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, + {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, + {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, + {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, + {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, + {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, + {file = 
"grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, + {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, + {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, + {file = "grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, + {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, + {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, + {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, + {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, + {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, + {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, + {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, + {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.67.1)"] +protobuf = ["grpcio-tools (>=1.60.1)"] [[package]] name = "grpcio-health-checking" -version = "1.67.1" +version = "1.60.1" description = "Standard Health Checking Service for gRPC" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "grpcio_health_checking-1.67.1-py3-none-any.whl", hash = "sha256:93753da5062152660aef2286c9b261e07dd87124a65e4dc9fbd47d1ce966b39d"}, - {file = "grpcio_health_checking-1.67.1.tar.gz", hash = "sha256:ca90fa76a6afbb4fda71d734cb9767819bba14928b91e308cffbb0c311eb941e"}, + {file = "grpcio-health-checking-1.60.1.tar.gz", hash = "sha256:7c2e48ce9d5bdb19ad57b7abe3438d47ebdead507866939140720b3e28c625b3"}, + {file = "grpcio_health_checking-1.60.1-py3-none-any.whl", hash = "sha256:96dc89800cabdc943a5734e037fadea5f94c31c76e043638e448c1865b34d894"}, ] [package.dependencies] -grpcio = ">=1.67.1" -protobuf = ">=5.26.1,<6.0dev" +grpcio = ">=1.60.1" +protobuf = ">=4.21.6" [[package]] name = "grpcio-tools" -version = "1.67.1" +version = "1.60.1" description = "Protobuf code generator for gRPC" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "grpcio_tools-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:c701aaa51fde1f2644bd94941aa94c337adb86f25cd03cf05e37387aaea25800"}, - {file = 
"grpcio_tools-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:6a722bba714392de2386569c40942566b83725fa5c5450b8910e3832a5379469"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0c7415235cb154e40b5ae90e2a172a0eb8c774b6876f53947cf0af05c983d549"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a4c459098c4934f9470280baf9ff8b38c365e147f33c8abc26039a948a664a5"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e89bf53a268f55c16989dab1cf0b32a5bff910762f138136ffad4146129b7a10"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f09cb3e6bcb140f57b878580cf3b848976f67faaf53d850a7da9bfac12437068"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:616dd0c6686212ca90ff899bb37eb774798677e43dc6f78c6954470782d37399"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-win32.whl", hash = "sha256:58a66dbb3f0fef0396737ac09d6571a7f8d96a544ce3ed04c161f3d4fa8d51cc"}, - {file = "grpcio_tools-1.67.1-cp310-cp310-win_amd64.whl", hash = "sha256:89ee7c505bdf152e67c2cced6055aed4c2d4170f53a2b46a7e543d3b90e7b977"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:6d80ddd87a2fb7131d242f7d720222ef4f0f86f53ec87b0a6198c343d8e4a86e"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b655425b82df51f3bd9fd3ba1a6282d5c9ce1937709f059cb3d419b224532d89"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:250241e6f9d20d0910a46887dfcbf2ec9108efd3b48f3fb95bb42d50d09d03f8"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6008f5a5add0b6f03082edb597acf20d5a9e4e7c55ea1edac8296c19e6a0ec8d"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5eff9818c3831fa23735db1fa39aeff65e790044d0a312260a0c41ae29cc2d9e"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:262ab7c40113f8c3c246e28e369661ddf616a351cb34169b8ba470c9a9c3b56f"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1eebd8c746adf5786fa4c3056258c21cc470e1eca51d3ed23a7fb6a697fe4e81"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-win32.whl", hash = "sha256:3eff92fb8ca1dd55e3af0ef02236c648921fb7d0e8ca206b889585804b3659ae"}, - {file = "grpcio_tools-1.67.1-cp311-cp311-win_amd64.whl", hash = "sha256:1ed18281ee17e5e0f9f6ce0c6eb3825ca9b5a0866fc1db2e17fab8aca28b8d9f"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:bd5caef3a484e226d05a3f72b2d69af500dca972cf434bf6b08b150880166f0b"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:48a2d63d1010e5b218e8e758ecb2a8d63c0c6016434e9f973df1c3558917020a"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:baa64a6aa009bffe86309e236c81b02cd4a88c1ebd66f2d92e84e9b97a9ae857"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ab318c40b5e3c097a159035fc3e4ecfbe9b3d2c9de189e55468b2c27639a6ab"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50eba3e31f9ac1149463ad9182a37349850904f142cffbd957cd7f54ec320b8e"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:de6fbc071ecc4fe6e354a7939202191c1f1abffe37fbce9b08e7e9a5b93eba3d"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:db9e87f6ea4b0ce99b2651203480585fd9e8dd0dd122a19e46836e93e3a1b749"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-win32.whl", hash = "sha256:6a595a872fb720dde924c4e8200f41d5418dd6baab8cc1a3c1e540f8f4596351"}, - {file = "grpcio_tools-1.67.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:92eebb9b31031604ae97ea7657ae2e43149b0394af7117ad7e15894b6cc136dc"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:9a3b9510cc87b6458b05ad49a6dee38df6af37f9ee6aa027aa086537798c3d4a"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9e4c9b9fa9b905f15d414cb7bd007ba7499f8907bdd21231ab287a86b27da81a"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:e11a98b41af4bc88b7a738232b8fa0306ad82c79fa5d7090bb607f183a57856f"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de0fcfe61c26679d64b1710746f2891f359593f76894fcf492c37148d5694f00"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae3b3e2ee5aad59dece65a613624c46a84c9582fc3642686537c6dfae8e47dc"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:9a630f83505b6471a3094a7a372a1240de18d0cd3e64f4fbf46b361bac2be65b"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d85a1fcbacd3e08dc2b3d1d46b749351a9a50899fa35cf2ff040e1faf7d405ad"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-win32.whl", hash = "sha256:778470f025f25a1fca5a48c93c0a18af395b46b12dd8df7fca63736b85181f41"}, - {file = "grpcio_tools-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:6961da86e9856b4ddee0bf51ef6636b4bf9c29c0715aa71f3c8f027c45d42654"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:c088dfbbe289bb171ca9c98fabbf7ecc8c1c51af2ba384ef32a4fdcb784b17e9"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11ce546daf8f8c04ee8d4a1673b4754cda4a0a9d505d820efd636e37f46b50c5"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:83fecb2f6119ef0eea68a091964898418c1969375d399956ff8d1741beb7b081"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:d39c1aa6b26e2602d815b9cfa37faba48b2889680ae6baa002560cf0f0c69fac"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e975dc9fb61a77d88e739eb17b3361f369d03cc754217f02dd83ec7cfac32e38"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6c6e5c5b15f2eedc2a81268d588d14a79a52020383bf87b3c7595df7b571504a"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a974e0ce01806adba718e6eb8c385defe6805b18969b6914da7db55fb055ae45"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-win32.whl", hash = "sha256:35e9b0a82be9f425aa67ee1dc69ba02cf135aeee3f22c0455c5d1b01769bbdb4"}, - {file = "grpcio_tools-1.67.1-cp38-cp38-win_amd64.whl", hash = "sha256:0436c97f29e654d2eccd7419907ee019caf7eea6bdc6ae91d98011f6c5f44f17"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:718fbb6d68a3d000cb3cf381642660eade0e8c1b0bf7472b84b3367f5b56171d"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:062887d2e9cb8bc261c21a2b8da714092893ce62b4e072775eaa9b24dcbf3b31"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:59dbf14a1ce928bf03a58fa157034374411159ab5d32ad83cf146d9400eed618"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac552fc9c76d50408d7141e1fd1eae69d85fbf7ae71da4d8877eaa07127fbe74"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c6583773400e441dc62d08b5a32357babef1a9f9f73c3ac328a75af550815a9"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:862108f90f2f6408908e5ea4584c5104f7caf419c6d73aa3ff36bf8284cca224"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:587c6326425f37dca2291f46b93e446c07ee781cea27725865b806b7a049ec56"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-win32.whl", hash = 
"sha256:d7d46a4405bd763525215b6e073888386587aef9b4a5ec125bf97ba897ac757d"}, - {file = "grpcio_tools-1.67.1-cp39-cp39-win_amd64.whl", hash = "sha256:e2fc7980e8bab3ee5ab98b6fdc2a8fbaa4785f196d897531346176fda49a605c"}, - {file = "grpcio_tools-1.67.1.tar.gz", hash = "sha256:d9657f5ddc62b52f58904e6054b7d8a8909ed08a1e28b734be3a707087bcf004"}, + {file = "grpcio-tools-1.60.1.tar.gz", hash = "sha256:da08224ab8675c6d464b988bd8ca02cccd2bf0275bceefe8f6219bfd4a4f5e85"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:184b27333b627a7cc0972fb70d21a8bb7c02ac4a6febc16768d78ea8ff883ddd"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:18d7737f29ef5bbe3352547d0eccd080807834f00df223867dfc860bf81e9180"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:cc8ba358d2c658c6ecbc58e779bf0fc5a673fecac015a70db27fc5b4d37b76b6"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2973f75e8ba5c551033a1d59cc97654f6f386deaf2559082011d245d7ed87bba"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28ae665113affebdd109247386786e5ab4dccfcfad1b5f68e9cce2e326b57ee6"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c7ed086fef5ff59f46d53a052b1934b73e0f7d12365d656d6af3a88057d5a3e"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8540f6480428a52614db71dd6394f52cbc0d2565b5ea1136a982f26390a42c7a"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-win32.whl", hash = "sha256:5b4a939097005531edec331f22d0b82bff26e71ede009354d2f375b5d41e74f0"}, + {file = "grpcio_tools-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:075bb67895970f96aabc1761ca674bf4db193f8fcad387f08e50402023b5f953"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:284749d20fb22418f17d3d351b9eb838caf4a0393a9cb02c36e5c32fa4bbe9db"}, + {file 
= "grpcio_tools-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:b1041377cf32ee2338284ee26e6b9c10f9ea7728092376b19803dcb9b91d510d"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e529cd3d4109a6f4a3f7bdaca68946eb33734e2d7ffe861785a0586abe99ee67"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31294b534f25f02ead204e58dcbe0e5437a95a1a6f276bb9378905595b02ff6d"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fb6f4d2df0388c35c2804ba170f511238a681b679ead013bfe5e39d0ea9cf48"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:40cd8268a675269ce59c4fa50877597ec638bb1099c52237bb726c8ac9791868"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:985ac476da365267a2367ab20060f9096fbfc2e190fb02dd394f9ec05edf03ca"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-win32.whl", hash = "sha256:bd85f6c368b93ae45edf8568473053cb1cc075ef3489efb18f9832d4ecce062f"}, + {file = "grpcio_tools-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:c20e752ff5057758845f4e5c7a298739bfba291f373ed18ea9c7c7acbe69e8ab"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:aafc94616c5f89c891d859057b194a153c451f9921053454e9d7d4cbf79047eb"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:9bba347000f57dae8aea79c0d76ef7d72895597524d30d0170c7d1974a3a03f3"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:1e96a532d38411f0543fe1903ff522f7142a9901afb0ed94de58d79caf1905be"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ea6e397d87f458bb2c387a4a6e1b65df74ce5b5194a1f16850c38309012e981"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3aeecd5b8faa2aab67e6c8b8a57e888c00ce70d39f331ede0a21312e92def1a6"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d2c26ce5f774c98bd2d3d8d1703048394018b55d297ebdb41ed2ba35b9a34f68"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:214281cdafb7acfdcde848eca2de7c888a6e2b5cd25ab579712b965ea09a9cd4"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-win32.whl", hash = "sha256:8c4b917aa4fcdc77990773063f0f14540aab8d4a8bf6c862b964a45d891a31d2"}, + {file = "grpcio_tools-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:0aa34c7c21cff2177a4096b2b0d51dfbc9f8a41f929847a434e89b352c5a215d"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:acdba77584981fe799104aa545d9d97910bcf88c69b668b768c1f3e7d7e5afac"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:2a7fa55bc62d4b8ebe6fb26f8cf89df3cf3b504eb6c5f3a2f0174689d35fddb0"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:dffa326cf901fe08a0e218d9fdf593f12276088a8caa07fcbec7d051149cf9ef"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf945bd22f396c0d0c691e0990db2bfc4e77816b1edc2aea8a69c35ae721aac9"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6801cfc5a85f0fb6fd12cade45942aaa1c814422328d594d12d364815fe34123"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f95bdc6c7c50b7fc442e53537bc5b4eb8cab2a671c1da80d40b5a4ab1fd5d416"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:402efeec36d8b12b792bae8a900085416fc2f57a34b599445ace2e847b6b0d75"}, + {file = "grpcio_tools-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:af88a2062b9c35034a80b25f289034b9c3c00c42bb88efaa465503a06fbd6a87"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-linux_armv7l.whl", hash = 
"sha256:46b495bae31c5d3f6ac0240eb848f0642b5410f80dff2aacdea20cdea3938c1d"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:b5ae375207af9aa82f516dcd513d2e0c83690b7788d45844daad846ed87550f8"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:15f13e8f3d77b96adcb1e3615acec5b100bd836c6010c58a51465bcb9c06d128"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c354505e6a3d170da374f20404ea6a78135502df4f5534e5c532bdf24c4cc2a5"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8cfab27ba2bd36a3e3b522aed686133531e8b919703d0247a0885dae8815317"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b6ef213cb0aecb2832ee82a2eac32f29f31f50b17ce020604d82205096a6bd0c"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0b62cb2d43a7f0eacc6a6962dfff7c2564874012e1a72ae4167e762f449e2912"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-win32.whl", hash = "sha256:3fcabf484720a9fa1690e2825fc940027a05a0c79a1075a730008ef634bd8ad2"}, + {file = "grpcio_tools-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:22ce3e3d861321d208d8bfd6161ab976623520b179712c90b2c175151463a6b1"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:4e66fe204da15e08e599adb3060109a42927c0868fe8933e2d341ea649eceb03"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:c1047bd831de5d9da761e9dc246988d5f07d722186938dfd5f34807398101010"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:eba5fafd70585fbd4cb6ae45e3c5e11d8598e2426c9f289b78f682c0606e81cb"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bba7230c60238c7a4ffa29f1aff6d78edb41f2c79cbe4443406472b1c80ccb5d"}, + {file = 
"grpcio_tools-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2bb8efc2cd64bd8f2779b426dd7e94e60924078ba5150cbbb60a846e62d1ed2"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:26f91161a91f1601777751230eaaafdf416fed08a15c3ba2ae391088e4a906c6"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2c19be2bba5583e30f88bb5d71b430176c396f0d6d0db3785e5845bfa3d28cd2"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-win32.whl", hash = "sha256:9aadc9c00baa2064baa4414cff7c269455449f14805a355226674d89c507342c"}, + {file = "grpcio_tools-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:652b08c9fef39186ce4f97f05f5440c0ed41f117db0f7d6cb0e0d75dbc6afd3f"}, ] [package.dependencies] -grpcio = ">=1.67.1" -protobuf = ">=5.26.1,<6.0dev" +grpcio = ">=1.60.1" +protobuf = ">=4.21.6,<5.0dev" setuptools = "*" [[package]] @@ -2273,13 +2349,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.6" +version = "1.0.7" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] @@ -2317,15 +2393,26 @@ cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +[[package]] +name = "httpx-sse" +version = "0.4.0" +description = "Consume Server-Sent Event (SSE) messages with HTTPX." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, + {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, +] + [[package]] name = "huggingface-hub" -version = "0.26.2" +version = "0.26.3" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.26.2-py3-none-any.whl", hash = "sha256:98c2a5a8e786c7b2cb6fdeb2740893cba4d53e312572ed3d8afafda65b128c46"}, - {file = "huggingface_hub-0.26.2.tar.gz", hash = "sha256:b100d853465d965733964d123939ba287da60a547087783ddff8a323f340332b"}, + {file = "huggingface_hub-0.26.3-py3-none-any.whl", hash = "sha256:e66aa99e569c2d5419240a9e553ad07245a5b1300350bfbc5a4945cf7432991b"}, + {file = "huggingface_hub-0.26.3.tar.gz", hash = "sha256:90e1fe62ffc26757a073aaad618422b899ccf9447c2bba8c902a90bef5b42e1d"}, ] [package.dependencies] @@ -2392,26 +2479,22 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "8.5.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] -zipp = ">=3.20" +zipp = ">=0.5" [package.extras] 
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -2717,17 +2800,17 @@ files = [ [[package]] name = "json5" -version = "0.9.27" +version = "0.10.0" description = "A Python implementation of the JSON5 data format." optional = false python-versions = ">=3.8.0" files = [ - {file = "json5-0.9.27-py3-none-any.whl", hash = "sha256:17b43d78d3a6daeca4d7030e9bf22092dba29b1282cc2d0cfa56f6febee8dc93"}, - {file = "json5-0.9.27.tar.gz", hash = "sha256:5a19de4a6ca24ba664dc7d50307eb73ba9a16dea5d6bde85677ae85d3ed2d8e0"}, + {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, + {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, ] [package.extras] -dev = ["build (==1.2.1)", "coverage (==7.5.3)", "mypy (==1.10.0)", "pip (==24.1)", "pylint (==3.2.3)", "ruff (==0.5.1)", "twine (==5.1.1)", "uv (==0.2.13)"] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.3)", "mypy (==1.13.0)", "pip (==24.3.1)", "pylint (==3.2.3)", "ruff (==0.7.3)", "twine (==5.1.1)", "uv (==0.5.1)"] [[package]] name = "jsonpatch" @@ -2950,13 +3033,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.2.5" +version = 
"4.2.6" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, - {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, + {file = "jupyterlab-4.2.6-py3-none-any.whl", hash = "sha256:78dd42cae5b460f377624b03966a8730e3b0692102ddf5933a2a3730c1bc0a20"}, + {file = "jupyterlab-4.2.6.tar.gz", hash = "sha256:625f3ac19da91f9706baf66df25723b2f1307c1159fc7293035b066786d62a4a"}, ] [package.dependencies] @@ -3178,14 +3261,63 @@ embeddings = ["awscli (>=1.29.57)", "boto3 (>=1.28.57)", "botocore (>=1.31.57)", tests = ["aiohttp", "boto3", "duckdb", "pandas (>=1.4)", "polars (>=0.19,<=1.3.0)", "pytest", "pytest-asyncio", "pytest-mock", "pytz", "tantivy"] [[package]] -name = "langchain-core" -version = "0.3.15" +name = "langchain" +version = "0.3.9" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_core-0.3.15-py3-none-any.whl", hash = "sha256:3d4ca6dbb8ed396a6ee061063832a2451b0ce8c345570f7b086ffa7288e4fa29"}, - {file = "langchain_core-0.3.15.tar.gz", hash = "sha256:b1a29787a4ffb7ec2103b4e97d435287201da7809b369740dd1e32f176325aba"}, + {file = "langchain-0.3.9-py3-none-any.whl", hash = "sha256:ade5a1fee2f94f2e976a6c387f97d62cc7f0b9f26cfe0132a41d2bda761e1045"}, + {file = "langchain-0.3.9.tar.gz", hash = "sha256:4950c4ad627d0aa95ce6bda7de453e22059b7e7836b562a8f781fb0b05d7294c"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +langchain-core = ">=0.3.21,<0.4.0" +langchain-text-splitters = ">=0.3.0,<0.4.0" +langsmith = ">=0.1.17,<0.2.0" +numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.12\""} +pydantic = ">=2.7.4,<3.0.0" +PyYAML = ">=5.3" 
+requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" + +[[package]] +name = "langchain-community" +version = "0.3.8" +description = "Community contributed LangChain integrations." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_community-0.3.8-py3-none-any.whl", hash = "sha256:191b3fcdf6b2e92934f4daeba5f5d0ac684b03772b15ef9d3c3fbcd86bd6cd64"}, + {file = "langchain_community-0.3.8.tar.gz", hash = "sha256:f7575a717d95208d0e969c090104622783c6a38a5527657aa5aa38776fadc835"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +httpx-sse = ">=0.4.0,<0.5.0" +langchain = ">=0.3.8,<0.4.0" +langchain-core = ">=0.3.21,<0.4.0" +langsmith = ">=0.1.125,<0.2.0" +numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.12\""} +pydantic-settings = ">=2.4.0,<3.0.0" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<2.0.36" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" + +[[package]] +name = "langchain-core" +version = "0.3.21" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_core-0.3.21-py3-none-any.whl", hash = "sha256:7e723dff80946a1198976c6876fea8326dc82566ef9bcb5f8d9188f738733665"}, + {file = "langchain_core-0.3.21.tar.gz", hash = "sha256:561b52b258ffa50a9fb11d7a1940ebfd915654d1ec95b35e81dfd5ee84143411"}, ] [package.dependencies] @@ -3197,6 +3329,22 @@ PyYAML = ">=5.3" tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" typing-extensions = ">=4.7" +[[package]] +name = "langchain-openai" +version = "0.2.5" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_openai-0.2.5-py3-none-any.whl", hash = "sha256:745fd9d51a5a3a9cb8839d41f3786ab38dfc539e47c713a806cbca32f3d0875c"}, + {file = "langchain_openai-0.2.5.tar.gz", hash = 
"sha256:55b98711a880474ec363267bf6cd0e2727dc00e8433731318d063a2184582c28"}, +] + +[package.dependencies] +langchain-core = ">=0.3.15,<0.4.0" +openai = ">=1.52.0,<2.0.0" +tiktoken = ">=0.7,<1" + [[package]] name = "langchain-text-splitters" version = "0.3.2" @@ -3227,13 +3375,13 @@ six = "*" [[package]] name = "langfuse" -version = "2.53.9" +version = "2.55.0" description = "A client library for accessing langfuse" optional = false -python-versions = "<4.0,>=3.8.1" +python-versions = "<4.0,>=3.9" files = [ - {file = "langfuse-2.53.9-py3-none-any.whl", hash = "sha256:04363bc323f7513621c88a997003f7b906ae8f5d096bd54221cfcb6bf7a6f16a"}, - {file = "langfuse-2.53.9.tar.gz", hash = "sha256:6bfecf86e28c684034ae52a0b19535c94cc86923085267b548d63e5c1ce2b82c"}, + {file = "langfuse-2.55.0-py3-none-any.whl", hash = "sha256:c902894b50befdd8639e5b0d78ca79f8b5d391bae2c3f0889a1f47ea59d795ad"}, + {file = "langfuse-2.55.0.tar.gz", hash = "sha256:fb671a8cf5252a7d1def3b41b6f70f8de50c53250e5832209649301c14453222"}, ] [package.dependencies] @@ -3243,6 +3391,7 @@ httpx = ">=0.15.4,<1.0" idna = ">=3.7,<4.0" packaging = ">=23.2,<25.0" pydantic = ">=1.10.7,<3.0" +requests = ">=2,<3" wrapt = ">=1.14,<2.0" [package.extras] @@ -3298,13 +3447,13 @@ proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler (>=3.10.4,<4.0.0)", "backoff", " [[package]] name = "llama-index-core" -version = "0.11.22" +version = "0.11.23" description = "Interface between LLMs and your data" optional = true python-versions = "<4.0,>=3.8.1" files = [ - {file = "llama_index_core-0.11.22-py3-none-any.whl", hash = "sha256:5c59d95dec9bb0727f25b03de89392c69076b2e4aaa6acbd8773de1f07502e9e"}, - {file = "llama_index_core-0.11.22.tar.gz", hash = "sha256:ddc30b9c873495de40ad8278d0c894ba09f32f6aa7fc638012b1b22b74c32553"}, + {file = "llama_index_core-0.11.23-py3-none-any.whl", hash = "sha256:25a0cb4a055bfb348655ca4acd1b475529bd8537a7b81874ef14ed13f56e06c1"}, + {file = "llama_index_core-0.11.23.tar.gz", hash = 
"sha256:e150859696a0eae169fe19323f46e9a31af2c12c3182012e4d0353ea8eb06d24"}, ] [package.dependencies] @@ -3312,6 +3461,7 @@ aiohttp = ">=3.8.6,<4.0.0" dataclasses-json = "*" deprecated = ">=1.2.9.3" dirtyjson = ">=1.0.8,<2.0.0" +filetype = ">=1.2.0,<2.0.0" fsspec = ">=2023.5.0" httpx = "*" nest-asyncio = ">=1.5.8,<2.0.0" @@ -3476,7 +3626,7 @@ files = [ name = "marshmallow" version = "3.23.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -optional = true +optional = false python-versions = ">=3.9" files = [ {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, @@ -3493,51 +3643,52 @@ tests = ["pytest", "simplejson"] [[package]] name = "matplotlib" -version = "3.9.2" +version = "3.9.3" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"}, - {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"}, - {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"}, - {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"}, - {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"}, - {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"}, - {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"}, - {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"}, - {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"}, - {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"}, - {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"}, - {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"}, - {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"}, - {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"}, - {file = 
"matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"}, - {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"}, - {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"}, - {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"}, - {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"}, - {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"}, - {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"}, - {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"}, - {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"}, - {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"}, - {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"}, - {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"}, - {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"}, - {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"}, + {file = "matplotlib-3.9.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:41b016e3be4e740b66c79a031a0a6e145728dbc248142e751e8dab4f3188ca1d"}, + {file = "matplotlib-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e0143975fc2a6d7136c97e19c637321288371e8f09cff2564ecd73e865ea0b9"}, + {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f459c8ee2c086455744723628264e43c884be0c7d7b45d84b8cd981310b4815"}, + {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687df7ceff57b8f070d02b4db66f75566370e7ae182a0782b6d3d21b0d6917dc"}, + {file = "matplotlib-3.9.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:edd14cf733fdc4f6e6fe3f705af97676a7e52859bf0044aa2c84e55be739241c"}, + {file = "matplotlib-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c40c244221a1adbb1256692b1133c6fb89418df27bf759a31a333e7912a4010"}, + {file = "matplotlib-3.9.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cf2a60daf6cecff6828bc608df00dbc794380e7234d2411c0ec612811f01969d"}, + {file = "matplotlib-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:213d6dc25ce686516208d8a3e91120c6a4fdae4a3e06b8505ced5b716b50cc04"}, + {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c52f48eb75fcc119a4fdb68ba83eb5f71656999420375df7c94cc68e0e14686e"}, + {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c93796b44fa111049b88a24105e947f03c01966b5c0cc782e2ee3887b790a3"}, + {file = "matplotlib-3.9.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cd1077b9a09b16d8c3c7075a8add5ffbfe6a69156a57e290c800ed4d435bef1d"}, + {file = "matplotlib-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:c96eeeb8c68b662c7747f91a385688d4b449687d29b691eff7068a4602fe6dc4"}, + {file = "matplotlib-3.9.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a361bd5583bf0bcc08841df3c10269617ee2a36b99ac39d455a767da908bbbc"}, + {file = "matplotlib-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e14485bb1b83eeb3d55b6878f9560240981e7bbc7a8d4e1e8c38b9bd6ec8d2de"}, + {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d279f78844aad213c4935c18f8292a9432d51af2d88bca99072c903948045"}, + {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6c12514329ac0d03128cf1dcceb335f4fbf7c11da98bca68dca8dcb983153a9"}, + {file = "matplotlib-3.9.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6e9de2b390d253a508dd497e9b5579f3a851f208763ed67fdca5dc0c3ea6849c"}, + {file = "matplotlib-3.9.3-cp312-cp312-win_amd64.whl", hash 
= "sha256:d796272408f8567ff7eaa00eb2856b3a00524490e47ad505b0b4ca6bb8a7411f"}, + {file = "matplotlib-3.9.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:203d18df84f5288973b2d56de63d4678cc748250026ca9e1ad8f8a0fd8a75d83"}, + {file = "matplotlib-3.9.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b651b0d3642991259109dc0351fc33ad44c624801367bb8307be9bfc35e427ad"}, + {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66d7b171fecf96940ce069923a08ba3df33ef542de82c2ff4fe8caa8346fa95a"}, + {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be0ba61f6ff2e6b68e4270fb63b6813c9e7dec3d15fc3a93f47480444fd72f0"}, + {file = "matplotlib-3.9.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d6b2e8856dec3a6db1ae51aec85c82223e834b228c1d3228aede87eee2b34f9"}, + {file = "matplotlib-3.9.3-cp313-cp313-win_amd64.whl", hash = "sha256:90a85a004fefed9e583597478420bf904bb1a065b0b0ee5b9d8d31b04b0f3f70"}, + {file = "matplotlib-3.9.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3119b2f16de7f7b9212ba76d8fe6a0e9f90b27a1e04683cd89833a991682f639"}, + {file = "matplotlib-3.9.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:87ad73763d93add1b6c1f9fcd33af662fd62ed70e620c52fcb79f3ac427cf3a6"}, + {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026bdf3137ab6022c866efa4813b6bbeddc2ed4c9e7e02f0e323a7bca380dfa0"}, + {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760a5e89ebbb172989e8273024a1024b0f084510b9105261b3b00c15e9c9f006"}, + {file = "matplotlib-3.9.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a42b9dc42de2cfe357efa27d9c50c7833fc5ab9b2eb7252ccd5d5f836a84e1e4"}, + {file = "matplotlib-3.9.3-cp313-cp313t-win_amd64.whl", hash = "sha256:e0fcb7da73fbf67b5f4bdaa57d85bb585a4e913d4a10f3e15b32baea56a67f0a"}, + {file = 
"matplotlib-3.9.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:031b7f5b8e595cc07def77ec5b58464e9bb67dc5760be5d6f26d9da24892481d"}, + {file = "matplotlib-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fa6e193c14d6944e0685cdb527cb6b38b0e4a518043e7212f214113af7391da"}, + {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6eefae6effa0c35bbbc18c25ee6e0b1da44d2359c3cd526eb0c9e703cf055d"}, + {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d3e5c7a99bd28afb957e1ae661323b0800d75b419f24d041ed1cc5d844a764"}, + {file = "matplotlib-3.9.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:816a966d5d376bf24c92af8f379e78e67278833e4c7cbc9fa41872eec629a060"}, + {file = "matplotlib-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fb0b37c896172899a4a93d9442ffdc6f870165f59e05ce2e07c6fded1c15749"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f2a4ea08e6876206d511365b0bc234edc813d90b930be72c3011bbd7898796f"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b081dac96ab19c54fd8558fac17c9d2c9cb5cc4656e7ed3261ddc927ba3e2c5"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a0a63cb8404d1d1f94968ef35738900038137dab8af836b6c21bb6f03d75465"}, + {file = "matplotlib-3.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:896774766fd6be4571a43bc2fcbcb1dcca0807e53cab4a5bf88c4aa861a08e12"}, + {file = "matplotlib-3.9.3.tar.gz", hash = "sha256:cd5dbbc8e25cad5f706845c4d100e2c8b34691b412b93717ce38d8ae803bcfa5"}, ] [package.dependencies] @@ -3553,7 +3704,7 @@ pyparsing = ">=2.3.1" python-dateutil = ">=2.7" [package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"] +dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6,!=2.13.3)", "setuptools (>=64)", 
"setuptools_scm (>=7)"] [[package]] name = "matplotlib-inline" @@ -3679,13 +3830,13 @@ pyyaml = ">=5.1" [[package]] name = "mkdocs-material" -version = "9.5.44" +version = "9.5.47" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.44-py3-none-any.whl", hash = "sha256:47015f9c167d58a5ff5e682da37441fc4d66a1c79334bfc08d774763cacf69ca"}, - {file = "mkdocs_material-9.5.44.tar.gz", hash = "sha256:f3a6c968e524166b3f3ed1fb97d3ed3e0091183b0545cedf7156a2a6804c56c0"}, + {file = "mkdocs_material-9.5.47-py3-none-any.whl", hash = "sha256:53fb9c9624e7865da6ec807d116cd7be24b3cb36ab31b1d1d1a9af58c56009a2"}, + {file = "mkdocs_material-9.5.47.tar.gz", hash = "sha256:fc3b7a8e00ad896660bd3a5cc12ca0cb28bdc2bcbe2a946b5714c23ac91b0ede"}, ] [package.dependencies] @@ -3984,13 +4135,13 @@ files = [ [[package]] name = "nbclient" -version = "0.10.0" +version = "0.10.1" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
optional = false python-versions = ">=3.8.0" files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, + {file = "nbclient-0.10.1-py3-none-any.whl", hash = "sha256:949019b9240d66897e442888cfb618f69ef23dc71c01cb5fced8499c2cfc084d"}, + {file = "nbclient-0.10.1.tar.gz", hash = "sha256:3e93e348ab27e712acd46fccd809139e356eb9a31aab641d1a7991a6eb4e6f68"}, ] [package.dependencies] @@ -4001,7 +4152,7 @@ traitlets = ">=5.4" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] @@ -4065,21 +4216,21 @@ test = ["pep440", "pre-commit", "pytest", "testpath"] [[package]] name = "neo4j" -version = "5.26.0" +version = "5.27.0" description = "Neo4j Bolt driver for Python" optional = false python-versions = ">=3.7" files = [ - {file = "neo4j-5.26.0-py3-none-any.whl", hash = "sha256:511a6a9468ca89b521bf686f885a2070acc462b1d09821d43710bd477acdf11e"}, - {file = "neo4j-5.26.0.tar.gz", hash = "sha256:51b25ba127b7b9fdae1ddf48ae697ddfab331e60f4b6d8488d1fc1f74ec60dcc"}, + {file = "neo4j-5.27.0-py3-none-any.whl", hash = "sha256:929c14b9e5341267324eca170b39d1798b032bffacc26a0529eacaf678ae483f"}, + {file = "neo4j-5.27.0.tar.gz", hash = 
"sha256:f82ee807cd15b178898d83f41a66372e11719a25dd487fd7bea48fd4b7323765"}, ] [package.dependencies] pytz = "*" [package.extras] -numpy = ["numpy (>=1.7.0,<2.0.0)"] -pandas = ["numpy (>=1.7.0,<2.0.0)", "pandas (>=1.1.0,<3.0.0)"] +numpy = ["numpy (>=1.7.0,<3.0.0)"] +pandas = ["numpy (>=1.7.0,<3.0.0)", "pandas (>=1.1.0,<3.0.0)"] pyarrow = ["pyarrow (>=1.0.0)"] [[package]] @@ -4261,71 +4412,182 @@ typing-extensions = ">=4.11,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +[[package]] +name = "opentelemetry-api" +version = "1.27.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=8.4.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.27.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"}, +] + +[package.dependencies] +opentelemetry-proto = "1.27.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.27.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"}, + {file = 
"opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.27.0" +opentelemetry-proto = "1.27.0" +opentelemetry-sdk = ">=1.27.0,<1.28.0" + +[[package]] +name = "opentelemetry-proto" +version = "1.27.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"}, + {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.27.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, + {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"}, +] + +[package.dependencies] +opentelemetry-api = "1.27.0" +opentelemetry-semantic-conventions = "0.48b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.48b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, + {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"}, +] + +[package.dependencies] 
+deprecated = ">=1.2.6" +opentelemetry-api = "1.27.0" + [[package]] name = "orjson" -version = "3.10.11" +version = "3.10.12" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.11-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6dade64687f2bd7c090281652fe18f1151292d567a9302b34c2dbb92a3872f1f"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82f07c550a6ccd2b9290849b22316a609023ed851a87ea888c0456485a7d196a"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd9a187742d3ead9df2e49240234d728c67c356516cf4db018833a86f20ec18c"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77b0fed6f209d76c1c39f032a70df2d7acf24b1812ca3e6078fd04e8972685a3"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:63fc9d5fe1d4e8868f6aae547a7b8ba0a2e592929245fff61d633f4caccdcdd6"}, - {file = "orjson-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65cd3e3bb4fbb4eddc3c1e8dce10dc0b73e808fcb875f9fab40c81903dd9323e"}, - {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f67c570602300c4befbda12d153113b8974a3340fdcf3d6de095ede86c06d92"}, - {file = "orjson-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1f39728c7f7d766f1f5a769ce4d54b5aaa4c3f92d5b84817053cc9995b977acc"}, - {file = "orjson-3.10.11-cp310-none-win32.whl", hash = "sha256:1789d9db7968d805f3d94aae2c25d04014aae3a2fa65b1443117cd462c6da647"}, - {file = "orjson-3.10.11-cp310-none-win_amd64.whl", hash = "sha256:5576b1e5a53a5ba8f8df81872bb0878a112b3ebb1d392155f00f54dd86c83ff6"}, - {file = "orjson-3.10.11-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:1444f9cb7c14055d595de1036f74ecd6ce15f04a715e73f33bb6326c9cef01b6"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdec57fe3b4bdebcc08a946db3365630332dbe575125ff3d80a3272ebd0ddafe"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eed32f33a0ea6ef36ccc1d37f8d17f28a1d6e8eefae5928f76aff8f1df85e67"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80df27dd8697242b904f4ea54820e2d98d3f51f91e97e358fc13359721233e4b"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:705f03cee0cb797256d54de6695ef219e5bc8c8120b6654dd460848d57a9af3d"}, - {file = "orjson-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03246774131701de8e7059b2e382597da43144a9a7400f178b2a32feafc54bd5"}, - {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8b5759063a6c940a69c728ea70d7c33583991c6982915a839c8da5f957e0103a"}, - {file = "orjson-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:677f23e32491520eebb19c99bb34675daf5410c449c13416f7f0d93e2cf5f981"}, - {file = "orjson-3.10.11-cp311-none-win32.whl", hash = "sha256:a11225d7b30468dcb099498296ffac36b4673a8398ca30fdaec1e6c20df6aa55"}, - {file = "orjson-3.10.11-cp311-none-win_amd64.whl", hash = "sha256:df8c677df2f9f385fcc85ab859704045fa88d4668bc9991a527c86e710392bec"}, - {file = "orjson-3.10.11-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:360a4e2c0943da7c21505e47cf6bd725588962ff1d739b99b14e2f7f3545ba51"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496e2cb45de21c369079ef2d662670a4892c81573bcc143c4205cae98282ba97"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:7dfa8db55c9792d53c5952900c6a919cfa377b4f4534c7a786484a6a4a350c19"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51f3382415747e0dbda9dade6f1e1a01a9d37f630d8c9049a8ed0e385b7a90c0"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f35a1b9f50a219f470e0e497ca30b285c9f34948d3c8160d5ad3a755d9299433"}, - {file = "orjson-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f3b7c5803138e67028dde33450e054c87e0703afbe730c105f1fcd873496d5"}, - {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f91d9eb554310472bd09f5347950b24442600594c2edc1421403d7610a0998fd"}, - {file = "orjson-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dfbb2d460a855c9744bbc8e36f9c3a997c4b27d842f3d5559ed54326e6911f9b"}, - {file = "orjson-3.10.11-cp312-none-win32.whl", hash = "sha256:d4a62c49c506d4d73f59514986cadebb7e8d186ad510c518f439176cf8d5359d"}, - {file = "orjson-3.10.11-cp312-none-win_amd64.whl", hash = "sha256:f1eec3421a558ff7a9b010a6c7effcfa0ade65327a71bb9b02a1c3b77a247284"}, - {file = "orjson-3.10.11-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c46294faa4e4d0eb73ab68f1a794d2cbf7bab33b1dda2ac2959ffb7c61591899"}, - {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52e5834d7d6e58a36846e059d00559cb9ed20410664f3ad156cd2cc239a11230"}, - {file = "orjson-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2fc947e5350fdce548bfc94f434e8760d5cafa97fb9c495d2fef6757aa02ec0"}, - {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0efabbf839388a1dab5b72b5d3baedbd6039ac83f3b55736eb9934ea5494d258"}, - {file = "orjson-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3f29634260708c200c4fe148e42b4aae97d7b9fee417fbdd74f8cfc265f15b0"}, - {file = 
"orjson-3.10.11-cp313-none-win32.whl", hash = "sha256:1a1222ffcee8a09476bbdd5d4f6f33d06d0d6642df2a3d78b7a195ca880d669b"}, - {file = "orjson-3.10.11-cp313-none-win_amd64.whl", hash = "sha256:bc274ac261cc69260913b2d1610760e55d3c0801bb3457ba7b9004420b6b4270"}, - {file = "orjson-3.10.11-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:19b3763e8bbf8ad797df6b6b5e0fc7c843ec2e2fc0621398534e0c6400098f87"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be83a13312e5e58d633580c5eb8d0495ae61f180da2722f20562974188af205"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:afacfd1ab81f46dedd7f6001b6d4e8de23396e4884cd3c3436bd05defb1a6446"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cb4d0bea56bba596723d73f074c420aec3b2e5d7d30698bc56e6048066bd560c"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96ed1de70fcb15d5fed529a656df29f768187628727ee2788344e8a51e1c1350"}, - {file = "orjson-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfb30c891b530f3f80e801e3ad82ef150b964e5c38e1fb8482441c69c35c61c"}, - {file = "orjson-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d496c74fc2b61341e3cefda7eec21b7854c5f672ee350bc55d9a4997a8a95204"}, - {file = "orjson-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:655a493bac606655db9a47fe94d3d84fc7f3ad766d894197c94ccf0c5408e7d3"}, - {file = "orjson-3.10.11-cp38-none-win32.whl", hash = "sha256:b9546b278c9fb5d45380f4809e11b4dd9844ca7aaf1134024503e134ed226161"}, - {file = "orjson-3.10.11-cp38-none-win_amd64.whl", hash = "sha256:b592597fe551d518f42c5a2eb07422eb475aa8cfdc8c51e6da7054b836b26782"}, - {file = "orjson-3.10.11-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:c95f2ecafe709b4e5c733b5e2768ac569bed308623c85806c395d9cca00e08af"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80c00d4acded0c51c98754fe8218cb49cb854f0f7eb39ea4641b7f71732d2cb7"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:461311b693d3d0a060439aa669c74f3603264d4e7a08faa68c47ae5a863f352d"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52ca832f17d86a78cbab86cdc25f8c13756ebe182b6fc1a97d534051c18a08de"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c57ea78a753812f528178aa2f1c57da633754c91d2124cb28991dab4c79a54"}, - {file = "orjson-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7fcfc6f7ca046383fb954ba528587e0f9336828b568282b27579c49f8e16aad"}, - {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:86b9dd983857970c29e4c71bb3e95ff085c07d3e83e7c46ebe959bac07ebd80b"}, - {file = "orjson-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d83f87582d223e54efb2242a79547611ba4ebae3af8bae1e80fa9a0af83bb7f"}, - {file = "orjson-3.10.11-cp39-none-win32.whl", hash = "sha256:9fd0ad1c129bc9beb1154c2655f177620b5beaf9a11e0d10bac63ef3fce96950"}, - {file = "orjson-3.10.11-cp39-none-win_amd64.whl", hash = "sha256:10f416b2a017c8bd17f325fb9dee1fb5cdd7a54e814284896b7c3f2763faa017"}, - {file = "orjson-3.10.11.tar.gz", hash = "sha256:e35b6d730de6384d5b2dab5fd23f0d76fae8bbc8c353c2f78210aa5fa4beb3ef"}, + {file = "orjson-3.10.12-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ece01a7ec71d9940cc654c482907a6b65df27251255097629d0dea781f255c6d"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c34ec9aebc04f11f4b978dd6caf697a2df2dd9b47d35aa4cc606cabcb9df69d7"}, + {file = 
"orjson-3.10.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6ec8658da3480939c79b9e9e27e0db31dffcd4ba69c334e98c9976ac29140e"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f17e6baf4cf01534c9de8a16c0c611f3d94925d1701bf5f4aff17003677d8ced"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6402ebb74a14ef96f94a868569f5dccf70d791de49feb73180eb3c6fda2ade56"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0000758ae7c7853e0a4a6063f534c61656ebff644391e1f81698c1b2d2fc8cd2"}, + {file = "orjson-3.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:888442dcee99fd1e5bd37a4abb94930915ca6af4db50e23e746cdf4d1e63db13"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c1f7a3ce79246aa0e92f5458d86c54f257fb5dfdc14a192651ba7ec2c00f8a05"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:802a3935f45605c66fb4a586488a38af63cb37aaad1c1d94c982c40dcc452e85"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1da1ef0113a2be19bb6c557fb0ec2d79c92ebd2fed4cfb1b26bab93f021fb885"}, + {file = "orjson-3.10.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a3273e99f367f137d5b3fecb5e9f45bcdbfac2a8b2f32fbc72129bbd48789c2"}, + {file = "orjson-3.10.12-cp310-none-win32.whl", hash = "sha256:475661bf249fd7907d9b0a2a2421b4e684355a77ceef85b8352439a9163418c3"}, + {file = "orjson-3.10.12-cp310-none-win_amd64.whl", hash = "sha256:87251dc1fb2b9e5ab91ce65d8f4caf21910d99ba8fb24b49fd0c118b2362d509"}, + {file = "orjson-3.10.12-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a734c62efa42e7df94926d70fe7d37621c783dea9f707a98cdea796964d4cf74"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:750f8b27259d3409eda8350c2919a58b0cfcd2054ddc1bd317a643afc646ef23"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb52c22bfffe2857e7aa13b4622afd0dd9d16ea7cc65fd2bf318d3223b1b6252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:440d9a337ac8c199ff8251e100c62e9488924c92852362cd27af0e67308c16ef"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e15c06491c69997dfa067369baab3bf094ecb74be9912bdc4339972323f252"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:362d204ad4b0b8724cf370d0cd917bb2dc913c394030da748a3bb632445ce7c4"}, + {file = "orjson-3.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b57cbb4031153db37b41622eac67329c7810e5f480fda4cfd30542186f006ae"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:165c89b53ef03ce0d7c59ca5c82fa65fe13ddf52eeb22e859e58c237d4e33b9b"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:5dee91b8dfd54557c1a1596eb90bcd47dbcd26b0baaed919e6861f076583e9da"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77a4e1cfb72de6f905bdff061172adfb3caf7a4578ebf481d8f0530879476c07"}, + {file = "orjson-3.10.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:038d42c7bc0606443459b8fe2d1f121db474c49067d8d14c6a075bbea8bf14dd"}, + {file = "orjson-3.10.12-cp311-none-win32.whl", hash = "sha256:03b553c02ab39bed249bedd4abe37b2118324d1674e639b33fab3d1dafdf4d79"}, + {file = "orjson-3.10.12-cp311-none-win_amd64.whl", hash = "sha256:8b8713b9e46a45b2af6b96f559bfb13b1e02006f4242c156cbadef27800a55a8"}, + {file = "orjson-3.10.12-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:53206d72eb656ca5ac7d3a7141e83c5bbd3ac30d5eccfe019409177a57634b0d"}, + {file = 
"orjson-3.10.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac8010afc2150d417ebda810e8df08dd3f544e0dd2acab5370cfa6bcc0662f8f"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed459b46012ae950dd2e17150e838ab08215421487371fa79d0eced8d1461d70"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dcb9673f108a93c1b52bfc51b0af422c2d08d4fc710ce9c839faad25020bb69"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22a51ae77680c5c4652ebc63a83d5255ac7d65582891d9424b566fb3b5375ee9"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910fdf2ac0637b9a77d1aad65f803bac414f0b06f720073438a7bd8906298192"}, + {file = "orjson-3.10.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:24ce85f7100160936bc2116c09d1a8492639418633119a2224114f67f63a4559"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a76ba5fc8dd9c913640292df27bff80a685bed3a3c990d59aa6ce24c352f8fc"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ff70ef093895fd53f4055ca75f93f047e088d1430888ca1229393a7c0521100f"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f4244b7018b5753ecd10a6d324ec1f347da130c953a9c88432c7fbc8875d13be"}, + {file = "orjson-3.10.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:16135ccca03445f37921fa4b585cff9a58aa8d81ebcb27622e69bfadd220b32c"}, + {file = "orjson-3.10.12-cp312-none-win32.whl", hash = "sha256:2d879c81172d583e34153d524fcba5d4adafbab8349a7b9f16ae511c2cee8708"}, + {file = "orjson-3.10.12-cp312-none-win_amd64.whl", hash = "sha256:fc23f691fa0f5c140576b8c365bc942d577d861a9ee1142e4db468e4e17094fb"}, + {file = "orjson-3.10.12-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:47962841b2a8aa9a258b377f5188db31ba49af47d4003a32f55d6f8b19006543"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6334730e2532e77b6054e87ca84f3072bee308a45a452ea0bffbbbc40a67e296"}, + {file = "orjson-3.10.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:accfe93f42713c899fdac2747e8d0d5c659592df2792888c6c5f829472e4f85e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a7974c490c014c48810d1dede6c754c3cc46598da758c25ca3b4001ac45b703f"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3f250ce7727b0b2682f834a3facff88e310f52f07a5dcfd852d99637d386e79e"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f31422ff9486ae484f10ffc51b5ab2a60359e92d0716fcce1b3593d7bb8a9af6"}, + {file = "orjson-3.10.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5f29c5d282bb2d577c2a6bbde88d8fdcc4919c593f806aac50133f01b733846e"}, + {file = "orjson-3.10.12-cp313-none-win32.whl", hash = "sha256:f45653775f38f63dc0e6cd4f14323984c3149c05d6007b58cb154dd080ddc0dc"}, + {file = "orjson-3.10.12-cp313-none-win_amd64.whl", hash = "sha256:229994d0c376d5bdc91d92b3c9e6be2f1fbabd4cc1b59daae1443a46ee5e9825"}, + {file = "orjson-3.10.12-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7d69af5b54617a5fac5c8e5ed0859eb798e2ce8913262eb522590239db6c6763"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ed119ea7d2953365724a7059231a44830eb6bbb0cfead33fcbc562f5fd8f935"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c5fc1238ef197e7cad5c91415f524aaa51e004be5a9b35a1b8a84ade196f73f"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43509843990439b05f848539d6f6198d4ac86ff01dd024b2f9a795c0daeeab60"}, + {file = 
"orjson-3.10.12-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f72e27a62041cfb37a3de512247ece9f240a561e6c8662276beaf4d53d406db4"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a904f9572092bb6742ab7c16c623f0cdccbad9eeb2d14d4aa06284867bddd31"}, + {file = "orjson-3.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:855c0833999ed5dc62f64552db26f9be767434917d8348d77bacaab84f787d7b"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:897830244e2320f6184699f598df7fb9db9f5087d6f3f03666ae89d607e4f8ed"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:0b32652eaa4a7539f6f04abc6243619c56f8530c53bf9b023e1269df5f7816dd"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:36b4aa31e0f6a1aeeb6f8377769ca5d125db000f05c20e54163aef1d3fe8e833"}, + {file = "orjson-3.10.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5535163054d6cbf2796f93e4f0dbc800f61914c0e3c4ed8499cf6ece22b4a3da"}, + {file = "orjson-3.10.12-cp38-none-win32.whl", hash = "sha256:90a5551f6f5a5fa07010bf3d0b4ca2de21adafbbc0af6cb700b63cd767266cb9"}, + {file = "orjson-3.10.12-cp38-none-win_amd64.whl", hash = "sha256:703a2fb35a06cdd45adf5d733cf613cbc0cb3ae57643472b16bc22d325b5fb6c"}, + {file = "orjson-3.10.12-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f29de3ef71a42a5822765def1febfb36e0859d33abf5c2ad240acad5c6a1b78d"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de365a42acc65d74953f05e4772c974dad6c51cfc13c3240899f534d611be967"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a5a0158648a67ff0004cb0df5df7dcc55bfc9ca154d9c01597a23ad54c8d0c"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c47ce6b8d90fe9646a25b6fb52284a14ff215c9595914af63a5933a49972ce36"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0eee4c2c5bfb5c1b47a5db80d2ac7aaa7e938956ae88089f098aff2c0f35d5d8"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35d3081bbe8b86587eb5c98a73b97f13d8f9fea685cf91a579beddacc0d10566"}, + {file = "orjson-3.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73c23a6e90383884068bc2dba83d5222c9fcc3b99a0ed2411d38150734236755"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5472be7dc3269b4b52acba1433dac239215366f89dc1d8d0e64029abac4e714e"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7319cda750fca96ae5973efb31b17d97a5c5225ae0bc79bf5bf84df9e1ec2ab6"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:74d5ca5a255bf20b8def6a2b96b1e18ad37b4a122d59b154c458ee9494377f80"}, + {file = "orjson-3.10.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ff31d22ecc5fb85ef62c7d4afe8301d10c558d00dd24274d4bbe464380d3cd69"}, + {file = "orjson-3.10.12-cp39-none-win32.whl", hash = "sha256:c22c3ea6fba91d84fcb4cda30e64aff548fcf0c44c876e681f47d61d24b12e6b"}, + {file = "orjson-3.10.12-cp39-none-win_amd64.whl", hash = "sha256:be604f60d45ace6b0b33dd990a66b4526f1a7a186ac411c942674625456ca548"}, + {file = "orjson-3.10.12.tar.gz", hash = "sha256:0a78bbda3aea0f9f079057ee1ee8a1ecf790d4f1af88dd67493c6b8ee52506ff"}, ] [[package]] @@ -4413,8 +4675,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -4778,13 
+5040,13 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p [[package]] name = "posthog" -version = "3.7.0" +version = "3.7.4" description = "Integrate PostHog into any python application." optional = false python-versions = "*" files = [ - {file = "posthog-3.7.0-py2.py3-none-any.whl", hash = "sha256:3555161c3a9557b5666f96d8e1f17f410ea0f07db56e399e336a1656d4e5c722"}, - {file = "posthog-3.7.0.tar.gz", hash = "sha256:b095d4354ba23f8b346ab5daed8ecfc5108772f922006982dfe8b2d29ebc6e0e"}, + {file = "posthog-3.7.4-py2.py3-none-any.whl", hash = "sha256:21c18c6bf43b2de303ea4cd6e95804cc0f24c20cb2a96a8fd09da2ed50b62faa"}, + {file = "posthog-3.7.4.tar.gz", hash = "sha256:19384bd09d330f9787a7e2446aba14c8057ece56144970ea2791072d4e40cd36"}, ] [package.dependencies] @@ -4829,129 +5091,113 @@ wcwidth = "*" [[package]] name = "propcache" -version = "0.2.0" +version = "0.2.1" description = "Accelerated property cache" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, - {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, - {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, - {file = 
"propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, - {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, - {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, - {file = 
"propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, - {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, - {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, - {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, - {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, - {file = 
"propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, - {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, - {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, - {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, - {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, - {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, - {file = 
"propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, - {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, - {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, - {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, - {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, - {file = 
"propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, - {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = 
"sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, - {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, - {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, - {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2"}, + {file = "propcache-0.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634"}, + {file = "propcache-0.2.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958"}, + {file = 
"propcache-0.2.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034"}, + {file = "propcache-0.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b"}, + {file = "propcache-0.2.1-cp310-cp310-win32.whl", hash = "sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4"}, + {file = "propcache-0.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717"}, + {file = "propcache-0.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465"}, + {file = 
"propcache-0.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af"}, + {file = "propcache-0.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca"}, + {file = "propcache-0.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e"}, + {file = "propcache-0.2.1-cp311-cp311-win32.whl", hash = "sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034"}, + {file = "propcache-0.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0"}, + {file = "propcache-0.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d"}, + {file = 
"propcache-0.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24"}, + {file = "propcache-0.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6"}, + {file = "propcache-0.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518"}, + {file = "propcache-0.2.1-cp312-cp312-win32.whl", hash = "sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246"}, + {file = "propcache-0.2.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9"}, + {file = "propcache-0.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052"}, + {file = "propcache-0.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f"}, + {file = "propcache-0.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30"}, + {file = "propcache-0.2.1-cp313-cp313-win32.whl", hash = "sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6"}, + {file = "propcache-0.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e"}, + {file = "propcache-0.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16"}, + {file = "propcache-0.2.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d"}, + {file = 
"propcache-0.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04"}, + {file = "propcache-0.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587"}, + {file = "propcache-0.2.1-cp39-cp39-win32.whl", hash = "sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb"}, + {file = "propcache-0.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1"}, + {file = "propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54"}, + {file = "propcache-0.2.1.tar.gz", hash = "sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64"}, ] [[package]] name = "protobuf" -version = "5.28.3" +version = "4.25.5" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24"}, - {file = "protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868"}, - {file = "protobuf-5.28.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687"}, - {file = 
"protobuf-5.28.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584"}, - {file = "protobuf-5.28.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135"}, - {file = "protobuf-5.28.3-cp38-cp38-win32.whl", hash = "sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548"}, - {file = "protobuf-5.28.3-cp38-cp38-win_amd64.whl", hash = "sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b"}, - {file = "protobuf-5.28.3-cp39-cp39-win32.whl", hash = "sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535"}, - {file = "protobuf-5.28.3-cp39-cp39-win_amd64.whl", hash = "sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36"}, - {file = "protobuf-5.28.3-py3-none-any.whl", hash = "sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed"}, - {file = "protobuf-5.28.3.tar.gz", hash = "sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = 
"sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] @@ -5048,53 +5294,53 @@ bcrypt = ["bcrypt (>=4.1.2,<5)"] [[package]] name = "pyarrow" -version = "18.0.0" +version = "18.1.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.9" files = [ - {file = "pyarrow-18.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2333f93260674e185cfbf208d2da3007132572e56871f451ba1a556b45dae6e2"}, - {file = "pyarrow-18.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4c381857754da44326f3a49b8b199f7f87a51c2faacd5114352fc78de30d3aba"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:603cd8ad4976568954598ef0a6d4ed3dfb78aff3d57fa8d6271f470f0ce7d34f"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58a62549a3e0bc9e03df32f350e10e1efb94ec6cf63e3920c3385b26663948ce"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bc97316840a349485fbb137eb8d0f4d7057e1b2c1272b1a20eebbbe1848f5122"}, - {file = "pyarrow-18.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:2e549a748fa8b8715e734919923f69318c953e077e9c02140ada13e59d043310"}, - {file = "pyarrow-18.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:606e9a3dcb0f52307c5040698ea962685fb1c852d72379ee9412be7de9c5f9e2"}, - {file = "pyarrow-18.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = 
"sha256:d5795e37c0a33baa618c5e054cd61f586cf76850a251e2b21355e4085def6280"}, - {file = "pyarrow-18.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:5f0510608ccd6e7f02ca8596962afb8c6cc84c453e7be0da4d85f5f4f7b0328a"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616ea2826c03c16e87f517c46296621a7c51e30400f6d0a61be645f203aa2b93"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1824f5b029ddd289919f354bc285992cb4e32da518758c136271cf66046ef22"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6dd1b52d0d58dd8f685ced9971eb49f697d753aa7912f0a8f50833c7a7426319"}, - {file = "pyarrow-18.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:320ae9bd45ad7ecc12ec858b3e8e462578de060832b98fc4d671dee9f10d9954"}, - {file = "pyarrow-18.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2c992716cffb1088414f2b478f7af0175fd0a76fea80841b1706baa8fb0ebaad"}, - {file = "pyarrow-18.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:e7ab04f272f98ebffd2a0661e4e126036f6936391ba2889ed2d44c5006237802"}, - {file = "pyarrow-18.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:03f40b65a43be159d2f97fd64dc998f769d0995a50c00f07aab58b0b3da87e1f"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be08af84808dff63a76860847c48ec0416928a7b3a17c2f49a072cac7c45efbd"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c70c1965cde991b711a98448ccda3486f2a336457cf4ec4dca257a926e149c9"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:00178509f379415a3fcf855af020e3340254f990a8534294ec3cf674d6e255fd"}, - {file = "pyarrow-18.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a71ab0589a63a3e987beb2bc172e05f000a5c5be2636b4b263c44034e215b5d7"}, - {file = "pyarrow-18.0.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:fe92efcdbfa0bcf2fa602e466d7f2905500f33f09eb90bf0bcf2e6ca41b574c8"}, - {file = "pyarrow-18.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:907ee0aa8ca576f5e0cdc20b5aeb2ad4d3953a3b4769fc4b499e00ef0266f02f"}, - {file = "pyarrow-18.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:66dcc216ebae2eb4c37b223feaf82f15b69d502821dde2da138ec5a3716e7463"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1daf7c425f58527900876354390ee41b0ae962a73ad0959b9d829def583bb1"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871b292d4b696b09120ed5bde894f79ee2a5f109cb84470546471df264cae136"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:082ba62bdcb939824ba1ce10b8acef5ab621da1f4c4805e07bfd153617ac19d4"}, - {file = "pyarrow-18.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:2c664ab88b9766413197733c1720d3dcd4190e8fa3bbdc3710384630a0a7207b"}, - {file = "pyarrow-18.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc892be34dbd058e8d189b47db1e33a227d965ea8805a235c8a7286f7fd17d3a"}, - {file = "pyarrow-18.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:28f9c39a56d2c78bf6b87dcc699d520ab850919d4a8c7418cd20eda49874a2ea"}, - {file = "pyarrow-18.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:f1a198a50c409ab2d009fbf20956ace84567d67f2c5701511d4dd561fae6f32e"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5bd7fd32e3ace012d43925ea4fc8bd1b02cc6cc1e9813b518302950e89b5a22"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336addb8b6f5208be1b2398442c703a710b6b937b1a046065ee4db65e782ff5a"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:45476490dd4adec5472c92b4d253e245258745d0ccaabe706f8d03288ed60a79"}, - {file = "pyarrow-18.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", 
hash = "sha256:b46591222c864e7da7faa3b19455196416cd8355ff6c2cc2e65726a760a3c420"}, - {file = "pyarrow-18.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:eb7e3abcda7e1e6b83c2dc2909c8d045881017270a119cc6ee7fdcfe71d02df8"}, - {file = "pyarrow-18.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:09f30690b99ce34e0da64d20dab372ee54431745e4efb78ac938234a282d15f9"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d5ca5d707e158540312e09fd907f9f49bacbe779ab5236d9699ced14d2293b8"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6331f280c6e4521c69b201a42dd978f60f7e129511a55da9e0bfe426b4ebb8d"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3ac24b2be732e78a5a3ac0b3aa870d73766dd00beba6e015ea2ea7394f8b4e55"}, - {file = "pyarrow-18.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b30a927c6dff89ee702686596f27c25160dd6c99be5bcc1513a763ae5b1bfc03"}, - {file = "pyarrow-18.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:8f40ec677e942374e3d7f2fad6a67a4c2811a8b975e8703c6fd26d3b168a90e2"}, - {file = "pyarrow-18.0.0.tar.gz", hash = "sha256:a6aa027b1a9d2970cf328ccd6dbe4a996bc13c39fd427f502782f5bdb9ca20f5"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, + {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470"}, + {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56"}, + {file = "pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854"}, + {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe"}, + {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0"}, + {file = "pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d"}, + {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33"}, + {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30"}, + {file = "pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b"}, + {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c"}, + {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c"}, + {file = "pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc"}, + {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9"}, + {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e"}, + {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e"}, + {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7"}, + {file = "pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052"}, + {file = "pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73"}, ] [package.extras] @@ -5315,17 +5561,17 @@ torch = ["torch"] [[package]] name = "pylint" -version = "3.3.1" +version = "3.3.2" description = "python code static checker" optional = false python-versions = ">=3.9.0" files = [ - {file = "pylint-3.3.1-py3-none-any.whl", hash = 
"sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, - {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, + {file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"}, + {file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"}, ] [package.dependencies] -astroid = ">=3.3.4,<=3.4.0-dev0" +astroid = ">=3.3.5,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, @@ -5435,6 +5681,40 @@ pytest = ">=7.0.0" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +[[package]] +name = "pytest-repeat" +version = "0.9.3" +description = "pytest plugin for repeating tests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest_repeat-0.9.3-py3-none-any.whl", hash = "sha256:26ab2df18226af9d5ce441c858f273121e92ff55f5bb311d25755b8d7abdd8ed"}, + {file = "pytest_repeat-0.9.3.tar.gz", hash = "sha256:ffd3836dfcd67bb270bec648b330e20be37d2966448c4148c4092d1e8aba8185"}, +] + +[package.dependencies] +pytest = "*" + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] 
name = "python-dateutil" version = "2.9.0.post0" @@ -6134,13 +6414,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.3" +version = "0.10.4" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, - {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, ] [package.dependencies] @@ -6270,13 +6550,13 @@ win32 = ["pywin32"] [[package]] name = "sentry-sdk" -version = "2.18.0" +version = "2.19.0" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" files = [ - {file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"}, - {file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"}, + {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, + {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, ] [package.dependencies] @@ -6303,7 +6583,7 @@ grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] http2 = ["httpcore[http2] (==1.*)"] httpx = ["httpx (>=0.16.0)"] huey = ["huey (>=2)"] -huggingface-hub = ["huggingface-hub (>=0.22)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] langchain = ["langchain (>=0.0.210)"] launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] litestar = ["litestar (>=2.0.0)"] @@ -6312,7 +6592,7 @@ openai = ["openai (>=1.0.0)", "tiktoken 
(>=0.3.0)"] openfeature = ["openfeature-sdk (>=0.7.1)"] opentelemetry = ["opentelemetry-distro (>=0.35b0)"] opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure-eval"] +pure-eval = ["asttokens", "executing", "pure_eval"] pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] @@ -6325,23 +6605,23 @@ tornado = ["tornado (>=6)"] [[package]] name = "setuptools" -version = "75.3.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", 
"pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "shellingham" @@ -6652,15 +6932,29 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = 
"sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + [[package]] name = "tenacity" -version = "8.5.0" +version = "8.4.2" description = "Retry code until it succeeds" optional = false python-versions = ">=3.8" files = [ - {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"}, - {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"}, + {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"}, + {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"}, ] [package.extras] @@ -6771,123 +7065,26 @@ test = ["pytest", "ruff"] [[package]] name = "tokenizers" -version = "0.20.3" +version = "0.21.0" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.20.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:31ccab28dbb1a9fe539787210b0026e22debeab1662970f61c2d921f7557f7e4"}, - {file = "tokenizers-0.20.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6361191f762bda98c773da418cf511cbaa0cb8d0a1196f16f8c0119bde68ff8"}, - {file = "tokenizers-0.20.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f128d5da1202b78fa0a10d8d938610472487da01b57098d48f7e944384362514"}, - {file = "tokenizers-0.20.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:79c4121a2e9433ad7ef0769b9ca1f7dd7fa4c0cd501763d0a030afcbc6384481"}, - {file = "tokenizers-0.20.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7850fde24197fe5cd6556e2fdba53a6d3bae67c531ea33a3d7c420b90904141"}, - {file = "tokenizers-0.20.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b357970c095dc134978a68c67d845a1e3803ab7c4fbb39195bde914e7e13cf8b"}, - {file = 
"tokenizers-0.20.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a333d878c4970b72d6c07848b90c05f6b045cf9273fc2bc04a27211721ad6118"}, - {file = "tokenizers-0.20.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fd9fee817f655a8f50049f685e224828abfadd436b8ff67979fc1d054b435f1"}, - {file = "tokenizers-0.20.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9e7816808b402129393a435ea2a509679b41246175d6e5e9f25b8692bfaa272b"}, - {file = "tokenizers-0.20.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba96367db9d8a730d3a1d5996b4b7babb846c3994b8ef14008cd8660f55db59d"}, - {file = "tokenizers-0.20.3-cp310-none-win32.whl", hash = "sha256:ee31ba9d7df6a98619426283e80c6359f167e2e9882d9ce1b0254937dbd32f3f"}, - {file = "tokenizers-0.20.3-cp310-none-win_amd64.whl", hash = "sha256:a845c08fdad554fe0871d1255df85772f91236e5fd6b9287ef8b64f5807dbd0c"}, - {file = "tokenizers-0.20.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:585b51e06ca1f4839ce7759941e66766d7b060dccfdc57c4ca1e5b9a33013a90"}, - {file = "tokenizers-0.20.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61cbf11954f3b481d08723ebd048ba4b11e582986f9be74d2c3bdd9293a4538d"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef820880d5e4e8484e2fa54ff8d297bb32519eaa7815694dc835ace9130a3eea"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:67ef4dcb8841a4988cd00dd288fb95dfc8e22ed021f01f37348fd51c2b055ba9"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff1ef8bd47a02b0dc191688ccb4da53600df5d4c9a05a4b68e1e3de4823e78eb"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:444d188186eab3148baf0615b522461b41b1f0cd58cd57b862ec94b6ac9780f1"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:37c04c032c1442740b2c2d925f1857885c07619224a533123ac7ea71ca5713da"}, - {file = "tokenizers-0.20.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:453c7769d22231960ee0e883d1005c93c68015025a5e4ae56275406d94a3c907"}, - {file = "tokenizers-0.20.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4bb31f7b2847e439766aaa9cc7bccf7ac7088052deccdb2275c952d96f691c6a"}, - {file = "tokenizers-0.20.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:843729bf0f991b29655a069a2ff58a4c24375a553c70955e15e37a90dd4e045c"}, - {file = "tokenizers-0.20.3-cp311-none-win32.whl", hash = "sha256:efcce3a927b1e20ca694ba13f7a68c59b0bd859ef71e441db68ee42cf20c2442"}, - {file = "tokenizers-0.20.3-cp311-none-win_amd64.whl", hash = "sha256:88301aa0801f225725b6df5dea3d77c80365ff2362ca7e252583f2b4809c4cc0"}, - {file = "tokenizers-0.20.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:49d12a32e190fad0e79e5bdb788d05da2f20d8e006b13a70859ac47fecf6ab2f"}, - {file = "tokenizers-0.20.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:282848cacfb9c06d5e51489f38ec5aa0b3cd1e247a023061945f71f41d949d73"}, - {file = "tokenizers-0.20.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abe4e08c7d0cd6154c795deb5bf81d2122f36daf075e0c12a8b050d824ef0a64"}, - {file = "tokenizers-0.20.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ca94fc1b73b3883c98f0c88c77700b13d55b49f1071dfd57df2b06f3ff7afd64"}, - {file = "tokenizers-0.20.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef279c7e239f95c8bdd6ff319d9870f30f0d24915b04895f55b1adcf96d6c60d"}, - {file = "tokenizers-0.20.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16384073973f6ccbde9852157a4fdfe632bb65208139c9d0c0bd0176a71fd67f"}, - {file = "tokenizers-0.20.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:312d522caeb8a1a42ebdec87118d99b22667782b67898a76c963c058a7e41d4f"}, - {file = 
"tokenizers-0.20.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2b7cb962564785a83dafbba0144ecb7f579f1d57d8c406cdaa7f32fe32f18ad"}, - {file = "tokenizers-0.20.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:124c5882ebb88dadae1fc788a582299fcd3a8bd84fc3e260b9918cf28b8751f5"}, - {file = "tokenizers-0.20.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2b6e54e71f84c4202111a489879005cb14b92616a87417f6c102c833af961ea2"}, - {file = "tokenizers-0.20.3-cp312-none-win32.whl", hash = "sha256:83d9bfbe9af86f2d9df4833c22e94d94750f1d0cd9bfb22a7bb90a86f61cdb1c"}, - {file = "tokenizers-0.20.3-cp312-none-win_amd64.whl", hash = "sha256:44def74cee574d609a36e17c8914311d1b5dbcfe37c55fd29369d42591b91cf2"}, - {file = "tokenizers-0.20.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0b630e0b536ef0e3c8b42c685c1bc93bd19e98c0f1543db52911f8ede42cf84"}, - {file = "tokenizers-0.20.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a02d160d2b19bcbfdf28bd9a4bf11be4cb97d0499c000d95d4c4b1a4312740b6"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e3d80d89b068bc30034034b5319218c7c0a91b00af19679833f55f3becb6945"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:174a54910bed1b089226512b4458ea60d6d6fd93060254734d3bc3540953c51c"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:098b8a632b8656aa5802c46689462c5c48f02510f24029d71c208ec2c822e771"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78c8c143e3ae41e718588281eb3e212c2b31623c9d6d40410ec464d7d6221fb5"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b26b0aadb18cd8701077362ba359a06683662d5cafe3e8e8aba10eb05c037f1"}, - {file = "tokenizers-0.20.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:07d7851a72717321022f3774e84aa9d595a041d643fafa2e87fbc9b18711dac0"}, - {file = "tokenizers-0.20.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:bd44e48a430ada902c6266a8245f5036c4fe744fcb51f699999fbe82aa438797"}, - {file = "tokenizers-0.20.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a4c186bb006ccbe1f5cc4e0380d1ce7806f5955c244074fd96abc55e27b77f01"}, - {file = "tokenizers-0.20.3-cp313-none-win32.whl", hash = "sha256:6e19e0f1d854d6ab7ea0c743d06e764d1d9a546932be0a67f33087645f00fe13"}, - {file = "tokenizers-0.20.3-cp313-none-win_amd64.whl", hash = "sha256:d50ede425c7e60966a9680d41b58b3a0950afa1bb570488e2972fa61662c4273"}, - {file = "tokenizers-0.20.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:9adda1ff5fb9dcdf899ceca672a4e2ce9e797adb512a6467305ca3d8bfcfbdd0"}, - {file = "tokenizers-0.20.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:6dde2cae6004ba7a3badff4a11911cae03ebf23e97eebfc0e71fef2530e5074f"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4a7fd678b35614fca708579eb95b7587a5e8a6d328171bd2488fd9f27d82be4"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b80e3c7283a01a356bd2210f53d1a4a5d32b269c2024389ed0173137708d50e"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8cc0e8176b762973758a77f0d9c4467d310e33165fb74173418ca3734944da4"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5634b2e2f5f3d2b4439d2d74066e22eb4b1f04f3fea05cb2a3c12d89b5a3bcd"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b4ba635165bc1ea46f2da8e5d80b5f70f6ec42161e38d96dbef33bb39df73964"}, - {file = "tokenizers-0.20.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e4c7c64172e7789bd8b07aa3087ea87c4c4de7e90937a2aa036b5d92332536"}, - {file = 
"tokenizers-0.20.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1f74909ef7675c26d4095a817ec3393d67f3158ca4836c233212e5613ef640c4"}, - {file = "tokenizers-0.20.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0e9b81321a1e05b16487d312b4264984513f8b4a7556229cafac6e88c2036b09"}, - {file = "tokenizers-0.20.3-cp37-none-win32.whl", hash = "sha256:ab48184cd58b4a03022a2ec75b54c9f600ffea9a733612c02325ed636f353729"}, - {file = "tokenizers-0.20.3-cp37-none-win_amd64.whl", hash = "sha256:60ac483cebee1c12c71878523e768df02fa17e4c54412966cb3ac862c91b36c1"}, - {file = "tokenizers-0.20.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3229ef103c89583d10b9378afa5d601b91e6337530a0988e17ca8d635329a996"}, - {file = "tokenizers-0.20.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6ac52cc24bad3de865c7e65b1c4e7b70d00938a8ae09a92a453b8f676e714ad5"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04627b7b502fa6a2a005e1bd446fa4247d89abcb1afaa1b81eb90e21aba9a60f"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c27ceb887f0e81a3c377eb4605dca7a95a81262761c0fba308d627b2abb98f2b"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65ab780194da4e1fcf5670523a2f377c4838ebf5249efe41fa1eddd2a84fb49d"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d343134f47159e81f7f242264b0eb222e6b802f37173c8d7d7b64d5c9d1388"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2475bb004ab2009d29aff13b5047bfdb3d4b474f0aa9d4faa13a7f34dbbbb43"}, - {file = "tokenizers-0.20.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b6583a65c01db1197c1eb36857ceba8ec329d53afadd268b42a6b04f4965724"}, - {file = "tokenizers-0.20.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:62d00ba208358c037eeab7bfc00a905adc67b2d31b68ab40ed09d75881e114ea"}, - {file = "tokenizers-0.20.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0fc7a39e5bedc817bda395a798dfe2d9c5f7c71153c90d381b5135a0328d9520"}, - {file = "tokenizers-0.20.3-cp38-none-win32.whl", hash = "sha256:84d40ee0f8550d64d3ea92dd7d24a8557a9172165bdb986c9fb2503b4fe4e3b6"}, - {file = "tokenizers-0.20.3-cp38-none-win_amd64.whl", hash = "sha256:205a45246ed7f1718cf3785cff88450ba603352412aaf220ace026384aa3f1c0"}, - {file = "tokenizers-0.20.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:93e37f0269a11dc3b1a953f1fca9707f0929ebf8b4063c591c71a0664219988e"}, - {file = "tokenizers-0.20.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f4cb0c614b0135e781de96c2af87e73da0389ac1458e2a97562ed26e29490d8d"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7eb2fb1c432f5746b22f8a7f09fc18c4156cb0031c77f53cb19379d82d43297a"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfa8d029bb156181b006643309d6b673615a24e4ed24cf03aa191d599b996f51"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f90549622de3bf476ad9f1dd6f3f952ec3ed6ab8615ae88ef060d0c5bfad55d"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d469c74eebf5c43fd61cd9b030e271d17198edd7bd45392e03a3c091d7d6d4"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bee8f53b2594749f4460d53253bae55d718f04e9b633efa0f5df8938bd98e4f0"}, - {file = "tokenizers-0.20.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:938441babf3e5720e4459e306ef2809fb267680df9d1ff2873458b22aef60248"}, - {file = "tokenizers-0.20.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7310ab23d7b0caebecc0e8be11a1146f320f5f07284000f6ea54793e83de1b75"}, - {file = 
"tokenizers-0.20.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:16121eb030a2b13094cfec936b0c12e8b4063c5f839591ea7d0212336d8f9921"}, - {file = "tokenizers-0.20.3-cp39-none-win32.whl", hash = "sha256:401cc21ef642ee235985d747f65e18f639464d377c70836c9003df208d582064"}, - {file = "tokenizers-0.20.3-cp39-none-win_amd64.whl", hash = "sha256:7498f3ea7746133335a6adb67a77cf77227a8b82c8483f644a2e5f86fea42b8d"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e919f2e3e68bb51dc31de4fcbbeff3bdf9c1cad489044c75e2b982a91059bd3c"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b8e9608f2773996cc272156e305bd79066163a66b0390fe21750aff62df1ac07"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39270a7050deaf50f7caff4c532c01b3c48f6608d42b3eacdebdc6795478c8df"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e005466632b1c5d2d2120f6de8aa768cc9d36cd1ab7d51d0c27a114c91a1e6ee"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a07962340b36189b6c8feda552ea1bfeee6cf067ff922a1d7760662c2ee229e5"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:55046ad3dd5f2b3c67501fcc8c9cbe3e901d8355f08a3b745e9b57894855f85b"}, - {file = "tokenizers-0.20.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:efcf0eb939988b627558aaf2b9dc3e56d759cad2e0cfa04fcab378e4b48fc4fd"}, - {file = "tokenizers-0.20.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f3558a7ae6a6d38a77dfce12172a1e2e1bf3e8871e744a1861cd7591ea9ebe24"}, - {file = "tokenizers-0.20.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d53029fe44bc70c3ff14ef512460a0cf583495a0f8e2f4b70e26eb9438e38a9"}, - {file = 
"tokenizers-0.20.3-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a2a56397b2bec5a629b516b23f0f8a3e4f978c7488d4a299980f8375954b85"}, - {file = "tokenizers-0.20.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e5bfaae740ef9ece000f8a07e78ac0e2b085c5ce9648f8593ddf0243c9f76d"}, - {file = "tokenizers-0.20.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fbaf3ea28fedfb2283da60e710aff25492e795a7397cad8a50f1e079b65a5a70"}, - {file = "tokenizers-0.20.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c47c037116310dc976eb96b008e41b9cfaba002ed8005848d4d632ee0b7ba9ae"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c31751f0721f58f5e19bb27c1acc259aeff860d8629c4e1a900b26a1979ada8e"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:c697cbd3be7a79ea250ea5f380d6f12e534c543cfb137d5c734966b3ee4f34cc"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b48971b88ef9130bf35b41b35fd857c3c4dae4a9cd7990ebc7fc03e59cc92438"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e615de179bbe060ab33773f0d98a8a8572b5883dd7dac66c1de8c056c7e748c"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da1ec842035ed9999c62e45fbe0ff14b7e8a7e02bb97688cc6313cf65e5cd755"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6ee4954c1dd23aadc27958dad759006e71659d497dcb0ef0c7c87ea992c16ebd"}, - {file = "tokenizers-0.20.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3eda46ca402751ec82553a321bf35a617b76bbed7586e768c02ccacbdda94d6d"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:de082392a85eb0055cc055c535bff2f0cc15d7a000bdc36fbf601a0f3cf8507a"}, - {file = 
"tokenizers-0.20.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c3db46cc0647bfd88263afdb739b92017a02a87ee30945cb3e86c7e25c7c9917"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a292392f24ab9abac5cfa8197e5a6208f2e43723420217e1ceba0b4ec77816ac"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dcd91f4e60f62b20d83a87a84fe062035a1e3ff49a8c2bbdeb2d441c8e311f4"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:900991a2b8ee35961b1095db7e265342e0e42a84c1a594823d5ee9f8fb791958"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5a8d8261ca2133d4f98aa9627c748189502b3787537ba3d7e2beb4f7cfc5d627"}, - {file = "tokenizers-0.20.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c4fd4d71e6deb6ddf99d8d0eab87d1d16f635898906e631914a9bae8ae9f2cfb"}, - {file = "tokenizers-0.20.3.tar.gz", hash = "sha256:2278b34c5d0dd78e087e1ca7f9b1dcbf129d80211afa645f214bd6e051037539"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3c4c93eae637e7d2aaae3d376f06085164e1660f89304c0ab2b1d08a406636b2"}, + {file = "tokenizers-0.21.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:f53ea537c925422a2e0e92a24cce96f6bc5046bbef24a1652a5edc8ba975f62e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b177fb54c4702ef611de0c069d9169f0004233890e0c4c5bd5508ae05abf193"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6b43779a269f4629bebb114e19c3fca0223296ae9fea8bb9a7a6c6fb0657ff8e"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aeb255802be90acfd363626753fda0064a8df06031012fe7d52fd9a905eb00e"}, + {file = 
"tokenizers-0.21.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8b09dbeb7a8d73ee204a70f94fc06ea0f17dcf0844f16102b9f414f0b7463ba"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400832c0904f77ce87c40f1a8a27493071282f785724ae62144324f171377273"}, + {file = "tokenizers-0.21.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84ca973b3a96894d1707e189c14a774b701596d579ffc7e69debfc036a61a04"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:eb7202d231b273c34ec67767378cd04c767e967fda12d4a9e36208a34e2f137e"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:089d56db6782a73a27fd8abf3ba21779f5b85d4a9f35e3b493c7bbcbbf0d539b"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:c87ca3dc48b9b1222d984b6b7490355a6fdb411a2d810f6f05977258400ddb74"}, + {file = "tokenizers-0.21.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4145505a973116f91bc3ac45988a92e618a6f83eb458f49ea0790df94ee243ff"}, + {file = "tokenizers-0.21.0-cp39-abi3-win32.whl", hash = "sha256:eb1702c2f27d25d9dd5b389cc1f2f51813e99f8ca30d9e25348db6585a97e24a"}, + {file = "tokenizers-0.21.0-cp39-abi3-win_amd64.whl", hash = "sha256:87841da5a25a3a5f70c102de371db120f41873b854ba65e52bccd57df5a3780c"}, + {file = "tokenizers-0.21.0.tar.gz", hash = "sha256:ee0894bf311b75b0c03079f33859ae4b2334d675d4e93f5a4132e1eae2834fe4"}, ] [package.dependencies] @@ -6900,13 +7097,43 @@ testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = 
"sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = 
"tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -6922,40 +7149,40 @@ files = [ [[package]] name = "tornado" -version = "6.4.1" +version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">=3.8" files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = 
"sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, + {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, + {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, + {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, ] [[package]] name = "tqdm" -version = "4.67.0" +version = "4.67.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, - {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + 
{file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] @@ -7001,13 +7228,13 @@ test = ["vcrpy (>=1.10.3)"] [[package]] name = "typer" -version = "0.13.0" +version = "0.14.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.13.0-py3-none-any.whl", hash = "sha256:d85fe0b777b2517cc99c8055ed735452f2659cd45e451507c76f48ce5c1d00e2"}, - {file = "typer-0.13.0.tar.gz", hash = "sha256:f1c7198347939361eec90139ffa0fd8b3df3a2259d5852a0f7400e476d95985c"}, + {file = "typer-0.14.0-py3-none-any.whl", hash = "sha256:f476233a25770ab3e7b2eebf7c68f3bc702031681a008b20167573a4b7018f09"}, + {file = "typer-0.14.0.tar.gz", hash = "sha256:af58f737f8d0c0c37b9f955a6d39000b9ff97813afcbeef56af5e37cf743b45a"}, ] [package.dependencies] @@ -7029,13 +7256,13 @@ files = [ [[package]] name = "types-setuptools" -version = "75.3.0.20241107" +version = "75.6.0.20241126" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-75.3.0.20241107.tar.gz", hash = "sha256:f66710e1cd4a936e5fcc12d4e49be1a67c34372cf753e87ebe704426451b4012"}, - {file = "types_setuptools-75.3.0.20241107-py3-none-any.whl", hash = "sha256:bc6de6e2bcb6d610556304d0a69fe4ca208ac4896162647314ecfd9fd73d8550"}, + {file = "types_setuptools-75.6.0.20241126-py3-none-any.whl", hash = "sha256:aaae310a0e27033c1da8457d4d26ac673b0c8a0de7272d6d4708e263f2ea3b9b"}, + {file = "types_setuptools-75.6.0.20241126.tar.gz", hash = 
"sha256:7bf25ad4be39740e469f9268b6beddda6e088891fa5a27e985c6ce68bf62ace0"}, ] [[package]] @@ -7053,7 +7280,7 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -optional = true +optional = false python-versions = "*" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, @@ -7268,13 +7495,13 @@ test = ["websockets"] [[package]] name = "wheel" -version = "0.45.0" +version = "0.45.1" description = "A built-package format for Python" optional = false python-versions = ">=3.8" files = [ - {file = "wheel-0.45.0-py3-none-any.whl", hash = "sha256:52f0baa5e6522155090a09c6bd95718cc46956d1b51d537ea5454249edb671c7"}, - {file = "wheel-0.45.0.tar.gz", hash = "sha256:a57353941a3183b3d5365346b567a260a0602a0f8a635926a7dede41b94c674a"}, + {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, + {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, ] [package.extras] @@ -7304,81 +7531,76 @@ files = [ [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = 
"wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + 
{file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = 
"wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, + {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, + {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, + {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, + {file = 
"wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, + {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, + {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, + {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, + {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, + {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, + {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, + {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, + {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] @@ -7515,93 +7737,93 @@ files = [ [[package]] name = "yarl" -version = "1.17.1" +version = "1.18.3" description = "Yet another URL library" optional = false python-versions = ">=3.9" files = [ - {file = "yarl-1.17.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1794853124e2f663f0ea54efb0340b457f08d40a1cef78edfa086576179c91"}, - {file = "yarl-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fbea1751729afe607d84acfd01efd95e3b31db148a181a441984ce9b3d3469da"}, - {file = "yarl-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8ee427208c675f1b6e344a1f89376a9613fc30b52646a04ac0c1f6587c7e46ec"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b74ff4767d3ef47ffe0cd1d89379dc4d828d4873e5528976ced3b44fe5b0a21"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62a91aefff3d11bf60e5956d340eb507a983a7ec802b19072bb989ce120cd948"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:846dd2e1243407133d3195d2d7e4ceefcaa5f5bf7278f0a9bda00967e6326b04"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e844be8d536afa129366d9af76ed7cb8dfefec99f5f1c9e4f8ae542279a6dc3"}, - {file = "yarl-1.17.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc7c92c1baa629cb03ecb0c3d12564f172218fb1739f54bf5f3881844daadc6d"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae3476e934b9d714aa8000d2e4c01eb2590eee10b9d8cd03e7983ad65dfbfcba"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c7e177c619342e407415d4f35dec63d2d134d951e24b5166afcdfd1362828e17"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64cc6e97f14cf8a275d79c5002281f3040c12e2e4220623b5759ea7f9868d6a5"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:84c063af19ef5130084db70ada40ce63a84f6c1ef4d3dbc34e5e8c4febb20822"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:482c122b72e3c5ec98f11457aeb436ae4aecca75de19b3d1de7cf88bc40db82f"}, - {file = "yarl-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:380e6c38ef692b8fd5a0f6d1fa8774d81ebc08cfbd624b1bca62a4d4af2f9931"}, - {file = "yarl-1.17.1-cp310-cp310-win32.whl", hash = "sha256:16bca6678a83657dd48df84b51bd56a6c6bd401853aef6d09dc2506a78484c7b"}, - {file = "yarl-1.17.1-cp310-cp310-win_amd64.whl", hash 
= "sha256:561c87fea99545ef7d692403c110b2f99dced6dff93056d6e04384ad3bc46243"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988"}, - {file = "yarl-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d"}, - {file = "yarl-1.17.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860"}, - {file = 
"yarl-1.17.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4"}, - {file = "yarl-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4"}, - {file = "yarl-1.17.1-cp311-cp311-win32.whl", hash = "sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7"}, - {file = "yarl-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d"}, - {file = "yarl-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147"}, - {file = "yarl-1.17.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685"}, - {file 
= "yarl-1.17.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c"}, - {file = "yarl-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199"}, - {file = "yarl-1.17.1-cp312-cp312-win32.whl", hash = "sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96"}, - {file = "yarl-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df"}, - {file = "yarl-1.17.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488"}, - {file = "yarl-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374"}, - {file = "yarl-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e"}, - {file = "yarl-1.17.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299"}, - {file = "yarl-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258"}, - {file = "yarl-1.17.1-cp313-cp313-win32.whl", hash = "sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2"}, - {file = "yarl-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8994b29c462de9a8fce2d591028b986dbbe1b32f3ad600b2d3e1c482c93abad6"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9cbfbc5faca235fbdf531b93aa0f9f005ec7d267d9d738761a4d42b744ea159"}, - {file = "yarl-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b40d1bf6e6f74f7c0a567a9e5e778bbd4699d1d3d2c0fe46f4b717eef9e96b95"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f5efe0661b9fcd6246f27957f6ae1c0eb29bc60552820f01e970b4996e016004"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5c4804e4039f487e942c13381e6c27b4b4e66066d94ef1fae3f6ba8b953f383"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5d6a6c9602fd4598fa07e0389e19fe199ae96449008d8304bf5d47cb745462e"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4c9156c4d1eb490fe374fb294deeb7bc7eaccda50e23775b2354b6a6739934"}, - {file = "yarl-1.17.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6324274b4e0e2fa1b3eccb25997b1c9ed134ff61d296448ab8269f5ac068c4c"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d8a8b74d843c2638f3864a17d97a4acda58e40d3e44b6303b8cc3d3c44ae2d29"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:7fac95714b09da9278a0b52e492466f773cfe37651cf467a83a1b659be24bf71"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c180ac742a083e109c1a18151f4dd8675f32679985a1c750d2ff806796165b55"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578d00c9b7fccfa1745a44f4eddfdc99d723d157dad26764538fbdda37209857"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1a3b91c44efa29e6c8ef8a9a2b583347998e2ba52c5d8280dbd5919c02dfc3b5"}, - {file = "yarl-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7ac5b4984c468ce4f4a553df281450df0a34aefae02e58d77a0847be8d1e11f"}, - {file = "yarl-1.17.1-cp39-cp39-win32.whl", hash = "sha256:7294e38f9aa2e9f05f765b28ffdc5d81378508ce6dadbe93f6d464a8c9594473"}, - {file = "yarl-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:eb6dce402734575e1a8cc0bb1509afca508a400a57ce13d306ea2c663bad1138"}, - {file = "yarl-1.17.1-py3-none-any.whl", hash = 
"sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06"}, - {file = "yarl-1.17.1.tar.gz", hash = "sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, 
+ {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = 
"yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file 
= "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = 
"yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, ] [package.dependencies] @@ -7641,4 +7863,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.12" -content-hash = "474ae44ef721bf9b2d34d1cd139cddf42542ef9167895960784b6e88214dd1e6" +content-hash = "cb3ff99a83c819eb2870ba5dc4f7b7058e7653e25a28d9429de8c75c38603146" diff --git a/pyproject.toml b/pyproject.toml index 92b70db63..e0da2f189 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,6 +70,7 @@ asyncpg = "0.30.0" pgvector = "^0.3.5" psycopg2 = {version = "^2.9.10", optional = true} llama-index-core = {version = "^0.11.22", optional = true} +deepeval = "^2.0.1" [tool.poetry.extras] filesystem = ["s3fs", "botocore"] From dc082de4c2aa9d9b48f373ce85c0ce924885bccd Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Mon, 2 Dec 2024 14:54:40 +0100 Subject: [PATCH 47/52] minor bugfix in folder creation --- evals/llm_as_a_judge.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py index 0398f9422..e4bb7998c 100644 --- a/evals/llm_as_a_judge.py +++ b/evals/llm_as_a_judge.py @@ -78,7 +78,7 @@ async def eval_on_hotpotQA(answer_provider, num_samples, eval_metric): base_config = get_base_config() data_root_dir = base_config.data_root_directory if not Path(data_root_dir).exists(): - data_root_dir.mkdir() + Path(data_root_dir).mkdir() filepath = 
data_root_dir / Path("hotpot_dev_fullwiki_v1.json") if not filepath.exists(): url = 'http://curtis.ml.cmu.edu/datasets/hotpot/hotpot_dev_fullwiki_v1.json' From 0fbb50960b683cbee291362c0bbcd759c1c5eb05 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Tue, 3 Dec 2024 15:59:03 +0100 Subject: [PATCH 48/52] prompt renaming --- evals/llm_as_a_judge.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py index e4bb7998c..c84795642 100644 --- a/evals/llm_as_a_judge.py +++ b/evals/llm_as_a_judge.py @@ -24,7 +24,7 @@ async def answer_without_cognee(instance): "context": instance["context"], } user_prompt = render_prompt("context_for_question.txt", args) - system_prompt = read_query_prompt("answer_question.txt") + system_prompt = read_query_prompt("answer_hotpot_question.txt") llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( @@ -50,7 +50,7 @@ async def answer_with_cognee(instance): "context": search_results, } user_prompt = render_prompt("context_for_question.txt", args) - system_prompt = read_query_prompt("answer_question_kg.txt") + system_prompt = read_query_prompt("answer_hotpot_using_cognee_search.txt") llm_client = get_llm_client() answer_prediction = await llm_client.acreate_structured_output( From b98470887e95c0d0101ea8a82a424bd0f921d823 Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Tue, 3 Dec 2024 16:35:49 +0100 Subject: [PATCH 49/52] fix env var issue --- cognee/__init__.py | 11 +- poetry.lock | 412 ++++++++++++++++++++++----------------------- pyproject.toml | 4 +- 3 files changed, 208 insertions(+), 219 deletions(-) diff --git a/cognee/__init__.py b/cognee/__init__.py index e89ef1dc3..591140b87 100644 --- a/cognee/__init__.py +++ b/cognee/__init__.py @@ -1,9 +1,14 @@ -from .api.v1.config.config import config from .api.v1.add import add from .api.v1.cognify import cognify +from .api.v1.config.config import config from .api.v1.datasets.datasets 
import datasets -from .api.v1.search import search, SearchType, get_search_history from .api.v1.prune import prune - +from .api.v1.search import SearchType, get_search_history, search # Pipelines from .modules import pipelines + +try: + import dotenv + dotenv.load_dotenv() +except ImportError: + pass diff --git a/poetry.lock b/poetry.lock index 0450b1b12..7f2780406 100644 --- a/poetry.lock +++ b/poetry.lock @@ -359,21 +359,6 @@ files = [ astroid = ["astroid (>=2,<4)"] test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] -[[package]] -name = "astunparse" -version = "1.6.3" -description = "An AST unparser for Python" -optional = false -python-versions = "*" -files = [ - {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, - {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, -] - -[package.dependencies] -six = ">=1.6.1,<2.0" -wheel = ">=0.23.0,<1.0" - [[package]] name = "async-lru" version = "2.0.4" @@ -601,17 +586,17 @@ css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "boto3" -version = "1.35.72" +version = "1.35.73" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.72-py3-none-any.whl", hash = "sha256:410bb4ec676c57ee9c3c7824b7b1a3721584f18f8ee8ccc8e8ecdf285136b77f"}, - {file = "boto3-1.35.72.tar.gz", hash = "sha256:f9fc94413a959c388b1654c6687a5193293f3c69f8d0af3b86fd48b4096a23f3"}, + {file = "boto3-1.35.73-py3-none-any.whl", hash = "sha256:473438feafe77d29fbea532a91a65de0d8751a4fa5822127218710a205e28e7a"}, + {file = "boto3-1.35.73.tar.gz", hash = "sha256:ccb1a365d3084de53b58f8dfc056462f49b16931c139f4c8ac5f0bca8cb8fe81"}, ] [package.dependencies] -botocore = ">=1.35.72,<1.36.0" +botocore = ">=1.35.73,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -620,13 +605,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] 
[[package]] name = "botocore" -version = "1.35.72" +version = "1.35.73" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.72-py3-none-any.whl", hash = "sha256:7412877c3f766a1bfd09236e225ce1f0dc2c35e47949ae423e56e2093c8fa23a"}, - {file = "botocore-1.35.72.tar.gz", hash = "sha256:6b5fac38ef7cfdbc7781a751e0f78833ccb9149ba815bc238b1dbb75c90fbae5"}, + {file = "botocore-1.35.73-py3-none-any.whl", hash = "sha256:8a6a0f5ad119e38d850571df8c625dbad66aec1b20c15f84cdcb95258f9f1edb"}, + {file = "botocore-1.35.73.tar.gz", hash = "sha256:b2e3ecdd1769f011f72c4c0d0094570ba125f4ca327f24269e4d68eb5d9878b9"}, ] [package.dependencies] @@ -1133,7 +1118,7 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] name = "dataclasses-json" version = "0.6.7" description = "Easily serialize dataclasses to and from JSON." -optional = false +optional = true python-versions = "<4.0,>=3.7" files = [ {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, @@ -1232,7 +1217,7 @@ files = [ name = "deepeval" version = "2.0.1" description = "The open-source LLMs evaluation framework." -optional = false +optional = true python-versions = "<3.13,>=3.9" files = [ {file = "deepeval-2.0.1-py3-none-any.whl", hash = "sha256:e06134a36f6b2a9173c92bdebe1e42325f4e27ded77bbf4b5323fa955682b6ea"}, @@ -1282,7 +1267,7 @@ files = [ name = "deprecated" version = "1.2.15" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false +optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, @@ -1374,19 +1359,17 @@ files = [ [[package]] name = "dlt" -version = "1.4.0" +version = "1.4.1" description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run." optional = false python-versions = "<3.13,>=3.8.1" files = [ - {file = "dlt-1.4.0-py3-none-any.whl", hash = "sha256:c3a69e4067581bf0335796bec62d58058ff1f11249f16b699d6657544b126247"}, - {file = "dlt-1.4.0.tar.gz", hash = "sha256:75208448dc11dd501cf15d76742368816fef8e1b22fb07417f69d5ceb720b324"}, + {file = "dlt-1.4.1-py3-none-any.whl", hash = "sha256:fdfda790084914902579c903882b75d3c683b7545e15f86240cd1005bb3e8e64"}, + {file = "dlt-1.4.1.tar.gz", hash = "sha256:73e797e8d1c220caa80d464aebdd23c300b1c8385fbb605df04167fc513f8670"}, ] [package.dependencies] -aiohttp = ">=3.9" alembic = {version = ">1.10.0", optional = true, markers = "extra == \"sqlalchemy\""} -astunparse = ">=1.6.3" click = ">=7.1" fsspec = ">=2022.4.0" gitpython = ">=3.1.29" @@ -1431,6 +1414,7 @@ lancedb = ["lancedb (>=0.8.2)", "pyarrow (>=12.0.0)", "tantivy (>=0.22.0)"] motherduck = ["duckdb (>=0.9)", "pyarrow (>=12.0.0)"] mssql = ["pyodbc (>=4.0.39)"] parquet = ["pyarrow (>=12.0.0)"] +postgis = ["psycopg2-binary (>=2.9.1)", "psycopg2cffi (>=2.9.0)"] postgres = ["psycopg2-binary (>=2.9.1)", "psycopg2cffi (>=2.9.0)"] qdrant = ["qdrant-client[fastembed] (>=1.8)"] redshift = ["psycopg2-binary (>=2.9.1)", "psycopg2cffi (>=2.9.0)"] @@ -1477,7 +1461,7 @@ files = [ name = "docx2txt" version = "0.8" description = "A pure python-based utility to extract text and images from docx files." 
-optional = false +optional = true python-versions = "*" files = [ {file = "docx2txt-0.8.tar.gz", hash = "sha256:2c06d98d7cfe2d3947e5760a57d924e3ff07745b379c8737723922e7009236e5"}, @@ -1516,7 +1500,7 @@ test = ["pytest (>=6)"] name = "execnet" version = "2.1.1" description = "execnet: rapid multi-Python deployment" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, @@ -1656,61 +1640,61 @@ files = [ [[package]] name = "fonttools" -version = "4.55.0" +version = "4.55.1" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:51c029d4c0608a21a3d3d169dfc3fb776fde38f00b35ca11fdab63ba10a16f61"}, - {file = "fonttools-4.55.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bca35b4e411362feab28e576ea10f11268b1aeed883b9f22ed05675b1e06ac69"}, - {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ce4ba6981e10f7e0ccff6348e9775ce25ffadbee70c9fd1a3737e3e9f5fa74f"}, - {file = "fonttools-4.55.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31d00f9852a6051dac23294a4cf2df80ced85d1d173a61ba90a3d8f5abc63c60"}, - {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e198e494ca6e11f254bac37a680473a311a88cd40e58f9cc4dc4911dfb686ec6"}, - {file = "fonttools-4.55.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7208856f61770895e79732e1dcbe49d77bd5783adf73ae35f87fcc267df9db81"}, - {file = "fonttools-4.55.0-cp310-cp310-win32.whl", hash = "sha256:e7e6a352ff9e46e8ef8a3b1fe2c4478f8a553e1b5a479f2e899f9dc5f2055880"}, - {file = "fonttools-4.55.0-cp310-cp310-win_amd64.whl", hash = "sha256:636caaeefe586d7c84b5ee0734c1a5ab2dae619dc21c5cf336f304ddb8f6001b"}, - {file = 
"fonttools-4.55.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fa34aa175c91477485c44ddfbb51827d470011e558dfd5c7309eb31bef19ec51"}, - {file = "fonttools-4.55.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:37dbb3fdc2ef7302d3199fb12468481cbebaee849e4b04bc55b77c24e3c49189"}, - {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5263d8e7ef3c0ae87fbce7f3ec2f546dc898d44a337e95695af2cd5ea21a967"}, - {file = "fonttools-4.55.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f307f6b5bf9e86891213b293e538d292cd1677e06d9faaa4bf9c086ad5f132f6"}, - {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f0a4b52238e7b54f998d6a56b46a2c56b59c74d4f8a6747fb9d4042190f37cd3"}, - {file = "fonttools-4.55.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3e569711464f777a5d4ef522e781dc33f8095ab5efd7548958b36079a9f2f88c"}, - {file = "fonttools-4.55.0-cp311-cp311-win32.whl", hash = "sha256:2b3ab90ec0f7b76c983950ac601b58949f47aca14c3f21eed858b38d7ec42b05"}, - {file = "fonttools-4.55.0-cp311-cp311-win_amd64.whl", hash = "sha256:aa046f6a63bb2ad521004b2769095d4c9480c02c1efa7d7796b37826508980b6"}, - {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:838d2d8870f84fc785528a692e724f2379d5abd3fc9dad4d32f91cf99b41e4a7"}, - {file = "fonttools-4.55.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f46b863d74bab7bb0d395f3b68d3f52a03444964e67ce5c43ce43a75efce9246"}, - {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33b52a9cfe4e658e21b1f669f7309b4067910321757fec53802ca8f6eae96a5a"}, - {file = "fonttools-4.55.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732a9a63d6ea4a81b1b25a1f2e5e143761b40c2e1b79bb2b68e4893f45139a40"}, - {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:7dd91ac3fcb4c491bb4763b820bcab6c41c784111c24172616f02f4bc227c17d"}, - {file = "fonttools-4.55.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f0e115281a32ff532118aa851ef497a1b7cda617f4621c1cdf81ace3e36fb0c"}, - {file = "fonttools-4.55.0-cp312-cp312-win32.whl", hash = "sha256:6c99b5205844f48a05cb58d4a8110a44d3038c67ed1d79eb733c4953c628b0f6"}, - {file = "fonttools-4.55.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8c8c76037d05652510ae45be1cd8fb5dd2fd9afec92a25374ac82255993d57c"}, - {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8118dc571921dc9e4b288d9cb423ceaf886d195a2e5329cc427df82bba872cd9"}, - {file = "fonttools-4.55.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01124f2ca6c29fad4132d930da69158d3f49b2350e4a779e1efbe0e82bd63f6c"}, - {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ffd58d2691f11f7c8438796e9f21c374828805d33e83ff4b76e4635633674c"}, - {file = "fonttools-4.55.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5435e5f1eb893c35c2bc2b9cd3c9596b0fcb0a59e7a14121562986dd4c47b8dd"}, - {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d12081729280c39d001edd0f4f06d696014c26e6e9a0a55488fabc37c28945e4"}, - {file = "fonttools-4.55.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7ad1f1b98ab6cb927ab924a38a8649f1ffd7525c75fe5b594f5dab17af70e18"}, - {file = "fonttools-4.55.0-cp313-cp313-win32.whl", hash = "sha256:abe62987c37630dca69a104266277216de1023cf570c1643bb3a19a9509e7a1b"}, - {file = "fonttools-4.55.0-cp313-cp313-win_amd64.whl", hash = "sha256:2863555ba90b573e4201feaf87a7e71ca3b97c05aa4d63548a4b69ea16c9e998"}, - {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:00f7cf55ad58a57ba421b6a40945b85ac7cc73094fb4949c41171d3619a3a47e"}, - {file = "fonttools-4.55.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:f27526042efd6f67bfb0cc2f1610fa20364396f8b1fc5edb9f45bb815fb090b2"}, - {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e67974326af6a8879dc2a4ec63ab2910a1c1a9680ccd63e4a690950fceddbe"}, - {file = "fonttools-4.55.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61dc0a13451143c5e987dec5254d9d428f3c2789a549a7cf4f815b63b310c1cc"}, - {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b2e526b325a903868c62155a6a7e24df53f6ce4c5c3160214d8fe1be2c41b478"}, - {file = "fonttools-4.55.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b7ef9068a1297714e6fefe5932c33b058aa1d45a2b8be32a4c6dee602ae22b5c"}, - {file = "fonttools-4.55.0-cp38-cp38-win32.whl", hash = "sha256:55718e8071be35dff098976bc249fc243b58efa263768c611be17fe55975d40a"}, - {file = "fonttools-4.55.0-cp38-cp38-win_amd64.whl", hash = "sha256:553bd4f8cc327f310c20158e345e8174c8eed49937fb047a8bda51daf2c353c8"}, - {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f901cef813f7c318b77d1c5c14cf7403bae5cb977cede023e22ba4316f0a8f6"}, - {file = "fonttools-4.55.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c9679fc0dd7e8a5351d321d8d29a498255e69387590a86b596a45659a39eb0d"}, - {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd2820a8b632f3307ebb0bf57948511c2208e34a4939cf978333bc0a3f11f838"}, - {file = "fonttools-4.55.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23bbbb49bec613a32ed1b43df0f2b172313cee690c2509f1af8fdedcf0a17438"}, - {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a656652e1f5d55b9728937a7e7d509b73d23109cddd4e89ee4f49bde03b736c6"}, - {file = "fonttools-4.55.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f50a1f455902208486fbca47ce33054208a4e437b38da49d6721ce2fef732fcf"}, - {file = "fonttools-4.55.0-cp39-cp39-win32.whl", hash = 
"sha256:161d1ac54c73d82a3cded44202d0218ab007fde8cf194a23d3dd83f7177a2f03"}, - {file = "fonttools-4.55.0-cp39-cp39-win_amd64.whl", hash = "sha256:ca7fd6987c68414fece41c96836e945e1f320cda56fc96ffdc16e54a44ec57a2"}, - {file = "fonttools-4.55.0-py3-none-any.whl", hash = "sha256:12db5888cd4dd3fcc9f0ee60c6edd3c7e1fd44b7dd0f31381ea03df68f8a153f"}, - {file = "fonttools-4.55.0.tar.gz", hash = "sha256:7636acc6ab733572d5e7eec922b254ead611f1cdad17be3f0be7418e8bfaca71"}, + {file = "fonttools-4.55.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c17a6f9814f83772cd6d9c9009928e1afa4ab66210a31ced721556651075a9a0"}, + {file = "fonttools-4.55.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c4d14eecc814826a01db87a40af3407c892ba49996bc6e49961e386cd78b537c"}, + {file = "fonttools-4.55.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8589f9a15dc005592b94ecdc45b4dfae9bbe9e73542e89af5a5e776e745db83b"}, + {file = "fonttools-4.55.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfee95bd9395bcd9e6c78955387554335109b6a613db71ef006020b42f761c58"}, + {file = "fonttools-4.55.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34fa2ecc0bf1923d1a51bf2216a006de2c3c0db02c6aa1470ea50b62b8619bd5"}, + {file = "fonttools-4.55.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c1c48483148bfb1b9ad951133ceea957faa004f6cb475b67e7bc75d482b48f8"}, + {file = "fonttools-4.55.1-cp310-cp310-win32.whl", hash = "sha256:3e2fc388ca7d023b3c45badd71016fd4185f93e51a22cfe4bd65378af7fba759"}, + {file = "fonttools-4.55.1-cp310-cp310-win_amd64.whl", hash = "sha256:c4c36c71f69d2b3ee30394b0986e5f8b2c461e7eff48dde49b08a90ded9fcdbd"}, + {file = "fonttools-4.55.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5daab3a55d460577f45bb8f5a8eca01fa6cde43ef2ab943b527991f54b735c41"}, + {file = "fonttools-4.55.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:acf1e80cf96c2fbc79e46f669d8713a9a79faaebcc68e31a9fbe600cf8027992"}, + {file = 
"fonttools-4.55.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e88a0329f7f88a210f09f79c088fb64f8032fc3ab65e2390a40b7d3a11773026"}, + {file = "fonttools-4.55.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03105b42259a8a94b2f0cbf1bee45f7a8a34e7b26c946a8fb89b4967e44091a8"}, + {file = "fonttools-4.55.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9af3577e821649879ab5774ad0e060af34816af556c77c6d3820345d12bf415e"}, + {file = "fonttools-4.55.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:34bd5de3d0ad085359b79a96575cd6bd1bc2976320ef24a2aa152ead36dbf656"}, + {file = "fonttools-4.55.1-cp311-cp311-win32.whl", hash = "sha256:5da92c4b637f0155a41f345fa81143c8e17425260fcb21521cb2ad4d2cea2a95"}, + {file = "fonttools-4.55.1-cp311-cp311-win_amd64.whl", hash = "sha256:f70234253d15f844e6da1178f019a931f03181463ce0c7b19648b8c370527b07"}, + {file = "fonttools-4.55.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9c372e527d58ba64b695f15f8014e97bc8826cf64d3380fc89b4196edd3c0fa8"}, + {file = "fonttools-4.55.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:845a967d3bef3245ba81fb5582dc731f6c2c8417fa211f1068c56893504bc000"}, + {file = "fonttools-4.55.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03be82bcd4ba4418adf10e6165743f824bb09d6594c2743d7f93ea50968805b"}, + {file = "fonttools-4.55.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c42e935cf146f826f556d977660dac88f2fa3fb2efa27d5636c0b89a60c16edf"}, + {file = "fonttools-4.55.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:96328bf91e05621d8e40d9f854af7a262cb0e8313e9b38e7f3a7f3c4c0caaa8b"}, + {file = "fonttools-4.55.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:291acec4d774e8cd2d8472d88c04643a77a3324a15247951bd6cfc969799b69e"}, + {file = "fonttools-4.55.1-cp312-cp312-win32.whl", hash = 
"sha256:6d768d6632809aec1c3fa8f195b173386d85602334701a6894a601a4d3c80368"}, + {file = "fonttools-4.55.1-cp312-cp312-win_amd64.whl", hash = "sha256:2a3850afdb0be1f79a1e95340a2059226511675c5b68098d4e49bfbeb48a8aab"}, + {file = "fonttools-4.55.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0c88d427eaf8bd8497b9051f56e0f5f9fb96a311aa7c72cda35e03e18d59cd16"}, + {file = "fonttools-4.55.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f062c95a725a79fd908fe8407b6ad63e230e1c7d6dece2d5d6ecaf843d6927f6"}, + {file = "fonttools-4.55.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f298c5324c45cad073475146bf560f4110ce2dc2488ff12231a343ec489f77bc"}, + {file = "fonttools-4.55.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f06dbb71344ffd85a6cb7e27970a178952f0bdd8d319ed938e64ba4bcc41700"}, + {file = "fonttools-4.55.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4c46b3525166976f5855b1f039b02433dc51eb635fb54d6a111e0c5d6e6cdc4c"}, + {file = "fonttools-4.55.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:af46f52a21e086a2f89b87bd941c9f0f91e5f769e1a5eb3b37c912228814d3e5"}, + {file = "fonttools-4.55.1-cp313-cp313-win32.whl", hash = "sha256:cd7f36335c5725a3fd724cc667c10c3f5254e779bdc5bffefebb33cf5a75ecb1"}, + {file = "fonttools-4.55.1-cp313-cp313-win_amd64.whl", hash = "sha256:5d6394897710ccac7f74df48492d7f02b9586ff0588c66a2c218844e90534b22"}, + {file = "fonttools-4.55.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:52c4f4b383c56e1a4fe8dab1b63c2269ba9eab0695d2d8e033fa037e61e6f1ef"}, + {file = "fonttools-4.55.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d83892dafdbd62b56545c77b6bd4fa49eef6ec1d6b95e042ee2c930503d1831e"}, + {file = "fonttools-4.55.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604d5bf16f811fcaaaec2dde139f7ce958462487565edcd54b6fadacb2942083"}, + {file = 
"fonttools-4.55.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3324b92feb5fd084923a8e89a8248afd5b9f9d81ab9517d7b07cc84403bd448"}, + {file = "fonttools-4.55.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:30f8b1ca9b919c04850678d026fc330c19acaa9e3b282fcacc09a5eb3c8d20c3"}, + {file = "fonttools-4.55.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:1835c98df2cf28c86a66d234895c87df7b9325fd079a8019c5053a389ff55d23"}, + {file = "fonttools-4.55.1-cp38-cp38-win32.whl", hash = "sha256:9f202703720a7cc0049f2ed1a2047925e264384eb5cc4d34f80200d7b17f1b6a"}, + {file = "fonttools-4.55.1-cp38-cp38-win_amd64.whl", hash = "sha256:2efff20aed0338d37c2ff58766bd67f4b9607ded61cf3d6baf1b3e25ea74e119"}, + {file = "fonttools-4.55.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3032d9bf010c395e6eca2851666cafb1f4ecde85d420188555e928ad0144326e"}, + {file = "fonttools-4.55.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0794055588c30ffe25426048e8a7c0a5271942727cd61fc939391e37f4d580d5"}, + {file = "fonttools-4.55.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13ba980e3ffd3206b8c63a365f90dc10eeec27da946d5ee5373c3a325a46d77c"}, + {file = "fonttools-4.55.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d7063babd7434a17a5e355e87de9b2306c85a5c19c7da0794be15c58aab0c39"}, + {file = "fonttools-4.55.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ed84c15144015a58ef550dd6312884c9fb31a2dbc31a6467bcdafd63be7db476"}, + {file = "fonttools-4.55.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e89419d88b0bbfdb55209e03a17afa2d20db3c2fa0d785543c9d0875668195d5"}, + {file = "fonttools-4.55.1-cp39-cp39-win32.whl", hash = "sha256:6eb781e401b93cda99356bc043ababead2a5096550984d8a4ecf3d5c9f859dc2"}, + {file = "fonttools-4.55.1-cp39-cp39-win_amd64.whl", hash = "sha256:db1031acf04523c5a51c3e1ae19c21a1c32bc5f820a477dd4659a02f9cb82002"}, + {file = "fonttools-4.55.1-py3-none-any.whl", hash = 
"sha256:4bcfb11f90f48b48c366dd638d773a52fca0d1b9e056dc01df766bf5835baa08"}, + {file = "fonttools-4.55.1.tar.gz", hash = "sha256:85bb2e985718b0df96afc659abfe194c171726054314b019dbbfed31581673c7"}, ] [package.extras] @@ -1945,7 +1929,7 @@ files = [ name = "googleapis-common-protos" version = "1.66.0" description = "Common protobufs used in Google APIs" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, @@ -2397,7 +2381,7 @@ socks = ["socksio (==1.*)"] name = "httpx-sse" version = "0.4.0" description = "Consume Server-Sent Event (SSE) messages with HTTPX." -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, @@ -3264,7 +3248,7 @@ tests = ["aiohttp", "boto3", "duckdb", "pandas (>=1.4)", "polars (>=0.19,<=1.3.0 name = "langchain" version = "0.3.9" description = "Building applications with LLMs through composability" -optional = false +optional = true python-versions = "<4.0,>=3.9" files = [ {file = "langchain-0.3.9-py3-none-any.whl", hash = "sha256:ade5a1fee2f94f2e976a6c387f97d62cc7f0b9f26cfe0132a41d2bda761e1045"}, @@ -3286,13 +3270,13 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" [[package]] name = "langchain-community" -version = "0.3.8" +version = "0.3.9" description = "Community contributed LangChain integrations." 
-optional = false +optional = true python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_community-0.3.8-py3-none-any.whl", hash = "sha256:191b3fcdf6b2e92934f4daeba5f5d0ac684b03772b15ef9d3c3fbcd86bd6cd64"}, - {file = "langchain_community-0.3.8.tar.gz", hash = "sha256:f7575a717d95208d0e969c090104622783c6a38a5527657aa5aa38776fadc835"}, + {file = "langchain_community-0.3.9-py3-none-any.whl", hash = "sha256:ccccf9e703ccb7d929034be56e36177e3ee796e5ab8417aa79c25dc6ef40e1bd"}, + {file = "langchain_community-0.3.9.tar.gz", hash = "sha256:b0b44c530c7647a360f2321749e7b7e95a3cbdfa2fceed7e1214228833996223"}, ] [package.dependencies] @@ -3306,7 +3290,7 @@ numpy = {version = ">=1.22.4,<2", markers = "python_version < \"3.12\""} pydantic-settings = ">=2.4.0,<3.0.0" PyYAML = ">=5.3" requests = ">=2,<3" -SQLAlchemy = ">=1.4,<2.0.36" +SQLAlchemy = ">=1.4,<3" tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10" [[package]] @@ -3333,7 +3317,7 @@ typing-extensions = ">=4.7" name = "langchain-openai" version = "0.2.5" description = "An integration package connecting OpenAI and LangChain" -optional = false +optional = true python-versions = "<4.0,>=3.9" files = [ {file = "langchain_openai-0.2.5-py3-none-any.whl", hash = "sha256:745fd9d51a5a3a9cb8839d41f3786ab38dfc539e47c713a806cbca32f3d0875c"}, @@ -3626,7 +3610,7 @@ files = [ name = "marshmallow" version = "3.23.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false +optional = true python-versions = ">=3.9" files = [ {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, @@ -4416,7 +4400,7 @@ datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] name = "opentelemetry-api" version = "1.27.0" description = "OpenTelemetry Python API" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, @@ -4431,7 +4415,7 @@ importlib-metadata = ">=6.0,<=8.4.0" name = "opentelemetry-exporter-otlp-proto-common" version = "1.27.0" description = "OpenTelemetry Protobuf encoding" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"}, @@ -4445,7 +4429,7 @@ opentelemetry-proto = "1.27.0" name = "opentelemetry-exporter-otlp-proto-grpc" version = "1.27.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"}, @@ -4465,7 +4449,7 @@ opentelemetry-sdk = ">=1.27.0,<1.28.0" name = "opentelemetry-proto" version = "1.27.0" description = "OpenTelemetry Python Proto" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"}, @@ -4479,7 +4463,7 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-sdk" version = "1.27.0" description = "OpenTelemetry Python SDK" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = 
"opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"}, @@ -4495,7 +4479,7 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.48b0" description = "OpenTelemetry Semantic Conventions" -optional = false +optional = true python-versions = ">=3.8" files = [ {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"}, @@ -5063,13 +5047,13 @@ test = ["coverage", "django", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)" [[package]] name = "prometheus-client" -version = "0.21.0" +version = "0.21.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, - {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, + {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, + {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, ] [package.extras] @@ -5685,7 +5669,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-repeat" version = "0.9.3" description = "pytest plugin for repeating tests" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "pytest_repeat-0.9.3-py3-none-any.whl", hash = "sha256:26ab2df18226af9d5ce441c858f273121e92ff55f5bb311d25755b8d7abdd8ed"}, @@ -5699,7 +5683,7 @@ pytest = "*" name = "pytest-xdist" version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" -optional = false +optional = true python-versions = 
">=3.8" files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, @@ -6289,101 +6273,112 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.21.0" +version = "0.22.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, - {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, - {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, - {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, - {file = 
"rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, - {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, - {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, - {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, - {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, - {file = 
"rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, - {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, - {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, - {file 
= "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, - {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, - {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, - {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, + {file = "rpds_py-0.22.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a4366f264fa60d3c109f0b27af0cd9eb8d46746bd70bd3d9d425f035b6c7e286"}, + {file = "rpds_py-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e34a3e665d38d0749072e6565400c8ce9abae976e338919a0dfbfb0e1ba43068"}, + {file = "rpds_py-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38cacf1f378571450576f2c8ce87da6f3fddc59d744de5c12b37acc23285b1e1"}, + {file = "rpds_py-0.22.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8cbb040fec8eddd5a6a75e737fd73c9ce37e51f94bacdd0b178d0174a4758395"}, + {file = "rpds_py-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d80fd710b3307a3c63809048b72c536689b9b0b31a2518339c3f1a4d29c73d7a"}, + {file = "rpds_py-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b5d17d8f5b885ce50e0cda85f99c0719e365e98b587338535fa566a48375afb"}, + {file = "rpds_py-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7a048ec1ebc991331d709be4884dc318c9eaafa66dcde8be0933ac0e702149"}, + {file = 
"rpds_py-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:306da3dfa174b489a3fc63b0872e2226a5ddf94c59875a770d72aff945d5ed96"}, + {file = "rpds_py-0.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c7b4450093c0c909299770226fb0285be47b0a57545bae25b5c4e51566b0e587"}, + {file = "rpds_py-0.22.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0903ffdb5b9007e503203b6285e4ff0faf96d875c19f1d103b475acf7d9f7311"}, + {file = "rpds_py-0.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d1522025cda9e57329aade769f56e5793b2a5da7759a21914ee10e67e17e601e"}, + {file = "rpds_py-0.22.0-cp310-cp310-win32.whl", hash = "sha256:49e084d47a66027ac72844f9f52f13d347a9a1f05d4f84381b420e47f836a7fd"}, + {file = "rpds_py-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9ceca96df54cb1675a0b7f52f1c6d5d1df62c5b40741ba211780f1b05a282a2"}, + {file = "rpds_py-0.22.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:771c9a3851beaa617d8c8115d65f834a2b52490f42ee2b88b13f1fc5529e9e0c"}, + {file = "rpds_py-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:341a07a4b55126bfae68c9bf24220a73d456111e5eb3dcbdab9fd16de2341224"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7649c8b8e4bd1ccc5fcbd51a855d57a617deeba19c66e3d04b1abecc61036b2"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f513758e7cda8bc262e80299a8e3395d7ef7f4ae705be62632f229bc6c33208"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba1fc34d0b2f6fd53377a4c954116251eba6d076bf64f903311f4a7d27d10acd"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:632d2fdddd9fbe3ac8896a119fd18a71fc95ca9c4cbe5223096c142d8c4a2b1d"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:326e42f2b49462e05f8527a1311ce98f9f97c484b3e443ec0ea4638bed3aebcf"}, + {file = "rpds_py-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9bbdba9e75b1a9ee1dd1335034dad998ef1acc08492226c6fd50aa773bdfa7d"}, + {file = "rpds_py-0.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:41f65a97bf2c4b161c9f8f89bc37058346bec9b36e373c8ad00a16c957bff625"}, + {file = "rpds_py-0.22.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0686f2c16eafdc2c6b4ce6e86e5b3092e87db09ae64be2787616444eb35b9756"}, + {file = "rpds_py-0.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e7c9aa2353eb0b0d845323857197daa036c2ff8624df990b0d886d22a8f665e"}, + {file = "rpds_py-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2d2fc3ab021be3e0b5aec6d4164f2689d231b8bfc5185cc454314746aa4aee72"}, + {file = "rpds_py-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:87453d491369cd8018016d2714a13e8461975161703c18ee31eecf087a8ae5d4"}, + {file = "rpds_py-0.22.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e9d4293b21c69ee4f9e1a99ac4f772951d345611c614a0cfae2ec6b565279bc9"}, + {file = "rpds_py-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:67e013a17a3db4d98cc228fd5aeb36a51b0f5cf7330b9102a552060f1fe4e560"}, + {file = "rpds_py-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b639a19e1791b646d27f15d17530a51722cc728d43b2dff3aeb904f92d91bac"}, + {file = "rpds_py-0.22.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1357c3092702078b7782b6ebd5ba9b22c1a291c34fbf9d8f1a48237466ac7758"}, + {file = "rpds_py-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:842855bbb113a19c393c6de5aa6ed9a26c6b13c2fead5e49114d39f0d08b94d8"}, + {file = "rpds_py-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ae7927cd2b869ca4dc645169d8af5494a29c99afd0ea0f24dd00c811ab1d8b8"}, + {file = 
"rpds_py-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91bfef5daa2a5a4fe62f8d317fc91a626073639f951f851bd2cb252d01bc6c5"}, + {file = "rpds_py-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fc4824e38c1e91a73bc820e7caacaf19d0acd557465aceef0420ca59489b390"}, + {file = "rpds_py-0.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:92d28a608127b357da47c99e0d0e0655ca2060286540fe9f2a25a2e8ac666e05"}, + {file = "rpds_py-0.22.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c637188b930175c256f13adbfc427b83ec7e64476d1ec9d6608f312bb84e06c3"}, + {file = "rpds_py-0.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:93bbd66f46dddc41e8c656130c97c0fb515e0fa44e1eebb2592769dbbd41b2f5"}, + {file = "rpds_py-0.22.0-cp312-cp312-win32.whl", hash = "sha256:54d8f94dec5765a9edc19610fecf0fdf9cab36cbb9def1213188215f735a6f98"}, + {file = "rpds_py-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:931bf3d0705b2834fed29354f35170fa022fe22a95542b61b7c66aca5f8a224f"}, + {file = "rpds_py-0.22.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2a57300cc8b034c5707085249efd09f19116bb80278d0ec925d7f3710165c510"}, + {file = "rpds_py-0.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c398a5a8e258dfdc5ea2aa4e5aa2ca3207f654a8eb268693dd1a76939074a588"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a6cc4eb1e86364331928acafb2bb41d8ab735ca3caf2d6019b9f6dac3f4f65d"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:574c5c94213bc9990805bfd7e4ba3826d3c098516cbc19f0d0ef0433ad93fa06"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c0321bc03a1c513eca1837e3bba948b975bcf3a172aebc197ab3573207f137a"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d276280649305c1da6cdd84585d48ae1f0efa67434d8b10d2df95228e59a05bb"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c17b43fe9c6da16885e3fe28922bcd1a029e61631fb771c7d501019b40bcc904"}, + {file = "rpds_py-0.22.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48c95997af9314f4034fe5ba2d837399e786586e220835a578d28fe8161e6ae5"}, + {file = "rpds_py-0.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9aa4af6b879bb75a3c7766fbf49d77f4097dd12b548ecbbd8b3f85caa833281"}, + {file = "rpds_py-0.22.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8426f97117b914b9bfb2a7bd46edc148e8defda728a55a5df3a564abe70cd7a4"}, + {file = "rpds_py-0.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:034964ea0ea09645bdde13038b38abb14be0aa747f20fcfab6181207dd9e0483"}, + {file = "rpds_py-0.22.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:3dc7c64b56b82428894f056e9ff6e8ee917ff74fc26b65211a33602c2372e928"}, + {file = "rpds_py-0.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:1212cb231f2002934cd8d71a0d718fdd9d9a2dd671e0feef8501038df3508026"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f21e1278c9456cd601832375c778ca44614d3433996488221a56572c223f04a"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:875fe8dffb43c20f68379ee098b035a7038d7903c795d46715f66575a7050b19"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e23dcdd4b2ff9c6b3317ea7921b210d39592f8ca1cdea58ada25b202c65c0a69"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0fb8efc9e579acf1e556fd86277fecec320c21ca9b5d39db96433ad8c45bc4a"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fe23687924b25a2dee52fab15976fd6577ed8518072bcda9ff2e2b88ab1f168b"}, + {file = "rpds_py-0.22.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5469b347445d1c31105f33e7bfc9a8ba213d48e42641a610dda65bf9e3c83f5"}, + {file = "rpds_py-0.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a810a57ce5e8ecf8eac6ec4dab534ff80c34e5a2c31db60e992009cd20f58e0f"}, + {file = "rpds_py-0.22.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d9bb9242b38a664f307b3b897f093896f7ed51ef4fe25a0502e5a368de9151ea"}, + {file = "rpds_py-0.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b4660943030406aaa40ec9f51960dd88049903d9536bc3c8ebb5cc4e1f119bbe"}, + {file = "rpds_py-0.22.0-cp313-cp313t-win32.whl", hash = "sha256:208ce1d8e3af138d1d9b21d7206356b7f29b96675e0113aea652cf024e4ddfdc"}, + {file = "rpds_py-0.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e6da2e0500742e0f157f005924a0589f2e2dcbfdd6cd0cc0abce367433e989be"}, + {file = "rpds_py-0.22.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f980a0640599a74f27fd9d50c84c293f1cb7afc2046c5c6d3efaf8ec7cdbc326"}, + {file = "rpds_py-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ca505fd3767a09a139737f3278bc8a485cb64043062da89bcba27e2f2ea78d33"}, + {file = "rpds_py-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba235e00e0878ba1080b0f2a761f143b2a2d1c354f3d8e507fbf2f3de401bf18"}, + {file = "rpds_py-0.22.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:81e7a27365b02fe70a77f1365376879917235b3fec551d19b4c91b51d0bc1d07"}, + {file = "rpds_py-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32a0e24cab2daae0503b06666d516e90a080c1a95aff0406b9f03c6489177c4b"}, + {file = "rpds_py-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a73ed43d64209e853bba567a543170267a5cd64f359540b0ca2d597e329ba172"}, + {file = 
"rpds_py-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0abcce5e874474d3eab5ad53be03dae2abe651d248bdeaabe83708e82969e78"}, + {file = "rpds_py-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4e9946c8c7def17e4fcb5eddb14c4eb6ebc7f6f309075e6c8d23b133c104607"}, + {file = "rpds_py-0.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:758098b38c344d9a7f279baf0689261777e601f620078ef5afdc9bd3339965c3"}, + {file = "rpds_py-0.22.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9ad4640a409bc2b7d22b7921e7660f0db96c5c8c69fbb2e8f3261d4f71d33983"}, + {file = "rpds_py-0.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8c48fc7458fe3a74dcdf56ba3534ff41bd421f69436df09ff3497fdaac18b431"}, + {file = "rpds_py-0.22.0-cp39-cp39-win32.whl", hash = "sha256:fde778947304e55fc732bc8ea5c6063e74244ac1808471cb498983a210aaf62c"}, + {file = "rpds_py-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:5fdf91a7c07f40e47b193f2acae0ed9da35d09325d7c3c3279f722b7cbf3d264"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c8fd7a16f7a047e06c747cfcf2acef3ac316132df1c6077445b29ee6f3f3a70b"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6b6e4bcfc32f831bfe3d6d8a5acedfbfd5e252a03c83fa24813b277a3a8a13ca"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eadd2417e83a77ce3ae4a0efd08cb0ebdfd317b6406d11020354a53ad458ec84"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9dc2113e0cf0dd637751ca736186fca63664939ceb9f9f67e93ade88c69c0c9"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc2c00acdf68f1f69a476b770af311a7dc3955b7de228b04a40bcc51ac4d743b"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:dfdabdf8519c93908b2bf0f87c3f86f9e88bab279fb4acfd0907519ca5a1739f"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8338db3c76833d02dc21c3e2c42534091341d26e4f7ba32c6032bb558a02e07b"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8ad4dfda52e64af3202ceb2143a62deba97894b71c64a4405ee80f6b3ea77285"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3b94b074dcce39976db22ea75c7aea8b22d95e6d3b62f76e20e1179a278521d8"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d4f2af3107fe4dc40c0d1a2409863f5249c6796398a1d83c1d99a0b3fa6cfb8d"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:bb11809b0de643a292a82f728c494a2bbef0e30a7c42d37464abbd6bef7ca7b1"}, + {file = "rpds_py-0.22.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c1c21030ed494deb10226f90e2dbd84a012d59810c409832714a3dd576527be2"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:64a0c965a1e299c9b280006bdb15c276c427c45360aed676305dc36bcaa4d13c"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2498ff422823be087b48bc82710deb87ac34f6b7c8034ee39920647647de1e60"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59e63da174ff287db05ef7c21d75974a5bac727ed60452aeb3a14278477842a8"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1c04fb380bc8efaae2fdf17ed6cd5d223da78a8b0b18a610f53d4c5d6e31dfd"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04919ffa9a728c446b27b6b625fa1d00ece221bdb9d633e978a7e0353a12c0e"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:24c28df05bd284879d0fac850ba697077d2a33b7ebcaea6318d6b6cdfdc86ddc"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d33622dc63c295788eed09dbb1d11bed178909d3267b02d873116ee6be368244"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7539dbb8f705e13629ba6f23388976aad809e387f32a6e5c0712e4e8d9bfcce7"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b8906f537978da3f7f0bd1ba37b69f6a877bb43312023b086582707d2835bf2f"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:62ab12fe03ffc49978d29de9c31bbb216610157f7e5ca8e172fed6642aead3be"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:762206ba3bf1d6c8c9e0055871d3c0d5b074b7c3120193e6c067e7866f106ab1"}, + {file = "rpds_py-0.22.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed0102146574e5e9f079b2e1a06e6b5b12a691f9c74a65b93b7f3d4feda566c6"}, + {file = "rpds_py-0.22.0.tar.gz", hash = "sha256:32de71c393f126d8203e9815557c7ff4d72ed1ad3aa3f52f6c7938413176750a"}, ] [[package]] @@ -6936,7 +6931,7 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -optional = false +optional = true python-versions = ">=3.7" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, @@ -7228,13 +7223,13 @@ test = ["vcrpy (>=1.10.3)"] [[package]] name = "typer" -version = "0.14.0" +version = "0.15.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.14.0-py3-none-any.whl", hash = "sha256:f476233a25770ab3e7b2eebf7c68f3bc702031681a008b20167573a4b7018f09"}, - {file = "typer-0.14.0.tar.gz", hash = "sha256:af58f737f8d0c0c37b9f955a6d39000b9ff97813afcbeef56af5e37cf743b45a"}, + {file = "typer-0.15.0-py3-none-any.whl", hash = "sha256:bd16241db7e0f989ce1a0d8faa5aa1e43b9b9ac3fd1d4b8bcff91503d6717e38"}, + {file = "typer-0.15.0.tar.gz", hash = "sha256:8995452a598922ed8d8ad8c06ca63a218881ab601f5fa6fb0c511f7776497c7e"}, ] [package.dependencies] @@ -7280,7 +7275,7 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -optional = false +optional = true python-versions = "*" files = [ {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, @@ -7493,20 +7488,6 @@ docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] -[[package]] -name = "wheel" -version = "0.45.1" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, - {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - [[package]] name = "win-precise-time" version = "1.4.2" @@ -7852,6 +7833,7 @@ type = ["pytest-mypy"] [extras] cli = [] +deepeval = ["deepeval"] filesystem = ["botocore"] llama-index = ["llama-index-core"] neo4j = ["neo4j"] @@ -7863,4 +7845,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.12" -content-hash = "cb3ff99a83c819eb2870ba5dc4f7b7058e7653e25a28d9429de8c75c38603146" +content-hash = 
"b63498e7aa23cfe29d8bea1fc29b0fe4a5f1a9e8ae5ec75d45b4bd20438e26f9" diff --git a/pyproject.toml b/pyproject.toml index e0da2f189..23aba656b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,7 @@ asyncpg = "0.30.0" pgvector = "^0.3.5" psycopg2 = {version = "^2.9.10", optional = true} llama-index-core = {version = "^0.11.22", optional = true} -deepeval = "^2.0.1" +deepeval = {version = "^2.0.1", optional = true} [tool.poetry.extras] filesystem = ["s3fs", "botocore"] @@ -81,6 +81,8 @@ neo4j = ["neo4j"] postgres = ["psycopg2", "pgvector", "asyncpg"] notebook = ["ipykernel", "overrides", "ipywidgets", "jupyterlab", "jupyterlab_widgets", "jupyterlab-server", "jupyterlab-git"] llama-index = ["llama-index-core"] +deepeval = ["deepeval"] + [tool.poetry.group.dev.dependencies] From a0d5102bd8a704b5075c2e51fa27ddcdbb59e97e Mon Sep 17 00:00:00 2001 From: Rita Aleksziev Date: Tue, 3 Dec 2024 17:22:23 +0100 Subject: [PATCH 50/52] add some spaces for readability --- evals/llm_as_a_judge.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/evals/llm_as_a_judge.py b/evals/llm_as_a_judge.py index c84795642..4deee7d7d 100644 --- a/evals/llm_as_a_judge.py +++ b/evals/llm_as_a_judge.py @@ -37,8 +37,10 @@ async def answer_without_cognee(instance): async def answer_with_cognee(instance): await cognee.prune.prune_data() await cognee.prune.prune_system(metadata=True) + for (title, sentences) in instance["context"]: await cognee.add("\n".join(sentences), dataset_name = "HotPotQA") + await cognee.cognify("HotPotQA") search_results = await cognee.search( @@ -58,11 +60,13 @@ async def answer_with_cognee(instance): system_prompt=system_prompt, response_model=str, ) + return answer_prediction async def eval_answers(instances, answers, eval_metric): test_cases = [] + for instance, answer in zip(instances, answers): test_case = LLMTestCase( input=instance["question"], @@ -70,19 +74,24 @@ async def eval_answers(instances, answers, eval_metric): 
expected_output=instance["answer"] ) test_cases.append(test_case) + eval_set = EvaluationDataset(test_cases) eval_results = eval_set.evaluate([eval_metric]) + return eval_results async def eval_on_hotpotQA(answer_provider, num_samples, eval_metric): base_config = get_base_config() data_root_dir = base_config.data_root_directory + if not Path(data_root_dir).exists(): Path(data_root_dir).mkdir() + filepath = data_root_dir / Path("hotpot_dev_fullwiki_v1.json") if not filepath.exists(): url = 'http://curtis.ml.cmu.edu/datasets/hotpot/hotpot_dev_fullwiki_v1.json' wget.download(url, out=data_root_dir) + with open(filepath, "r") as file: dataset = json.load(file) @@ -91,15 +100,19 @@ async def eval_on_hotpotQA(answer_provider, num_samples, eval_metric): for instance in tqdm(instances, desc="Getting answers"): answer = await answer_provider(instance) answers.append(answer) + eval_results = await eval_answers(instances, answers, eval_metric) avg_score = statistics.mean([result.metrics_data[0].score for result in eval_results.test_results]) + return avg_score if __name__ == "__main__": parser = argparse.ArgumentParser() + parser.add_argument("--with_cognee", action="store_true") parser.add_argument("--num_samples", type=int, default=500) parser.add_argument("--metric", type=str, default="correctness_metric") + args = parser.parse_args() try: @@ -107,9 +120,11 @@ if __name__ == "__main__": metric = metric_cls() except AttributeError: metric = getattr(evals.deepeval_metrics, args.metric) + if args.with_cognee: answer_provider = answer_with_cognee else: answer_provider = answer_without_cognee + avg_score = asyncio.run(eval_on_hotpotQA(answer_provider, args.num_samples, metric)) print(f"Average {args.metric}: {avg_score}") \ No newline at end of file From 27416afed07bc6e9c5c291227f7edc3232151c54 Mon Sep 17 00:00:00 2001 From: Boris Arzentar Date: Tue, 3 Dec 2024 21:13:50 +0100 Subject: [PATCH 51/52] fix: lancedb batch merge --- .../vector/lancedb/LanceDBAdapter.py | 16 
+++--------- cognee/modules/engine/models/Entity.py | 1 + cognee/modules/engine/models/EntityType.py | 1 + .../utils/expand_with_nodes_and_edges.py | 26 +++++++++---------- .../graph/utils/get_graph_from_model.py | 4 +-- cognee/tasks/storage/add_data_points.py | 9 ++++++- 6 files changed, 29 insertions(+), 28 deletions(-) diff --git a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py index 147b79c21..5a1d7be35 100644 --- a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +++ b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py @@ -114,19 +114,11 @@ class LanceDBAdapter(VectorDBInterface): for (data_point_index, data_point) in enumerate(data_points) ] - # TODO: This enables us to work with pydantic version but shouldn't - # stay like this, existing rows should be updated + await collection.merge_insert("id") \ + .when_matched_update_all() \ + .when_not_matched_insert_all() \ + .execute(lance_data_points) - await collection.delete("id IS NOT NULL") - - original_size = await collection.count_rows() - await collection.add(lance_data_points) - new_size = await collection.count_rows() - - if new_size <= original_size: - raise InvalidValueError(message= - "LanceDB create_datapoints error: data points did not get added.") - async def retrieve(self, collection_name: str, data_point_ids: list[str]): connection = await self.get_connection() diff --git a/cognee/modules/engine/models/Entity.py b/cognee/modules/engine/models/Entity.py index adf22dfef..d013c395b 100644 --- a/cognee/modules/engine/models/Entity.py +++ b/cognee/modules/engine/models/Entity.py @@ -9,6 +9,7 @@ class Entity(DataPoint): is_a: EntityType description: str mentioned_in: DocumentChunk + _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/engine/models/EntityType.py b/cognee/modules/engine/models/EntityType.py index 438191cd7..f7cf7867e 100644 --- 
a/cognee/modules/engine/models/EntityType.py +++ b/cognee/modules/engine/models/EntityType.py @@ -8,6 +8,7 @@ class EntityType(DataPoint): type: str description: str exists_in: DocumentChunk + _metadata: dict = { "index_fields": ["name"], } diff --git a/cognee/modules/graph/utils/expand_with_nodes_and_edges.py b/cognee/modules/graph/utils/expand_with_nodes_and_edges.py index 5b443d12a..4b8de868e 100644 --- a/cognee/modules/graph/utils/expand_with_nodes_and_edges.py +++ b/cognee/modules/graph/utils/expand_with_nodes_and_edges.py @@ -34,11 +34,11 @@ def expand_with_nodes_and_edges( if f"{str(type_node_id)}_type" not in added_nodes_map: type_node = EntityType( - id=type_node_id, - name=type_node_name, - type=type_node_name, - description=type_node_name, - exists_in=graph_source, + id = type_node_id, + name = type_node_name, + type = type_node_name, + description = type_node_name, + exists_in = graph_source, ) added_nodes_map[f"{str(type_node_id)}_type"] = type_node else: @@ -46,11 +46,11 @@ def expand_with_nodes_and_edges( if f"{str(node_id)}_entity" not in added_nodes_map: entity_node = Entity( - id=node_id, - name=node_name, - is_a=type_node, - description=node.description, - mentioned_in=graph_source, + id = node_id, + name = node_name, + is_a = type_node, + description = node.description, + mentioned_in = graph_source, ) data_points.append(entity_node) added_nodes_map[f"{str(node_id)}_entity"] = entity_node @@ -70,11 +70,11 @@ def expand_with_nodes_and_edges( target_node_id, edge.relationship_name, dict( - relationship_name=generate_edge_name( + relationship_name = generate_edge_name( edge.relationship_name ), - source_node_id=source_node_id, - target_node_id=target_node_id, + source_node_id = source_node_id, + target_node_id = target_node_id, ), ) ) diff --git a/cognee/modules/graph/utils/get_graph_from_model.py b/cognee/modules/graph/utils/get_graph_from_model.py index d49908354..4522e35dd 100644 --- a/cognee/modules/graph/utils/get_graph_from_model.py +++ 
b/cognee/modules/graph/utils/get_graph_from_model.py @@ -19,8 +19,8 @@ async def get_graph_from_model( data_point_properties = {} excluded_properties = set() - if include_root: - added_nodes[str(data_point.id)] = True + if str(data_point.id) in added_nodes: + return nodes, edges for field_name, field_value in data_point: if field_name == "_metadata": diff --git a/cognee/tasks/storage/add_data_points.py b/cognee/tasks/storage/add_data_points.py index c7af36007..67c3dd0c0 100644 --- a/cognee/tasks/storage/add_data_points.py +++ b/cognee/tasks/storage/add_data_points.py @@ -9,8 +9,15 @@ async def add_data_points(data_points: list[DataPoint]): nodes = [] edges = [] + added_nodes = {} + added_edges = {} + results = await asyncio.gather(*[ - get_graph_from_model(data_point) for data_point in data_points + get_graph_from_model( + data_point, + added_nodes = added_nodes, + added_edges = added_edges, + ) for data_point in data_points ]) for result_nodes, result_edges in results: From 0b8b2709333eace9f178ac2f8f3534a9aa171986 Mon Sep 17 00:00:00 2001 From: Boris Arzentar Date: Tue, 3 Dec 2024 21:47:23 +0100 Subject: [PATCH 52/52] fix: make get_embeddable_data static --- .../vector/lancedb/LanceDBAdapter.py | 2 +- .../vector/pgvector/PGVectorAdapter.py | 4 ++-- .../databases/vector/qdrant/QDrantAdapter.py | 4 +++- .../vector/weaviate_db/WeaviateAdapter.py | 24 ++++++++++++------- .../infrastructure/engine/models/DataPoint.py | 9 +++---- .../ingestion/ingest_data_with_metadata.py | 4 ++-- cognee/tasks/storage/index_data_points.py | 1 - poetry.lock | 4 ++-- pyproject.toml | 2 +- 9 files changed, 32 insertions(+), 22 deletions(-) diff --git a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py index 5a1d7be35..37d340004 100644 --- a/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py +++ b/cognee/infrastructure/databases/vector/lancedb/LanceDBAdapter.py @@ -87,7 +87,7 @@ class 
LanceDBAdapter(VectorDBInterface): collection = await connection.open_table(collection_name) data_vectors = await self.embed_data( - [data_point.get_embeddable_data() for data_point in data_points] + [DataPoint.get_embeddable_data(data_point) for data_point in data_points] ) IdType = TypeVar("IdType") diff --git a/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py b/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py index 27db2c276..f2e5ee369 100644 --- a/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py +++ b/cognee/infrastructure/databases/vector/pgvector/PGVectorAdapter.py @@ -102,7 +102,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface): ) data_vectors = await self.embed_data( - [data_point.get_embeddable_data() for data_point in data_points] + [DataPoint.get_embeddable_data(data_point) for data_point in data_points] ) vector_size = self.embedding_engine.get_vector_size() @@ -143,7 +143,7 @@ class PGVectorAdapter(SQLAlchemyAdapter, VectorDBInterface): await self.create_data_points(f"{index_name}_{index_property_name}", [ IndexSchema( id = data_point.id, - text = data_point.get_embeddable_data(), + text = DataPoint.get_embeddable_data(data_point), ) for data_point in data_points ]) diff --git a/cognee/infrastructure/databases/vector/qdrant/QDrantAdapter.py b/cognee/infrastructure/databases/vector/qdrant/QDrantAdapter.py index dc33e98ae..d5d2a1a5c 100644 --- a/cognee/infrastructure/databases/vector/qdrant/QDrantAdapter.py +++ b/cognee/infrastructure/databases/vector/qdrant/QDrantAdapter.py @@ -102,7 +102,9 @@ class QDrantAdapter(VectorDBInterface): async def create_data_points(self, collection_name: str, data_points: List[DataPoint]): client = self.get_qdrant_client() - data_vectors = await self.embed_data([data_point.get_embeddable_data() for data_point in data_points]) + data_vectors = await self.embed_data([ + DataPoint.get_embeddable_data(data_point) for data_point in data_points + ]) def 
convert_to_qdrant_point(data_point: DataPoint): return models.PointStruct( diff --git a/cognee/infrastructure/databases/vector/weaviate_db/WeaviateAdapter.py b/cognee/infrastructure/databases/vector/weaviate_db/WeaviateAdapter.py index 0c97dc9a8..c16f765b0 100644 --- a/cognee/infrastructure/databases/vector/weaviate_db/WeaviateAdapter.py +++ b/cognee/infrastructure/databases/vector/weaviate_db/WeaviateAdapter.py @@ -83,7 +83,7 @@ class WeaviateAdapter(VectorDBInterface): from weaviate.classes.data import DataObject data_vectors = await self.embed_data( - [data_point.get_embeddable_data() for data_point in data_points] + [DataPoint.get_embeddable_data(data_point) for data_point in data_points] ) def convert_to_weaviate_data_points(data_point: DataPoint): @@ -116,12 +116,20 @@ class WeaviateAdapter(VectorDBInterface): ) else: data_point: DataObject = data_points[0] - return collection.data.update( - uuid = data_point.uuid, - vector = data_point.vector, - properties = data_point.properties, - references = data_point.references, - ) + if collection.data.exists(data_point.uuid): + return collection.data.update( + uuid = data_point.uuid, + vector = data_point.vector, + properties = data_point.properties, + references = data_point.references, + ) + else: + return collection.data.insert( + uuid = data_point.uuid, + vector = data_point.vector, + properties = data_point.properties, + references = data_point.references, + ) except Exception as error: logger.error("Error creating data points: %s", str(error)) raise error @@ -133,7 +141,7 @@ class WeaviateAdapter(VectorDBInterface): await self.create_data_points(f"{index_name}_{index_property_name}", [ IndexSchema( id = data_point.id, - text = data_point.get_embeddable_data(), + text = DataPoint.get_embeddable_data(data_point), ) for data_point in data_points ]) diff --git a/cognee/infrastructure/engine/models/DataPoint.py b/cognee/infrastructure/engine/models/DataPoint.py index b76971f34..abb924f2f 100644 --- 
a/cognee/infrastructure/engine/models/DataPoint.py +++ b/cognee/infrastructure/engine/models/DataPoint.py @@ -19,10 +19,11 @@ class DataPoint(BaseModel): # class Config: # underscore_attrs_are_private = True - def get_embeddable_data(self): - if self._metadata and len(self._metadata["index_fields"]) > 0 \ - and hasattr(self, self._metadata["index_fields"][0]): - attribute = getattr(self, self._metadata["index_fields"][0]) + @classmethod + def get_embeddable_data(self, data_point): + if data_point._metadata and len(data_point._metadata["index_fields"]) > 0 \ + and hasattr(data_point, data_point._metadata["index_fields"][0]): + attribute = getattr(data_point, data_point._metadata["index_fields"][0]) if isinstance(attribute, str): return attribute.strip() diff --git a/cognee/tasks/ingestion/ingest_data_with_metadata.py b/cognee/tasks/ingestion/ingest_data_with_metadata.py index 0c17b71f5..abd3c9f94 100644 --- a/cognee/tasks/ingestion/ingest_data_with_metadata.py +++ b/cognee/tasks/ingestion/ingest_data_with_metadata.py @@ -20,8 +20,8 @@ async def ingest_data_with_metadata(data: Any, dataset_name: str, user: User): destination = get_dlt_destination() pipeline = dlt.pipeline( - pipeline_name="file_load_from_filesystem", - destination=destination, + pipeline_name = "file_load_from_filesystem", + destination = destination, ) @dlt.resource(standalone = True, merge_key = "id") diff --git a/cognee/tasks/storage/index_data_points.py b/cognee/tasks/storage/index_data_points.py index 58e4f096d..786168b58 100644 --- a/cognee/tasks/storage/index_data_points.py +++ b/cognee/tasks/storage/index_data_points.py @@ -1,4 +1,3 @@ -import asyncio from cognee.infrastructure.databases.vector import get_vector_engine from cognee.infrastructure.engine import DataPoint diff --git a/poetry.lock b/poetry.lock index ed9d932b0..9b309de51 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4659,8 +4659,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < 
\"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -7840,4 +7840,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.9.0,<3.12" -content-hash = "b63498e7aa23cfe29d8bea1fc29b0fe4a5f1a9e8ae5ec75d45b4bd20438e26f9" +content-hash = "e2360f4be222743bb83b1e7316185c5f62bd73c0baaab3eee984e1c84f1cea65" diff --git a/pyproject.toml b/pyproject.toml index 23aba656b..46d0a89a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ aiosqlite = "^0.20.0" pandas = "2.0.3" filetype = "^1.2.0" nltk = "^3.8.1" -dlt = {extras = ["sqlalchemy"], version = "^1.3.0"} +dlt = {extras = ["sqlalchemy"], version = "^1.4.1"} aiofiles = "^23.2.1" qdrant-client = "^1.9.0" graphistry = "^0.33.5"