chore: add functions to improve user experience

parent fc7851156a
commit 370b74988e

38 changed files with 854 additions and 1862 deletions
.gitignore (vendored), 4 changes

@@ -163,8 +163,8 @@ cython_debug/
#.idea/
.vscode/
database/data/
cognee/data/
cognee/cache/

# .DS_Store
# Default cognee system directory, used in development
.cognee_system/
@@ -1,6 +0,0 @@
A quantum computer is a computer that takes advantage of quantum mechanical phenomena.
At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states.
Classical physics cannot explain the operation of these quantum devices, and a scalable quantum computer could perform some calculations exponentially faster (with respect to input size scaling)[2] than any modern "classical" computer. In particular, a large-scale quantum computer could break widely used encryption schemes and aid physicists in performing physical simulations; however, the current state of the technology is largely experimental and impractical, with several obstacles to useful applications. Moreover, scalable quantum computers do not hold promise for many practical tasks, and for many important tasks quantum speedups are proven impossible.
The basic unit of information in quantum computing is the qubit, similar to the bit in traditional digital electronics. Unlike a classical bit, a qubit can exist in a superposition of its two "basis" states. When measuring a qubit, the result is a probabilistic output of a classical bit, therefore making quantum computers nondeterministic in general. If a quantum computer manipulates the qubit in a particular way, wave interference effects can amplify the desired measurement results. The design of quantum algorithms involves creating procedures that allow a quantum computer to perform calculations efficiently and quickly.
Physically engineering high-quality qubits has proven challenging. If a physical qubit is not sufficiently isolated from its environment, it suffers from quantum decoherence, introducing noise into calculations. Paradoxically, perfectly isolating qubits is also undesirable because quantum computations typically need to initialize qubits, perform controlled qubit interactions, and measure the resulting quantum states. Each of those operations introduces errors and suffers from noise, and such inaccuracies accumulate.
In principle, a non-quantum (classical) computer can solve the same computational problems as a quantum computer, given enough time. Quantum advantage comes in the form of time complexity rather than computability, and quantum complexity theory shows that some quantum algorithms for carefully selected tasks require exponentially fewer computational steps than the best known non-quantum algorithms. Such tasks can in theory be solved on a large-scale quantum computer whereas classical computers would not finish computations in any reasonable amount of time. However, quantum speedup is not universal or even typical across computational tasks, since basic tasks such as sorting are proven to not allow any asymptotic quantum speedup. Claims of quantum supremacy have drawn significant attention to the discipline, but are demonstrated on contrived tasks, while near-term practical use cases remain limited.
.test_data/Natural language processing.txt (new file), 2 additions

@@ -0,0 +1,2 @@
Natural language processing (NLP) is an interdisciplinary subfield of computer science and information retrieval. It is primarily concerned with giving computers the ability to support and manipulate human language. It involves processing natural language datasets, such as text corpora or speech corpora, using either rule-based or probabilistic (i.e. statistical and, most recently, neural network-based) machine learning approaches. The goal is a computer capable of "understanding"[citation needed] the contents of documents, including the contextual nuances of the language within them. To this end, natural language processing often borrows ideas from theoretical linguistics. The technology can then accurately extract information and insights contained in the documents as well as categorize and organize the documents themselves.
Challenges in natural language processing frequently involve speech recognition, natural-language understanding, and natural-language generation.
README.md, 168 changes

@@ -28,69 +28,12 @@ Make data processing for LLMs easy
</a>
</p>

[//]: # (<p>)
[//]: # ( <b>Share cognee Repository</b>)
[//]: # (</p>)
[//]: # (<p>)
[//]: # ( <a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee&hashtags=AGI,Autonomics,Cognee,future" target="_blank">)
[//]: # ( <img src="https://img.shields.io/twitter/follow/_promethAI?label=Share Repo on Twitter&style=social" alt="Follow Cognee"/>)
[//]: # ( </a>)
[//]: # ( <a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee" target="_blank">)
[//]: # ( <img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Telegram"/>)
[//]: # ( </a>)
[//]: # ( <a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%20https://github.com/topoteretes/cognee" target="_blank">)
[//]: # ( <img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/topoteretes/cognee" />)
[//]: # ( </a>)
[//]: # ( <a href="https://www.reddit.com/submit?url=https://github.com/topoteretes/cognee&title=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.)
[//]: # ( " target="_blank">)
[//]: # ( <img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Reddit"/>)
[//]: # ( </a>)
[//]: # ( <a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.&body=Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%3A%0Ahttps://github.com/topoteretes/cognee" target="_blank">)
[//]: # ( <img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/topoteretes/cognee"/>)
[//]: # ( </a>)
[//]: # ( <a href="https://www.buymeacoffee.com/promethAI" target="_blank">)
[//]: # ( <img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px">)
[//]: # ( </a>)
[//]: # (</p>)

[//]: # ()
[//]: # (<hr>)

[//]: # ()
[//]: # ([Star us on Github!](https://www.github.com/topoteretes/cognee))

[//]: # ()
[//]: # (<a href="https://www.cognee.ai" target="_blank">Cognee</a> runs in iterations, from POC towards production-ready code.)


## 🚀 It's alive
<p>
Try it yourself on Whatsapp with one of our <a href="https://keepi.ai">partners</a> by typing `/save {content you want to save}` followed by `/query {knowledge you saved previously}`
For more info here are the <a href="https://topoteretes.github.io/cognee/">docs</a>
<p>
Try it yourself on Whatsapp with one of our <a href="https://keepi.ai" target="_blank">partners</a> by typing `/save {content you want to save}` followed by `/query {knowledge you saved previously}`
For more info here are the <a href="https://topoteretes.github.io/cognee">docs</a>
</p>
@@ -110,59 +53,122 @@ poetry add "cognee[weaviate]"

## 💻 Usage

Check out our demo notebook [here](https://github.com/topoteretes/cognee/blob/main/notebooks/cognee%20-%20Get%20Started.ipynb)
### Setup

Create a `.env` file in your project to store environment variables such as API keys.

- Set OpenAI API Key as an environment variable
Note: Don't push the `.env` file to a git repo, as it would expose those keys to others.

If cognee is installed with Weaviate as the vector database provider, add the Weaviate environment variables.
```
import os

# Setting an environment variable
os.environ['OPENAI_API_KEY'] = ''

WEAVIATE_URL = {YOUR_WEAVIATE_URL}
WEAVIATE_API_KEY = {YOUR_WEAVIATE_API_KEY}
```

Otherwise, if cognee is installed with the default (Qdrant) vector database provider, add the Qdrant environment variables.
```
QDRANT_URL = {YOUR_QDRANT_URL}
QDRANT_API_KEY = {YOUR_QDRANT_API_KEY}
```

- Add a new piece of information to storage
Add the OpenAI API key environment variable
```
OPENAI_API_KEY = {YOUR_OPENAI_API_KEY}
```

Cognee stores data and system files inside the library directory, which is lost if the library folder is removed.
You can change the directories where cognee stores data and system files by calling its config functions.
```
import cognee
cognee.add(absolute_data_path, dataset_name)

cognee.config.system_root_directory(absolute_path_to_directory)

cognee.config.data_root_directory(absolute_path_to_directory)
```

- Use LLMs and cognee to create graphs

### Run

Add a new piece of information to storage
```
import cognee

cognee.add("some_text", dataset_name)

cognee.add([
    "some_text_1",
    "some_text_2",
    "some_text_3",
    ...
])
```
Or
```
cognee.add("file://{absolute_path_to_file}", dataset_name)

cognee.add(
    [
        "file://{absolute_path_to_file_1}",
        "file://{absolute_path_to_file_2}",
        "file://{absolute_path_to_file_3}",
        ...
    ],
    dataset_name
)
```
Or
```
cognee.add("data://{absolute_path_to_directory}", dataset_name)

# This is useful if you have a directory with files organized in subdirectories.
# You can target which directory to add by providing dataset_name.
# Example:
#        root
#       /    \
#  reports   bills
#   /    \
# 2024   2023
#
# cognee.add("data://{absolute_path_to_root}", "reports.2024")
# This will add just the directory 2024 under reports.
```
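Since `add` is defined with `async def` (see the add.py diff below), calling it from a plain script needs an event loop; a minimal sketch, assuming an arbitrary dataset name:

```
import asyncio
import cognee

async def main():
    # await the coroutine; it resolves once the ingestion pipeline finishes
    await cognee.add("some_text", "my_dataset")

asyncio.run(main())
```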
Use LLMs and cognee to create graphs
```
cognee.cognify(dataset_name)
```
- Render the graph after adding your Graphistry credentials to .env
Render the graph with our util function

```
graph_url = await render_graph(graph, graph_type = "networkx")
from cognee.utils import render_graph

graph_url = await render_graph(graph)

print(graph_url)
```

- Query the graph for a piece of information

Query the graph for a piece of information
```
query_params = {
    SearchType.SIMILARITY: {'query': 'your search query here'}
}
cognee.search(graph, query_params)
```

search_results = cognee.search(graph, query_params)

print(search_results)
```
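Putting the steps together, a hedged end-to-end sketch under the API shown above; `cognify` is `async def` in this commit, and `search` is assumed awaitable here:

```
import asyncio
import cognee
from cognee import SearchType

async def main():
    await cognee.add("Natural language processing (NLP) is an interdisciplinary subfield...", "example")
    graph = await cognee.cognify("example")

    query_params = {
        SearchType.SIMILARITY: {"query": "What is NLP?"}
    }
    search_results = await cognee.search(graph, query_params)
    print(search_results)

asyncio.run(main())
```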
## Demo

[<img src="https://i3.ytimg.com/vi/-ARUfIzhzC4/maxresdefault.jpg" width="100%">](https://youtu.be/-ARUfIzhzC4 "Learn about cognee: 55")

Check out our demo notebook [here](https://github.com/topoteretes/cognee/blob/main/notebooks/cognee%20-%20Get%20Started.ipynb)

## Architecture

[<img src="https://i3.ytimg.com/vi/-ARUfIzhzC4/maxresdefault.jpg" width="100%">](https://youtu.be/-ARUfIzhzC4 "Learn about cognee: 55")

### How Cognee Enhances Your Contextual Memory

Our framework for the OpenAI, Graph (Neo4j) and Vector (Weaviate) databases introduces three key enhancements:
@@ -1,5 +1,6 @@
from .api.v1.config.config import config
from .api.v1.add.add import add
from .api.v1.cognify.cognify import cognify
from .api.v1.list_datasets.list_datasets import list_datasets
from .api.v1.datasets.datasets import datasets
from .api.v1.search.search import search, SearchType
from .api.v1.prune import prune
@@ -1,13 +1,13 @@
from typing import List, Union
from os import path, listdir
from os import path
import asyncio
import dlt
import duckdb
from cognee.root_dir import get_absolute_path
import cognee.modules.ingestion as ingestion
from cognee.infrastructure import infrastructure_config
from cognee.infrastructure.files import get_file_metadata
from cognee.infrastructure.files.storage import LocalStorage
from cognee.modules.discovery import discover_directory_datasets

async def add(data_path: Union[str, List[str]], dataset_name: str = None):
    if isinstance(data_path, str):
@@ -21,15 +21,32 @@ async def add(data_path: Union[str, List[str]], dataset_name: str = None):
    else:
        return await add_text(data_path, dataset_name)

    # data_path is a list of file paths
    return await add_files(data_path, dataset_name)
    # data_path is a list of file paths or texts
    file_paths = []
    texts = []

    for file_path in data_path:
        if file_path.startswith("/") or file_path.startswith("file://"):
            file_paths.append(file_path)
        else:
            texts.append(file_path)

    awaitables = []

    if len(file_paths) > 0:
        awaitables.append(add_files(file_paths, dataset_name))

    if len(texts) > 0:
        for text in texts:
            awaitables.append(add_text(text, dataset_name))

    return await asyncio.gather(*awaitables)

async def add_files(file_paths: List[str], dataset_name: str):
    data_directory_path = infrastructure_config.get_config()["data_path"]
    db_path = get_absolute_path("./data/cognee")
    db_location = f"{db_path}/cognee.duckdb"
    infra_config = infrastructure_config.get_config()
    data_directory_path = infra_config["data_root_directory"]

    LocalStorage.ensure_directory_exists(db_path)
    LocalStorage.ensure_directory_exists(infra_config["database_directory_path"])

    processed_file_paths = []
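For orientation, a minimal usage sketch of the dispatch above; the paths and dataset name are hypothetical:

```
import asyncio
import cognee

# Strings starting with "/" or "file://" are batched into add_files;
# everything else is treated as raw text and routed to add_text, and
# the results are gathered concurrently.
asyncio.run(cognee.add(
    ["file:///tmp/report.pdf", "/tmp/notes.txt", "a plain text snippet"],
    "example_dataset",
))
```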
@@ -38,14 +55,17 @@ async def add_files(file_paths: List[str], dataset_name: str):

        if data_directory_path not in file_path:
            file_name = file_path.split("/")[-1]
            dataset_file_path = data_directory_path + "/" + dataset_name.replace('.', "/") + "/" + file_name
            file_directory_path = data_directory_path + "/" + (dataset_name.replace('.', "/") + "/" if dataset_name != "root" else "")
            dataset_file_path = path.join(file_directory_path, file_name)

            LocalStorage.ensure_directory_exists(file_directory_path)

            LocalStorage.copy_file(file_path, dataset_file_path)
            processed_file_paths.append(dataset_file_path)
        else:
            processed_file_paths.append(file_path)

    db = duckdb.connect(db_location)
    db = duckdb.connect(infra_config["database_path"])

    destination = dlt.destinations.duckdb(
        credentials = db,
@@ -84,29 +104,8 @@ async def add_files(file_paths: List[str], dataset_name: str):

    return run_info

def extract_datasets_from_data(root_dir_path: str, parent_dir: str = "root"):
    datasets = {}

    root_dir_path = root_dir_path.replace("file://", "")

    for file_or_dir in listdir(root_dir_path):
        if path.isdir(path.join(root_dir_path, file_or_dir)):
            dataset_name = file_or_dir if parent_dir == "root" else parent_dir + "." + file_or_dir

            nested_datasets = extract_datasets_from_data("file://" + path.join(root_dir_path, file_or_dir), dataset_name)

            for dataset in nested_datasets.keys():
                datasets[dataset] = nested_datasets[dataset]
        else:
            if parent_dir not in datasets:
                datasets[parent_dir] = []

            datasets[parent_dir].append(path.join(root_dir_path, file_or_dir))

    return datasets

async def add_data_directory(data_path: str, dataset_name: str = None):
    datasets = extract_datasets_from_data(data_path)
    datasets = discover_directory_datasets(data_path)

    results = []
@@ -117,7 +116,7 @@ async def add_data_directory(data_path: str, dataset_name: str = None):
    return await asyncio.gather(*results)

async def add_text(text: str, dataset_name: str):
    data_directory_path = infrastructure_config.get_config()["data_path"]
    data_directory_path = infrastructure_config.get_config()["data_root_directory"]

    classified_data = ingestion.classify(text)
    data_id = ingestion.identify(classified_data)
@@ -22,7 +22,6 @@ from cognee.shared.data_models import DefaultContentPrediction, KnowledgeGraph,
    SummarizedContent, LabeledContent
from cognee.infrastructure.databases.graph.get_graph_client import get_graph_client
from cognee.shared.data_models import GraphDBType
from cognee.infrastructure.databases.relational import DuckDBAdapter
from cognee.modules.cognify.graph.add_document_node import add_document_node
from cognee.modules.cognify.graph.initialize_graph import initialize_graph
from cognee.infrastructure.files.utils.guess_file_type import guess_file_type
@@ -39,10 +38,10 @@ USER_ID = "default_user"
async def cognify(datasets: Union[str, List[str]] = None, graph_data_model: object = None):
    """This function is responsible for the cognitive processing of the content."""

    db = DuckDBAdapter()
    db_engine = infrastructure_config.get_config()["database_engine"]

    if datasets is None or len(datasets) == 0:
        datasets = db.get_datasets()
        datasets = db_engine.get_datasets()

    awaitables = []
@@ -55,14 +54,14 @@ async def cognify(datasets: Union[str, List[str]] = None, graph_data_model: obje
        return graphs[0]

    # datasets is a dataset name string
    added_datasets = db.get_datasets()
    added_datasets = db_engine.get_datasets()

    files_metadata = []
    dataset_name = datasets.replace(".", "_").replace(" ", "_")

    for added_dataset in added_datasets:
        if dataset_name in added_dataset:
            files_metadata.extend(db.get_files_metadata(added_dataset))
            files_metadata.extend(db_engine.get_files_metadata(added_dataset))

    awaitables = []
@@ -168,10 +167,10 @@ async def process_text(input_text: str, file_metadata: dict):
    unique_layers = nodes_by_layer.keys()

    try:
        db_engine = infrastructure_config.get_config()["vector_engine"]
        vector_engine = infrastructure_config.get_config()["vector_engine"]

        for layer in unique_layers:
            await db_engine.create_collection(layer)
            await vector_engine.create_collection(layer)
    except Exception as e:
        print(e)
@@ -198,7 +197,7 @@ if __name__ == "__main__":
    async def main():
        graph = await cognify(datasets=['izmene'])
        from cognee.utils import render_graph
        graph_url = await render_graph(graph, graph_type="networkx")
        graph_url = await render_graph(graph)
        print(graph_url)
@@ -1,9 +1,14 @@
from typing import Optional
from cognee.infrastructure import infrastructure_config

class config():
    @staticmethod
    def data_path(data_path: Optional[str] = None) -> str:
    def system_root_directory(system_root_directory: str):
        infrastructure_config.set_config({
            "data_path": data_path
            "system_root_directory": system_root_directory
        })

    @staticmethod
    def data_root_directory(data_root_directory: str):
        infrastructure_config.set_config({
            "data_root_directory": data_root_directory
        })
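A minimal sketch of calling the new setters from application code; the paths are hypothetical:

```
import cognee

cognee.config.system_root_directory("/var/lib/cognee/.cognee_system")
cognee.config.data_root_directory("/var/lib/cognee/.data")
```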
cognee/api/v1/datasets/datasets.py (new file), 17 additions

@@ -0,0 +1,17 @@
from cognee.modules.discovery import discover_directory_datasets
from cognee.infrastructure import infrastructure_config

class datasets():
    @staticmethod
    def list_datasets():
        db = infrastructure_config.get_config()["database_engine"]
        return db.get_datasets()

    @staticmethod
    def discover_datasets(directory_path: str):
        return list(discover_directory_datasets(directory_path).keys())

    @staticmethod
    def query_data(dataset_name: str):
        db = infrastructure_config.get_config()["database_engine"]
        return db.get_files_metadata(dataset_name)
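Assuming the class is exposed as `cognee.datasets` via the `__init__.py` change above, a hedged usage sketch (the directory path is hypothetical):

```
import cognee

print(cognee.datasets.list_datasets())                  # dataset names known to the relational store
print(cognee.datasets.discover_datasets("/data/root"))  # dataset names implied by a directory tree
print(cognee.datasets.query_data("example_dataset"))    # file metadata records for one dataset
```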
@@ -1,6 +0,0 @@

from cognee.infrastructure.databases.relational import DuckDBAdapter

def list_datasets():
    db = DuckDBAdapter()
    return db.get_datasets()
cognee/api/v1/prune/__init__.py (new file), 1 addition

@@ -0,0 +1 @@
from .prune import prune
cognee/api/v1/prune/prune.py (new file), 17 additions

@@ -0,0 +1,17 @@
from cognee.infrastructure.files.storage import LocalStorage
from cognee.infrastructure import infrastructure_config

class prune():
    @staticmethod
    async def prune_data():
        data_root_directory = infrastructure_config.get_config()["data_root_directory"]
        LocalStorage.remove_all(data_root_directory)

    @staticmethod
    async def prune_system():
        infra_config = infrastructure_config.get_config()
        system_root_directory = infra_config["system_root_directory"]
        LocalStorage.remove_all(system_root_directory)

        vector_engine = infra_config["vector_engine"]
        await vector_engine.prune()
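Both prune methods are coroutines; a minimal sketch of resetting local state, assuming `prune` is exported as `cognee.prune` per the `__init__.py` above:

```
import asyncio
import cognee

async def reset():
    await cognee.prune.prune_data()    # removes the data root directory
    await cognee.prune.prune_system()  # removes system files and prunes the vector store

asyncio.run(reset())
```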
@@ -7,6 +7,7 @@ from typing import Optional, Dict, Any
from dataclasses import dataclass, field
from pathlib import Path
from dotenv import load_dotenv
from cognee.root_dir import get_absolute_path


base_dir = Path(__file__).resolve().parent.parent
@@ -27,21 +28,25 @@ class Config:
        )
    )

    data_path = os.getenv("DATA_PATH", str(Path(__file__).resolve().parent.parent / ".data"))

    db_path = str(Path(__file__).resolve().parent / "data/system")
    system_root_directory = get_absolute_path(".cognee_system")
    data_root_directory = os.getenv("DATA_PATH", get_absolute_path(".data"))

    vectordb: str = os.getenv("VECTORDB", "weaviate")

    qdrant_path: str = os.getenv("QDRANT_PATH")
    qdrant_url: str = os.getenv("QDRANT_URL")
    qdrant_api_key: str = os.getenv("QDRANT_API_KEY")
    db_type: str = os.getenv("DB_TYPE", "sqlite")
    db_name: str = os.getenv("DB_NAME", "cognee.sqlite")

    db_path: str = os.getenv("COGNEE_DB_PATH", "cognee")
    db_name: str = os.getenv("DB_NAME", "cognee.db")
    db_host: str = os.getenv("DB_HOST", "localhost")
    db_port: str = os.getenv("DB_PORT", "5432")
    db_user: str = os.getenv("DB_USER", "cognee")
    db_password: str = os.getenv("DB_PASSWORD", "cognee")

    sqlalchemy_logging: bool = os.getenv("SQLALCHEMY_LOGGING", True)

    graph_filename = os.getenv("GRAPH_NAME", "cognee_graph.pkl")

    # Model parameters
@@ -52,7 +57,7 @@ class Config:
    ollama_endpoint: str = "http://localhost:11434/v1"
    ollama_key: Optional[str] = "ollama"
    ollama_model: str = "mistral:instruct"
    model: str = "gpt-4-0125-preview"
    model: str = "gpt-4-1106-preview"
    # model: str = "gpt-3.5-turbo"
    model_endpoint: str = "openai"
    openai_key: Optional[str] = os.getenv("OPENAI_API_KEY")
@@ -63,8 +68,7 @@ class Config:

    # Embedding parameters
    embedding_model: str = "BAAI/bge-large-en-v1.5"
    embedding_dim: int = 1536
    embedding_chunk_size: int = 300
    embedding_dimensions: int = 1024

    # Database parameters
    if (
@@ -93,7 +97,6 @@ class Config:
        or os.getenv("AWS_ENV") == "prd"
    ):
        load_dotenv()
        db_type = 'postgresql'

        db_host: str = os.getenv("POSTGRES_HOST")
        logging.info("db_host: %s", db_host)
@@ -1,45 +1,75 @@
from cognee.config import Config
from .databases.relational import SqliteEngine, DatabaseEngine
from .databases.vector.weaviate_db import WeaviateAdapter
from .databases.relational import DuckDBAdapter, DatabaseEngine
from .databases.vector.vector_db_interface import VectorDBInterface
from .databases.vector.qdrant.QDrantAdapter import QDrantAdapter
from .databases.vector.embeddings.DefaultEmbeddingEngine import DefaultEmbeddingEngine
from .llm.llm_interface import LLMInterface
from .llm.openai.adapter import OpenAIAdapter
from .files.storage import LocalStorage

config = Config()
config.load()

class InfrastructureConfig():
    data_path: str = config.data_path
    system_root_directory: str = config.system_root_directory
    data_root_directory: str = config.data_root_directory
    database_engine: DatabaseEngine = None
    vector_engine: VectorDBInterface = None
    llm_engine: LLMInterface = None

    def get_config(self) -> dict:
        if self.database_engine is None:
            self.database_engine = SqliteEngine(config.db_path, config.db_name)
            db_path = self.system_root_directory + "/" + config.db_path

            LocalStorage.ensure_directory_exists(db_path)

            self.database_engine = DuckDBAdapter(
                db_name = config.db_name,
                db_path = db_path
            )

        if self.llm_engine is None:
            self.llm_engine = OpenAIAdapter(config.openai_key, config.model)

        if self.vector_engine is None:
            self.vector_engine = WeaviateAdapter(
                config.weaviate_url,
                config.weaviate_api_key,
                embedding_engine = DefaultEmbeddingEngine()
            )
            try:
                from .databases.vector.weaviate_db import WeaviateAdapter

                if config.weaviate_url is None and config.weaviate_api_key is None:
                    raise EnvironmentError("Weaviate is not configured!")

                self.vector_engine = WeaviateAdapter(
                    config.weaviate_url,
                    config.weaviate_api_key,
                    embedding_engine = DefaultEmbeddingEngine()
                )
            except ImportError:
                if config.qdrant_url is None and config.qdrant_api_key is None:
                    raise EnvironmentError("Qdrant is not configured!")

                self.vector_engine = QDrantAdapter(
                    qdrant_url = config.qdrant_url,
                    qdrant_api_key = config.qdrant_api_key,
                    embedding_engine = DefaultEmbeddingEngine()
                )

        return {
            "data_path": self.data_path,
            "llm_engine": self.llm_engine,
            "vector_engine": self.vector_engine,
            "database_engine": self.database_engine,
            "system_root_directory": self.system_root_directory,
            "data_root_directory": self.data_root_directory,
            "database_directory_path": self.system_root_directory + "/" + config.db_path,
            "database_path": self.system_root_directory + "/" + config.db_path + "/" + config.db_name,
        }

    def set_config(self, new_config: dict):
        if "data_path" in new_config:
            self.data_path = new_config["data_path"]

        if "system_root_directory" in new_config:
            self.system_root_directory = new_config["system_root_directory"]

        if "data_root_directory" in new_config:
            self.data_root_directory = new_config["data_root_directory"]

        if "database_engine" in new_config:
            self.database_engine = new_config["database_engine"]
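In effect, `get_config` now builds engines lazily on first access and falls back from Weaviate to Qdrant when the weaviate package is not importable. A hedged sketch of consuming the config dict:

```
from cognee.infrastructure import infrastructure_config

infra = infrastructure_config.get_config()
vector_engine = infra["vector_engine"]      # WeaviateAdapter, or QDrantAdapter as the fallback
database_engine = infra["database_engine"]  # DuckDBAdapter rooted under system_root_directory
```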
@@ -1,7 +1,7 @@
"""Factory function to get the appropriate graph client based on the graph type."""
from cognee.config import Config
from cognee.root_dir import get_absolute_path
from cognee.shared.data_models import GraphDBType
from cognee.infrastructure import infrastructure_config
from .graph_db_interface import GraphDBInterface
from .networkx.adapter import NetworXAdapter
# from .neo4j.adapter import Neo4jAdapter
@@ -13,7 +13,7 @@ config.load()
def get_graph_client(graph_type: GraphDBType, graph_filename: str = None) -> GraphDBInterface:
    """Factory function to get the appropriate graph client based on the graph type."""
    if graph_filename is None:
        graph_filename = get_absolute_path(f"./data/cognee/{config.graph_filename}")
        graph_filename = f"{infrastructure_config.get_config()['database_directory_path']}/{config.graph_filename}"
    if graph_type == GraphDBType.NETWORKX:
        return NetworXAdapter(filename = graph_filename)
    elif graph_type == GraphDBType.NEO4J:
@@ -1,10 +1,8 @@
import duckdb
from cognee.root_dir import get_absolute_path

class DuckDBAdapter():
    def __init__(self):
        db_path = get_absolute_path("./data/cognee")
        db_location = db_path + "/cognee.duckdb"
    def __init__(self, db_path: str, db_name: str):
        db_location = db_path + "/" + db_name

        self.db_client = duckdb.connect(db_location)
@@ -19,4 +17,4 @@ class DuckDBAdapter():
    )

    def get_files_metadata(self, dataset_name: str):
        return self.db_client.sql(f"SELECT * FROM {dataset_name}.file_metadata;").to_df().to_dict("records")
        return self.db_client.sql(f"SELECT id, name, file_path, extension, mime_type, keywords FROM {dataset_name}.file_metadata;").to_df().to_dict("records")
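Under the new constructor contract the caller supplies the database location instead of the adapter hard-coding it; a hedged sketch with hypothetical paths:

```
from cognee.infrastructure.databases.relational import DuckDBAdapter

db = DuckDBAdapter(db_path = "/var/lib/cognee/.cognee_system/cognee", db_name = "cognee.db")
print(db.get_datasets())
```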
@@ -1,74 +0,0 @@
import uuid
from pathlib import Path
from sqlalchemy import select
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from cognee.config import Config
# from ..relational_db_interface import RelationalDBInterface
from cognee.database.relationaldb.models.memory import MemoryModel

config = Config()
config.load()

class RelationalDBAdapter():
    session_maker: async_sessionmaker[AsyncSession]

    def __init__(self):
        engine = create_async_engine(
            self.get_database_url(),
            pool_recycle = 3600,
            echo = config.sqlalchemy_logging,
        )
        self.create_session = async_sessionmaker[AsyncSession](
            bind = engine,
            class_ = AsyncSession,
            expire_on_commit = False,
        )

    def get_database_url(
        self,
        db_type = config.db_type,
        db_name = config.db_name,
        db_path = config.db_path,
        user = config.db_user,
        password = config.db_password,
        host = config.db_host,
        port = config.db_port,
    ):
        if db_type == "sqlite":
            db_path = (Path(db_path) / db_name).absolute()
            return f"sqlite+aiosqlite:///{db_path}"  # SQLite uses file path
        elif db_type == "duckdb":
            db_path = (Path(db_path) / db_name).absolute()
            return f"duckdb+aiosqlite:///{db_path}"
        elif db_type == "postgresql":
            # Ensure optional parameters are handled gracefully
            port_str = f":{port}" if port else ""
            password_str = f":{password}" if password else ""
            if not all([user, host]):
                raise ValueError("User and host are required for PostgreSQL connections.")
            return f"postgresql+asyncpg://{user}{password_str}@{host}{port_str}/{db_name}"
        else:
            raise ValueError(f"Unsupported database type: {db_type}")

    async def add_memory(self, user_id: str, memory_name: str):
        memory_id = uuid.uuid4()

        async with self.create_session() as session:
            async with session.begin():
                return session.add(MemoryModel(
                    id = memory_id,
                    user_id = user_id,
                    memory_name = memory_name,
                ))

    async def get_memory_by_name(self, memory_name: int):
        async with self.create_session() as session:
            async with session.begin():
                result = await session.execute(
                    select(MemoryModel.id)
                    .where(MemoryModel.memory_name == memory_name)
                )

                memory = result.scalars().one_or_none()

                return memory
@@ -1,26 +0,0 @@
# from datetime import datetime, timezone
# from sqlalchemy.orm import relationship
# # from sqlalchemy.orm import DeclarativeBase
# from sqlalchemy import Column, String, DateTime, ForeignKey
# from cognee.database.relationaldb.database import Base


# class MemoryModel(Base):
#     __tablename__ = "memories_v1"

#     id = Column(String, primary_key = True)
#     user_id = Column(String, ForeignKey("users.id"), index = True)
#     memory_name = Column(String, nullable = True)
#     memory_category = Column(String, nullable = True)
#     created_at = Column(DateTime, default = datetime.now(timezone.utc))
#     updated_at = Column(DateTime, onupdate = datetime.now(timezone.utc))
#     methods_list = Column(String, nullable = True)
#     attributes_list = Column(String, nullable = True)

#     user = relationship("User", back_populates="memories")
#     metadatas = relationship(
#         "MetaDatas", back_populates="memory", cascade="all, delete-orphan"
#     )

#     def __repr__(self):
#         return f"<Memory(id={self.id}, user_id={self.user_id}, created_at={self.created_at}, updated_at={self.updated_at})>"
@@ -1,8 +1,4 @@
from .get_vector_database import get_vector_database
from .qdrant import QDrantAdapter
from .models.DataPoint import DataPoint
from .models.VectorConfig import VectorConfig
from .models.CollectionConfig import CollectionConfig
from .weaviate_db import WeaviateAdapter
from .vector_db_interface import VectorDBInterface
from .embeddings.DefaultEmbeddingEngine import DefaultEmbeddingEngine
@@ -15,4 +15,4 @@ class DefaultEmbeddingEngine(EmbeddingEngine):
        return embeddings_list

    def get_vector_size(self) -> int:
        return 1024
        return config.embedding_dimensions
@@ -1,10 +0,0 @@
from cognee.config import Config
# from .qdrant import QDrantAdapter
from .weaviate_db import WeaviateAdapter

config = Config()
config.load()

def get_vector_database():
    # return QDrantAdapter(config.qdrant_path, config.qdrant_url, config.qdrant_api_key)
    return WeaviateAdapter(config.weaviate_url, config.weaviate_api_key, config.openai_key)
@@ -32,7 +32,7 @@ class QDrantAdapter(VectorDBInterface):
    qdrant_path: str = None
    qdrant_api_key: str = None

    def __init__(self, qdrant_path, qdrant_url, qdrant_api_key, embedding_engine: EmbeddingEngine):
    def __init__(self, qdrant_url, qdrant_api_key, embedding_engine: EmbeddingEngine, qdrant_path = None):
        self.embedding_engine = embedding_engine

        if qdrant_path is not None:
@@ -91,4 +91,4 @@ class WeaviateAdapter(VectorDBInterface):
        return [await query_search(query_vector) for query_vector in await self.embed_data(query_texts)]

    async def prune(self):
        self.client.collections.delete_all()
        self.client.collections.delete_all()
@@ -38,3 +38,10 @@ class LocalStorage(Storage):
    @staticmethod
    def copy_file(source_file_path: str, destination_file_path: str):
        return shutil.copy2(source_file_path, destination_file_path)

    @staticmethod
    def remove_all(tree_path: str):
        try:
            shutil.rmtree(tree_path)
        except FileNotFoundError:
            pass
@@ -22,16 +22,12 @@ def get_llm_client():
    provider = LLMProvider(config.llm_provider)

    if provider == LLMProvider.OPENAI:
        print("Using OpenAI API")
        return OpenAIAdapter(config.openai_key, config.model)
    elif provider == LLMProvider.OLLAMA:
        print("Using Ollama API")
        return GenericAPIAdapter(config.ollama_endpoint, config.ollama_key, config.ollama_model)
    elif provider == LLMProvider.ANTHROPIC:
        print("Using Anthropic API")
        return AnthropicAdapter(config.custom_endpoint, config.custom_endpoint, config.custom_model)
    elif provider == LLMProvider.CUSTOM:
        print("Using Custom API")
        return GenericAPIAdapter(config.custom_endpoint, config.custom_key, config.custom_model)
    # Add your custom LLM provider here
    else:
@@ -126,7 +126,7 @@ if __name__ == "__main__":

    from cognee.utils import render_graph

    graph_url = await render_graph(graph, graph_type="networkx")
    graph_url = await render_graph(graph)

    print(graph_url)
cognee/modules/discovery/__init__.py (new file), 1 addition

@@ -0,0 +1 @@
from .discover_directory_datasets import discover_directory_datasets
cognee/modules/discovery/discover_directory_datasets.py (new file), 20 additions

@@ -0,0 +1,20 @@
from os import path, listdir

def discover_directory_datasets(root_dir_path: str, parent_dir: str = None):
    datasets = {}

    for file_or_dir in listdir(root_dir_path):
        if path.isdir(path.join(root_dir_path, file_or_dir)):
            dataset_name = file_or_dir if parent_dir is None else f"{parent_dir}.{file_or_dir}"

            nested_datasets = discover_directory_datasets(path.join(root_dir_path, file_or_dir), dataset_name)

            for dataset in nested_datasets.keys():
                datasets[dataset] = nested_datasets[dataset]
        else:
            if parent_dir not in datasets:
                datasets[parent_dir] = []

            datasets[parent_dir].append(path.join(root_dir_path, file_or_dir))

    return datasets
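For the reports/bills layout from the README example, a hedged sketch of the mapping this helper produces (paths hypothetical); note that files sitting directly in the root land under the `parent_dir` key, which is `None` at the top level:

```
from cognee.modules.discovery import discover_directory_datasets

datasets = discover_directory_datasets("/data/root")
# Expected shape:
# {
#     "reports.2024": ["/data/root/reports/2024/summary.pdf", ...],
#     "reports.2023": [...],
#     "bills": [...],
# }
```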
@@ -1,311 +0,0 @@
class Memory:
    def __init__(
        self,
        user_id: str = "676",
        session=None,
        index_name: str = None,
        db_type: str = globalConfig.vectordb,
        namespace: str = None,
        memory_id: str = None,
        memory_class=None,
        job_id: str = None,
    ) -> None:
        self.load_environment_variables()
        self.memory_id = memory_id
        self.user_id = user_id
        self.session = session
        self.index_name = index_name
        self.db_type = db_type
        self.namespace = namespace
        self.memory_instances = []
        self.memory_class = memory_class
        self.job_id = job_id
        # self.memory_class = DynamicBaseMemory(
        #     "Memory", user_id, str(self.memory_id), index_name, db_type, namespace
        # )

    def load_environment_variables(self) -> None:
        self.OPENAI_TEMPERATURE = globalConfig.openai_temperature
        self.OPENAI_API_KEY = globalConfig.openai_key

    @classmethod
    async def create_memory(
        cls,
        user_id: str,
        session,
        job_id: str = None,
        memory_label: str = None,
        **kwargs,
    ):
        """
        Class method that acts as a factory method for creating Memory instances.
        It performs necessary DB checks or updates before instance creation.
        """
        existing_user = await cls.check_existing_user(user_id, session)
        logging.info(f"Existing user: {existing_user}")

        if existing_user:
            # Handle existing user scenario...
            memory_id = await cls.check_existing_memory(user_id, memory_label, session)
            if memory_id is None:
                memory_id = await cls.handle_new_memory(
                    user_id=user_id,
                    session=session,
                    job_id=job_id,
                    memory_name=memory_label,
                )
            logging.info(
                f"Existing user {user_id} found in the DB. Memory ID: {memory_id}"
            )
        else:
            # Handle new user scenario...
            await cls.handle_new_user(user_id, session)

            memory_id = await cls.handle_new_memory(
                user_id=user_id,
                session=session,
                job_id=job_id,
                memory_name=memory_label,
            )
            logging.info(
                f"New user {user_id} created in the DB. Memory ID: {memory_id}"
            )

        memory_class = DynamicBaseMemory(
            memory_label,
            user_id,
            str(memory_id),
            index_name=memory_label,
            db_type=globalConfig.vectordb,
            **kwargs,
        )

        return cls(
            user_id=user_id,
            session=session,
            memory_id=memory_id,
            job_id=job_id,
            memory_class=memory_class,
            **kwargs,
        )

    async def list_memory_classes(self):
        """
        Lists all available memory classes in the memory instance.
        """
        # Use a list comprehension to filter attributes that end with '_class'
        return [attr for attr in dir(self) if attr.endswith("_class")]

    @staticmethod
    async def check_existing_user(user_id: str, session):
        """Check if a user exists in the DB and return it."""
        result = await session.execute(select(User).where(User.id == user_id))
        return result.scalar_one_or_none()

    @staticmethod
    async def check_existing_memory(user_id: str, memory_label: str, session):
        """Check if a user memory exists in the DB and return it. Filters by user and label"""
        try:
            result = await session.execute(
                select(MemoryModel.id)
                .where(MemoryModel.user_id == user_id)
                .filter_by(memory_name=memory_label)
                .order_by(MemoryModel.created_at)
            )
            return result.scalar_one_or_none()
        except Exception as e:
            logging.error(f"An error occurred: {str(e)}")
            return None

    @staticmethod
    async def handle_new_user(user_id: str, session):
        """
        Handle new user creation in the database.

        Args:
            user_id (str): The unique identifier for the new user.
            session: The database session for the operation.

        Returns:
            str: A success message or an error message.

        Raises:
            Exception: If any error occurs during the user creation process.
        """
        try:
            new_user = User(id=user_id)
            await add_entity(session, new_user)
            return "User creation successful."
        except Exception as e:
            return f"Error creating user: {str(e)}"

    @staticmethod
    async def handle_new_memory(
        user_id: str,
        session,
        job_id: str = None,
        memory_name: str = None,
        memory_category: str = "PUBLIC",
    ):
        """
        Handle new memory creation associated with a user.

        Args:
            user_id (str): The user's unique identifier.
            session: The database session for the operation.
            job_id (str, optional): The identifier of the associated job, if any.
            memory_name (str, optional): The name of the memory.

        Returns:
            str: The unique memory ID if successful, or an error message.

        Raises:
            Exception: If any error occurs during memory creation.
        """
        try:
            memory_id = str(uuid.uuid4())
            logging.info("Job id %s", job_id)
            memory = MemoryModel(
                id=memory_id,
                user_id=user_id,
                operation_id=job_id,
                memory_name=memory_name,
                memory_category=memory_category,
                methods_list=str(["Memory", "SemanticMemory", "EpisodicMemory"]),
                attributes_list=str(
                    [
                        "user_id",
                        "index_name",
                        "db_type",
                        "knowledge_source",
                        "knowledge_type",
                        "memory_id",
                        "long_term_memory",
                        "short_term_memory",
                        "namespace",
                    ]
                ),
            )
            await add_entity(session, memory)
            return memory_id
        except Exception as e:
            return f"Error creating memory: {str(e)}"

    async def add_memory_instance(self, memory_class_name: str):
        """Add a new memory instance to the memory_instances list."""
        instance = DynamicBaseMemory(
            memory_class_name,
            self.user_id,
            self.memory_id,
            self.index_name,
            self.db_type,
            self.namespace,
        )
        print("The following instance was defined", instance)
        self.memory_instances.append(instance)

    async def query_method(self):
        methods_list = await self.session.execute(
            select(MemoryModel.methods_list).where(MemoryModel.id == self.memory_id)
        )
        methods_list = methods_list.scalar_one_or_none()
        return methods_list

    async def manage_memory_attributes(self, existing_user):
        """Manage memory attributes based on the user existence."""
        if existing_user:
            print(f"ID before query: {self.memory_id}, type: {type(self.memory_id)}")

            # attributes_list = await self.session.query(MemoryModel.attributes_list).filter_by(id=self.memory_id[0]).scalar()
            attributes_list = await self.query_method()
            logging.info(f"Attributes list: {attributes_list}")
            if attributes_list is not None:
                attributes_list = ast.literal_eval(attributes_list)
                await self.handle_attributes(attributes_list)
            else:
                logging.warning("attributes_list is None!")
        else:
            attributes_list = [
                "user_id",
                "index_name",
                "db_type",
                "knowledge_source",
                "knowledge_type",
                "memory_id",
                "long_term_memory",
                "short_term_memory",
                "namespace",
            ]
            await self.handle_attributes(attributes_list)

    async def handle_attributes(self, attributes_list):
        """Handle attributes for existing memory instances."""
        for attr in attributes_list:
            await self.memory_class.add_attribute(attr)

    async def manage_memory_methods(self, existing_user):
        """
        Manage memory methods based on the user existence.
        """
        if existing_user:
            # Fetch existing methods from the database
            # methods_list = await self.session.query(MemoryModel.methods_list).filter_by(id=self.memory_id).scalar()

            methods_list = await self.session.execute(
                select(MemoryModel.methods_list).where(
                    MemoryModel.id == self.memory_id[0]
                )
            )
            methods_list = methods_list.scalar_one_or_none()
            methods_list = ast.literal_eval(methods_list)
        else:
            # Define default methods for a new user
            methods_list = [
                "async_create_long_term_memory",
                "async_init",
                "add_memories",
                "fetch_memories",
                "delete_memories",
                "async_create_short_term_memory",
                "_create_buffer_context",
                "_get_task_list",
                "_run_main_buffer",
                "_available_operations",
                "_provide_feedback",
            ]
        # Apply methods to memory instances
        for class_instance in self.memory_instances:
            for method in methods_list:
                class_instance.add_method(method)

    async def dynamic_method_call(
        self, dynamic_base_memory_instance, method_name: str, *args, **kwargs
    ):
        if method_name in dynamic_base_memory_instance.methods:
            method = getattr(dynamic_base_memory_instance, method_name, None)
            if method:
                return await method(*args, **kwargs)
        raise AttributeError(
            f"{dynamic_base_memory_instance.name} object has no attribute {method_name}"
        )

    async def add_dynamic_memory_class(self, class_name: str, namespace: str):
        logging.info("Here is the memory id %s", self.memory_id[0])
        new_memory_class = DynamicBaseMemory(
            class_name,
            self.user_id,
            self.memory_id[0],
            self.index_name,
            self.db_type,
            namespace,
        )
        setattr(self, f"{class_name.lower()}_class", new_memory_class)
        return new_memory_class

    async def add_attribute_to_class(self, class_instance, attribute_name: str):
        # add this to database for a particular user and load under memory id
        await class_instance.add_attribute(attribute_name)

    async def add_method_to_class(self, class_instance, method_name: str):
        # add this to database for a particular user and load under memory id
        await class_instance.add_method(method_name)
@@ -5,7 +5,7 @@ from pydantic import BaseModel, Field

class Node(BaseModel):
    """Node in a knowledge graph."""
    id: int
    id: str
    description: str
    category: str
    color: str = "blue"
@@ -15,8 +15,8 @@ class Node(BaseModel):

class Edge(BaseModel):
    """Edge in a knowledge graph."""
    source: int
    target: int
    source: str
    target: str
    description: str
    color: str = "blue"
    created_at: Optional[float] = None
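With the switch from int to str identifiers, a hedged construction sketch (field values hypothetical):

```
node = Node(id = "document_1", description = "Source document", category = "document")
edge = Edge(source = "document_1", target = "chunk_1", description = "contains")
```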
@@ -81,6 +81,8 @@ class TextSubclass(str, Enum):
    ANNOTATIONS = "Annotations and metadata for various media"
    VOCAB_LISTS = "Vocabulary lists and grammar rules"
    LANGUAGE_EXERCISES = "Language exercises and quizzes"
    LEGAL_AND_REGULATORY_DOCUMENTS = "Legal and Regulatory Documents"
    OTHER_TEXT = "Other types of text data"

class AudioSubclass(str, Enum):
    MUSIC_TRACKS = "Music tracks and albums"
@@ -88,6 +90,7 @@ class AudioSubclass(str, Enum):
    AUDIOBOOKS = "Audiobooks and audio guides"
    INTERVIEWS = "Recorded interviews and speeches"
    SOUND_EFFECTS = "Sound effects and ambient sounds"
    OTHER_AUDIO = "Other types of audio recordings"

class ImageSubclass(str, Enum):
    PHOTOGRAPHS = "Photographs and digital images"
@@ -95,6 +98,7 @@ class ImageSubclass(str, Enum):
    INFOGRAPHICS = "Infographics and visual data representations"
    ARTWORK = "Artwork and paintings"
    SCREENSHOTS = "Screenshots and graphical user interfaces"
    OTHER_IMAGES = "Other types of images"

class VideoSubclass(str, Enum):
    MOVIES = "Movies and short films"
@@ -102,6 +106,7 @@ class VideoSubclass(str, Enum):
    TUTORIALS = "Video tutorials and how-to guides"
    ANIMATED_FEATURES = "Animated features and cartoons"
    LIVE_EVENTS = "Live event recordings and sports broadcasts"
    OTHER_VIDEOS = "Other types of video content"

class MultimediaSubclass(str, Enum):
    WEB_CONTENT = "Interactive web content and games"
@@ -109,6 +114,7 @@ class MultimediaSubclass(str, Enum):
    MIXED_MEDIA = "Mixed media presentations and slide decks"
    E_LEARNING_MODULES = "E-learning modules with integrated multimedia"
    DIGITAL_EXHIBITIONS = "Digital exhibitions and virtual tours"
    OTHER_MULTIMEDIA = "Other types of multimedia content"

class Model3DSubclass(str, Enum):
    ARCHITECTURAL_RENDERINGS = "Architectural renderings and building plans"
@@ -116,12 +122,14 @@ class Model3DSubclass(str, Enum):
    ANIMATIONS = "3D animations and character models"
    SCIENTIFIC_VISUALIZATIONS = "Scientific simulations and visualizations"
    VR_OBJECTS = "Virtual objects for AR/VR applications"
    OTHER_3D_MODELS = "Other types of 3D models"

class ProceduralSubclass(str, Enum):
    TUTORIALS_GUIDES = "Tutorials and step-by-step guides"
    WORKFLOW_DESCRIPTIONS = "Workflow and process descriptions"
    SIMULATIONS = "Simulation and training exercises"
    RECIPES = "Recipes and crafting instructions"
    OTHER_PROCEDURAL = "Other types of procedural content"

class ContentType(BaseModel):
    """Base class for different types of content."""
@@ -2,7 +2,8 @@

import os
import graphistry
from cognee.root_dir import get_absolute_path
import pandas as pd
import matplotlib.pyplot as plt

def get_document_names(doc_input):
    """
@@ -73,25 +74,23 @@ def format_dict(d):
#
# import networkx as nx
#
# Create a simple NetworkX graph
# # Create a simple NetworkX graph
# G = nx.Graph()
#
# # Add nodes
# G.add_node(1)
# G.add_node(2)
#
# Add an edge between nodes
# # Add an edge between nodes
# G.add_edge(1, 2)
#
# # Render the graph
# import asyncio
#
# Define the graph type (for this example, it's just a placeholder as the function doesn't use it yet)
# graph_type = "networkx"
# # Call the render_graph function
# asyncio.run(render_graph(G))
#
# Call the render_graph function
# asyncio.run(render_graph(G, graph_type))
#
async def render_graph(graph, graph_type):
async def render_graph(graph):
    # Authenticate with your Graphistry API key

    import networkx as nx
@@ -109,9 +108,32 @@ async def render_graph(graph):
    # Convert the NetworkX graph to a Pandas DataFrame representing the edge list
    edges = nx.to_pandas_edgelist(graph)

    # Prepare nodes DataFrame with "id" and "layer_description"
    nodes_data = [{"id": node, "layer_description": graph.nodes[node]["layer_description"]}
                  for node in graph.nodes if "layer_description" in graph.nodes[node]]
    nodes = pd.DataFrame(nodes_data)

    # Visualize the graph using Graphistry
    plotter = graphistry.edges(edges, "source", "target")
    plotter = graphistry.edges(edges, "source", "target").nodes(nodes, "id")

    # Generate a dynamic color palette based on unique "layer_description" values
    unique_layers = nodes["layer_description"].unique()
    color_palette = generate_color_palette(unique_layers)

    plotter = plotter.encode_point_color(
        "layer_description",
        categorical_mapping = color_palette,
        default_mapping = "silver"  # Default color if any "layer_description" is not in the mapping
    )

    # Visualize the graph (this will open a URL in your default web browser)
    url = plotter.plot(render = False, as_files = True)
    print(f"Graph is visualized at: {url}")


def generate_color_palette(unique_layers):
    colormap = plt.cm.get_cmap("viridis", len(unique_layers))
    colors = [colormap(i) for i in range(len(unique_layers))]
    hex_colors = ["#%02x%02x%02x" % (int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255)) for rgb in colors]

    return dict(zip(unique_layers, hex_colors))
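A hedged usage sketch of the updated utility, building a small NetworkX graph whose nodes carry the `layer_description` attribute the plotter reads (Graphistry credentials are assumed to be configured):

```
import asyncio
import networkx as nx
from cognee.utils import render_graph

G = nx.Graph()
G.add_node("a", layer_description = "documents")
G.add_node("b", layer_description = "entities")
G.add_edge("a", "b")

asyncio.run(render_graph(G))
```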
docs/index.md, 128 changes

@@ -16,12 +16,8 @@ _Open-source framework for creating knowledge graphs and data models for LLMs._

cognee makes it easy to reliably enrich data for Large Language Models (LLMs) like GPT-3.5, GPT-4, and GPT-4-Vision, including, in the future, open-source models like Mistral/Mixtral from Together, Anyscale, Ollama, and llama-cpp-python.

By leveraging various tools like graph databases, function calling, tool calling and Pydantic, cognee stands out for its aim to emulate human memory for LLM apps and frameworks.

We leverage Neo4j to do the heavy lifting and dlt to load the data, and we've built a simple, easy-to-use API on top of it that helps you manage your context
@@ -30,88 +26,126 @@ We leverage Neo4j to do the heavy lifting and dlt to load the data, and we've bu

## Getting Started

### Setup

Create a `.env` file in your project in order to store environment variables such as API keys.

Note: Don't push the `.env` file to a git repo, as it would expose those keys to others.

If cognee is installed with Weaviate as the vector database provider, add the Weaviate environment variables:
```
WEAVIATE_URL = {YOUR_WEAVIATE_URL}
WEAVIATE_API_KEY = {YOUR_WEAVIATE_API_KEY}
```

pip install -U cognee["weaviate"]
Otherwise, if cognee is installed with the default (Qdrant) vector database provider, add the Qdrant environment variables:
```
QDRANT_URL = {YOUR_QDRANT_URL}
QDRANT_API_KEY = {YOUR_QDRANT_API_KEY}
```

Set the OpenAI API key as an environment variable:
```
import os

# Setting an environment variable
os.environ['OPENAI_API_KEY'] = ''
```

Import cognee and start using it.

Add the OpenAI API key environment variable:
```
OPENAI_API_KEY = {YOUR_OPENAI_API_KEY}
```
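As a side note, one convenient way to load a `.env` file at runtime is python-dotenv; this library is an assumption for illustration, since any mechanism that populates `os.environ` works:
```
# Illustrative sketch using python-dotenv (an assumption, not part of cognee's docs).
from dotenv import load_dotenv
import os

load_dotenv()  # reads key=value pairs from the .env file into the process environment
assert os.environ.get("OPENAI_API_KEY")  # the key is now available to cognee
```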
cognee stores data and system files inside the library directory; they are lost if the library folder is removed.
You can change the directories where cognee stores data and system files by calling its config functions:
```
import cognee
from os import listdir, path
from cognee import add

data_path = path.abspath(".data")
cognee.config.system_root_directory(absolute_path_to_directory)

results = await add(data_path, "izmene")
for result in results:
    print(result)

cognee.config.data_root_directory(absolute_path_to_directory)
```

Run the following command to see the graph.
Make sure to add your Graphistry credentials to `.env` beforehand.

### Run

Add a new piece of information to storage:
```
import cognee

cognee.add("some_text", dataset_name)

cognee.add([
    "some_text_1",
    "some_text_2",
    "some_text_3",
    ...
])
```
Or:
```
cognee.add("file://{absolute_path_to_file}", dataset_name)

cognee.add(
    [
        "file://{absolute_path_to_file_1}",
        "file://{absolute_path_to_file_2}",
        "file://{absolute_path_to_file_3}",
        ...
    ],
    dataset_name
)
```
Or:
```
cognee.add("data://{absolute_path_to_directory}", dataset_name)

# This is useful if you have a directory with files organized in subdirectories.
# You can target which directory to add by providing dataset_name.
# Example:
#          root
#         /    \
#   reports    bills
#   /     \
# 2024    2023
#
# cognee.add("data://{absolute_path_to_root}", "reports.2024")
# This will add just the directory 2024 under reports.
```

Use LLMs and cognee to create graphs:
```
cognee.cognify(dataset_name)
```

Render the graph with our util function:
```
from cognee.utils import render_graph

graph = await cognee.cognify("izmene")
graph_url = await render_graph(graph, graph_type = "networkx")
graph_url = await render_graph(graph)

print(graph_url)
```

Search the graph for a piece of information
Query the graph for a piece of information:
```
from cognee import search
from cognee.api.v1.search.search import SearchType

query_params = {
    SearchType.SIMILARITY: {'query': 'your search query here'}
}

out = await search(graph, query_params)

search_results = cognee.search(graph, query_params)

print(search_results)
```

[//]: # (You can also check out our [cookbook](./examples/index.md) to learn more about how to use cognee.)

## Why use cognee?

The question of whether to use cognee is fundamentally a question of why you should structure data inputs and outputs for your LLM workflows.

1. **Cost effective** — cognee extends the capabilities of your LLMs without the need for expensive data processing tools.

2. **Self contained** — cognee runs as a library and is simple to use.

3. **Interpretable** — navigate graphs instead of embeddings to understand your data.

4. **User guided** — cognee lets you control your input and provide your own Pydantic data models.
## License

This project is licensed under the terms of the MIT License.
This project is licensed under the terms of the Apache License 2.0.
File diff suppressed because it is too large
|
|
@@ -1,5 +1,24 @@
|
|||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "aa4e881d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from os import path\n",
|
||||
"from cognee import config, prune\n",
|
||||
"\n",
|
||||
"data_directory_path = path.abspath(\"../.data\")\n",
|
||||
"config.data_root_directory(data_directory_path)\n",
|
||||
"\n",
|
||||
"cognee_directory_path = path.abspath(\"../.cognee_system\")\n",
|
||||
"config.system_root_directory(cognee_directory_path)\n",
|
||||
"\n",
|
||||
"await prune.prune_system()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
|
|
@@ -7,38 +26,42 @@
|
|||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from cognee import config, add, cognify, search\n",
|
||||
"from cognee.utils import render_graph\n",
|
||||
"from os import listdir, path\n",
|
||||
"\n",
|
||||
"data_directory_path = path.abspath(\"../.data\")\n",
|
||||
"\n",
|
||||
"print(data_directory_path)\n",
|
||||
"\n",
|
||||
"config.data_path(data_directory_path)\n",
|
||||
"\n",
|
||||
"# dataset_name = \"pravilnik.energetska efikasnost.sertifikati\"\n",
|
||||
"# await add(\"file://\" + path.abspath(\"../.test_data/062c22df-d99b-599f-90cd-2d325c8bcf69.txt\"), dataset_name)\n",
|
||||
"from os import path\n",
|
||||
"import cognee\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"dataset_name = \"izmene\"\n",
|
||||
"await add(\"data://\" + path.abspath(\"../.data\"), dataset_name)\n",
|
||||
"\n",
|
||||
"# test_text = \"\"\"A quantum computer is a computer that takes advantage of quantum mechanical phenomena.\n",
|
||||
"# At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states.\n",
|
||||
"# Classical physics cannot explain the operation of these quantum devices, and a scalable quantum computer could perform some calculations exponentially faster (with respect to input size scaling)[2] than any modern \"classical\" computer. In particular, a large-scale quantum computer could break widely used encryption schemes and aid physicists in performing physical simulations; however, the current state of the technology is largely experimental and impractical, with several obstacles to useful applications. Moreover, scalable quantum computers do not hold promise for many practical tasks, and for many important tasks quantum speedups are proven impossible.\n",
|
||||
"# The basic unit of information in quantum computing is the qubit, similar to the bit in traditional digital electronics. Unlike a classical bit, a qubit can exist in a superposition of its two \"basis\" states. When measuring a qubit, the result is a probabilistic output of a classical bit, therefore making quantum computers nondeterministic in general. If a quantum computer manipulates the qubit in a particular way, wave interference effects can amplify the desired measurement results. The design of quantum algorithms involves creating procedures that allow a quantum computer to perform calculations efficiently and quickly.\n",
|
||||
"# Physically engineering high-quality qubits has proven challenging. If a physical qubit is not sufficiently isolated from its environment, it suffers from quantum decoherence, introducing noise into calculations. Paradoxically, perfectly isolating qubits is also undesirable because quantum computations typically need to initialize qubits, perform controlled qubit interactions, and measure the resulting quantum states. Each of those operations introduces errors and suffers from noise, and such inaccuracies accumulate.\n",
|
||||
"# In principle, a non-quantum (classical) computer can solve the same computational problems as a quantum computer, given enough time. Quantum advantage comes in the form of time complexity rather than computability, and quantum complexity theory shows that some quantum algorithms for carefully selected tasks require exponentially fewer computational steps than the best known non-quantum algorithms. Such tasks can in theory be solved on a large-scale quantum computer whereas classical computers would not finish computations in any reasonable amount of time. However, quantum speedup is not universal or even typical across computational tasks, since basic tasks such as sorting are proven to not allow any asymptotic quantum speedup. Claims of quantum supremacy have drawn significant attention to the discipline, but are demonstrated on contrived tasks, while near-term practical use cases remain limited.\n",
|
||||
"# \"\"\"\n",
|
||||
"\n",
|
||||
"# dataset_name = \"pravilnik.energetska efikasnost\"\n",
|
||||
"# await add(test_text, dataset_name)\n",
|
||||
"dataset_name = \"explanations\"\n",
|
||||
"await cognee.add([path.abspath(\"../.test_data/Natural language processing.txt\")], dataset_name)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"graph = await cognify(dataset_name)\n",
|
||||
"dataset_name = \"short_stories\"\n",
|
||||
"# data_directory_path is defined above\n",
|
||||
"await cognee.add(\"data://\" + data_directory_path, dataset_name)\n",
|
||||
"\n",
|
||||
"await render_graph(graph, graph_type = \"networkx\")\n"
|
||||
"text_1 = \"\"\"A quantum computer is a computer that takes advantage of quantum mechanical phenomena.\n",
|
||||
"At small scales, physical matter exhibits properties of both particles and waves, and quantum computing leverages this behavior, specifically quantum superposition and entanglement, using specialized hardware that supports the preparation and manipulation of quantum states.\n",
|
||||
"Classical physics cannot explain the operation of these quantum devices, and a scalable quantum computer could perform some calculations exponentially faster (with respect to input size scaling) than any modern \"classical\" computer. In particular, a large-scale quantum computer could break widely used encryption schemes and aid physicists in performing physical simulations; however, the current state of the technology is largely experimental and impractical, with several obstacles to useful applications. Moreover, scalable quantum computers do not hold promise for many practical tasks, and for many important tasks quantum speedups are proven impossible.\n",
|
||||
"The basic unit of information in quantum computing is the qubit, similar to the bit in traditional digital electronics. Unlike a classical bit, a qubit can exist in a superposition of its two \"basis\" states. When measuring a qubit, the result is a probabilistic output of a classical bit, therefore making quantum computers nondeterministic in general. If a quantum computer manipulates the qubit in a particular way, wave interference effects can amplify the desired measurement results. The design of quantum algorithms involves creating procedures that allow a quantum computer to perform calculations efficiently and quickly.\n",
|
||||
"Physically engineering high-quality qubits has proven challenging. If a physical qubit is not sufficiently isolated from its environment, it suffers from quantum decoherence, introducing noise into calculations. Paradoxically, perfectly isolating qubits is also undesirable because quantum computations typically need to initialize qubits, perform controlled qubit interactions, and measure the resulting quantum states. Each of those operations introduces errors and suffers from noise, and such inaccuracies accumulate.\n",
|
||||
"In principle, a non-quantum (classical) computer can solve the same computational problems as a quantum computer, given enough time. Quantum advantage comes in the form of time complexity rather than computability, and quantum complexity theory shows that some quantum algorithms for carefully selected tasks require exponentially fewer computational steps than the best known non-quantum algorithms. Such tasks can in theory be solved on a large-scale quantum computer whereas classical computers would not finish computations in any reasonable amount of time. However, quantum speedup is not universal or even typical across computational tasks, since basic tasks such as sorting are proven to not allow any asymptotic quantum speedup. Claims of quantum supremacy have drawn significant attention to the discipline, but are demonstrated on contrived tasks, while near-term practical use cases remain limited.\n",
|
||||
"\"\"\"\n",
|
||||
"\n",
|
||||
"text_2 = \"\"\"A large language model (LLM) is a language model notable for its ability to achieve general-purpose language generation and other natural language processing tasks such as classification. LLMs acquire these abilities by learning statistical relationships from text documents during a computationally intensive self-supervised and semi-supervised training process. LLMs can be used for text generation, a form of generative AI, by taking an input text and repeatedly predicting the next token or word.\n",
|
||||
"LLMs are artificial neural networks. The largest and most capable, as of March 2024, are built with a decoder-only transformer-based architecture while some recent implementations are based on other architectures, such as recurrent neural network variants and Mamba (a state space model).\n",
|
||||
"Up to 2020, fine tuning was the only way a model could be adapted to be able to accomplish specific tasks. Larger sized models, such as GPT-3, however, can be prompt-engineered to achieve similar results.[6] They are thought to acquire knowledge about syntax, semantics and \"ontology\" inherent in human language corpora, but also inaccuracies and biases present in the corpora.\n",
|
||||
"Some notable LLMs are OpenAI's GPT series of models (e.g., GPT-3.5 and GPT-4, used in ChatGPT and Microsoft Copilot), Google's PaLM and Gemini (the latter of which is currently used in the chatbot of the same name), xAI's Grok, Meta's LLaMA family of open-source models, Anthropic's Claude models, Mistral AI's open source models, and Databricks' open source DBRX.\n",
|
||||
"\"\"\"\n",
|
||||
"\n",
|
||||
"dataset_name = \"explanations\"\n",
|
||||
"await cognee.add(\n",
|
||||
" [\n",
|
||||
" text_1,\n",
|
||||
" text_2\n",
|
||||
" ],\n",
|
||||
" dataset_name\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"graph = await cognee.cognify(dataset_name)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
@@ -48,33 +71,27 @@
|
|||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from cognee import list_datasets\n",
|
||||
"import cognee\n",
|
||||
"\n",
|
||||
"print(list_datasets())"
|
||||
"print(cognee.datasets.list_datasets())\n",
|
||||
"\n",
|
||||
"explanations = cognee.datasets.query_data('explanations')\n",
|
||||
"print(len(explanations), explanations)\n",
|
||||
"\n",
|
||||
"stories = cognee.datasets.query_data('short_stories')\n",
|
||||
"print(len(stories), stories)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "66ad66ca",
|
||||
"id": "a81b2bce",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import duckdb\n",
|
||||
"from cognee.root_dir import get_absolute_path\n",
|
||||
"from cognee.utils import render_graph\n",
|
||||
"\n",
|
||||
"db_path = get_absolute_path(\"./data/cognee\")\n",
|
||||
"db_location = db_path + \"/cognee.duckdb\"\n",
|
||||
"print(db_location)\n",
|
||||
"\n",
|
||||
"db = duckdb.connect(db_location)\n",
|
||||
"\n",
|
||||
"izmene = db.sql(f\"SELECT * FROM pravilnik_energetska_efikasnost_sertifikati.file_metadata;\")\n",
|
||||
"print(izmene)\n",
|
||||
"\n",
|
||||
"# pravilnik = db.sql(f\"SELECT * FROM pravilnik.file_metadata;\")\n",
|
||||
"\n",
|
||||
"# print(pravilnik)"
|
||||
"await render_graph(graph)"
|
||||
]
|
||||
}
|
||||
],
|
||||
|
|
@@ -94,7 +111,7 @@
|
|||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.13"
|
||||
"version": "3.11.8"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
|
|
|||
342 poetry.lock generated
|
|
@@ -531,17 +531,17 @@ css = ["tinycss2 (>=1.1.0,<1.3)"]
|
|||
|
||||
[[package]]
|
||||
name = "boto3"
|
||||
version = "1.34.73"
|
||||
version = "1.34.74"
|
||||
description = "The AWS SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "boto3-1.34.73-py3-none-any.whl", hash = "sha256:4d68e7c7c1339e251c661fd6e2a34e31d281177106326712417fed839907fa84"},
|
||||
{file = "boto3-1.34.73.tar.gz", hash = "sha256:f45503333286c03fb692a3ce497b6fdb4e88c51c98a3b8ff05071d7f56571448"},
|
||||
{file = "boto3-1.34.74-py3-none-any.whl", hash = "sha256:71f551491fb12fe07727d371d5561c5919fdf33dbc1d4251c57940d267a53a9e"},
|
||||
{file = "boto3-1.34.74.tar.gz", hash = "sha256:b703e22775561a748adc4576c30424b81abd2a00d3c6fb28eec2e5cde92c1eed"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.34.73,<1.35.0"
|
||||
botocore = ">=1.34.74,<1.35.0"
|
||||
jmespath = ">=0.7.1,<2.0.0"
|
||||
s3transfer = ">=0.10.0,<0.11.0"
|
||||
|
||||
|
|
@@ -550,13 +550,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
|
|||
|
||||
[[package]]
|
||||
name = "botocore"
|
||||
version = "1.34.73"
|
||||
version = "1.34.74"
|
||||
description = "Low-level, data-driven core of boto 3."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "botocore-1.34.73-py3-none-any.whl", hash = "sha256:88d660b711cc5b5b049e15d547cb09526f86e48c15b78dacad78522109502b91"},
|
||||
{file = "botocore-1.34.73.tar.gz", hash = "sha256:8df020b6682b9f1e9ee7b0554d5d0c14b7b23e3de070c85bcdf07fb20bfe4e2b"},
|
||||
{file = "botocore-1.34.74-py3-none-any.whl", hash = "sha256:5d2015b5d91d6c402c122783729ce995ed7283a746b0380957026dc2b3b75969"},
|
||||
{file = "botocore-1.34.74.tar.gz", hash = "sha256:32bb519bae62483893330c18a0ea4fd09d1ffa32bc573cd8559c2d9a08fb8c5c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@@ -841,6 +841,69 @@ traitlets = ">=4"
|
|||
[package.extras]
|
||||
test = ["pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "contourpy"
|
||||
version = "1.2.0"
|
||||
description = "Python library for calculating contours of 2D quadrilateral grids"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "contourpy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0274c1cb63625972c0c007ab14dd9ba9e199c36ae1a231ce45d725cbcbfd10a8"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab459a1cbbf18e8698399c595a01f6dcc5c138220ca3ea9e7e6126232d102bb4"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdd887f17c2f4572ce548461e4f96396681212d858cae7bd52ba3310bc6f00f"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d16edfc3fc09968e09ddffada434b3bf989bf4911535e04eada58469873e28e"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c203f617abc0dde5792beb586f827021069fb6d403d7f4d5c2b543d87edceb9"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b69303ceb2e4d4f146bf82fda78891ef7bcd80c41bf16bfca3d0d7eb545448aa"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:884c3f9d42d7218304bc74a8a7693d172685c84bd7ab2bab1ee567b769696df9"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1b1208102be6e851f20066bf0e7a96b7d48a07c9b0cfe6d0d4545c2f6cadab"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-win32.whl", hash = "sha256:34b9071c040d6fe45d9826cbbe3727d20d83f1b6110d219b83eb0e2a01d79488"},
|
||||
{file = "contourpy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd2f1ae63998da104f16a8b788f685e55d65760cd1929518fd94cd682bf03e41"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = "sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"},
|
||||
{file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"},
|
||||
{file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5fd1810973a375ca0e097dee059c407913ba35723b111df75671a1976efa04bc"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:999c71939aad2780f003979b25ac5b8f2df651dac7b38fb8ce6c46ba5abe6ae9"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7caf9b241464c404613512d5594a6e2ff0cc9cb5615c9475cc1d9b514218ae8"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:266270c6f6608340f6c9836a0fb9b367be61dde0c9a9a18d5ece97774105ff3e"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbd50d0a0539ae2e96e537553aff6d02c10ed165ef40c65b0e27e744a0f10af8"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ce96dd400486e80ac7d195b2d800b03e3e6a787e2a522bfb83755938465a819e"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d3364b999c62f539cd403f8123ae426da946e142312a514162adb2addd8d808"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:1c88dfb9e0c77612febebb6ac69d44a8d81e3dc60f993215425b62c1161353f4"},
|
||||
{file = "contourpy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:78e6ad33cf2e2e80c5dfaaa0beec3d61face0fb650557100ee36db808bfa6843"},
|
||||
{file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"},
|
||||
{file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"},
|
||||
{file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"},
|
||||
{file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
numpy = ">=1.20,<2.0"
|
||||
|
||||
[package.extras]
|
||||
bokeh = ["bokeh", "selenium"]
|
||||
docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
|
||||
mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pillow"]
|
||||
test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
|
||||
test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.4.4"
|
||||
|
|
@@ -988,6 +1051,21 @@ webencodings = "*"
|
|||
doc = ["sphinx", "sphinx_rtd_theme"]
|
||||
test = ["flake8", "isort", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "cycler"
|
||||
version = "0.12.1"
|
||||
description = "Composable style cycles"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
|
||||
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
|
||||
tests = ["pytest", "pytest-cov", "pytest-xdist"]
|
||||
|
||||
[[package]]
|
||||
name = "debugpy"
|
||||
version = "1.8.1"
|
||||
|
|
@@ -1326,6 +1404,71 @@ files = [
|
|||
{file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fonttools"
|
||||
version = "4.50.0"
|
||||
description = "Tools to manipulate font files"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effd303fb422f8ce06543a36ca69148471144c534cc25f30e5be752bc4f46736"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7913992ab836f621d06aabac118fc258b9947a775a607e1a737eb3a91c360335"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0a1c5bd2f63da4043b63888534b52c5a1fd7ae187c8ffc64cbb7ae475b9dab"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40fc98540fa5360e7ecf2c56ddf3c6e7dd04929543618fd7b5cc76e66390562"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fff65fbb7afe137bac3113827855e0204482727bddd00a806034ab0d3951d0d"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1aeae3dd2ee719074a9372c89ad94f7c581903306d76befdaca2a559f802472"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-win32.whl", hash = "sha256:e9623afa319405da33b43c85cceb0585a6f5d3a1d7c604daf4f7e1dd55c03d1f"},
|
||||
{file = "fonttools-4.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:778c5f43e7e654ef7fe0605e80894930bc3a7772e2f496238e57218610140f54"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3dfb102e7f63b78c832e4539969167ffcc0375b013080e6472350965a5fe8048"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e58fe34cb379ba3d01d5d319d67dd3ce7ca9a47ad044ea2b22635cd2d1247fc"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c673ab40d15a442a4e6eb09bf007c1dda47c84ac1e2eecbdf359adacb799c24"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b3ac35cdcd1a4c90c23a5200212c1bb74fa05833cc7c14291d7043a52ca2aaa"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8844e7a2c5f7ecf977e82eb6b3014f025c8b454e046d941ece05b768be5847ae"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f849bd3c5c2249b49c98eca5aaebb920d2bfd92b3c69e84ca9bddf133e9f83f0"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-win32.whl", hash = "sha256:39293ff231b36b035575e81c14626dfc14407a20de5262f9596c2cbb199c3625"},
|
||||
{file = "fonttools-4.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:c33d5023523b44d3481624f840c8646656a1def7630ca562f222eb3ead16c438"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b4a886a6dbe60100ba1cd24de962f8cd18139bd32808da80de1fa9f9f27bf1dc"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2ca1837bfbe5eafa11313dbc7edada79052709a1fffa10cea691210af4aa1fa"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0493dd97ac8977e48ffc1476b932b37c847cbb87fd68673dee5182004906828"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77844e2f1b0889120b6c222fc49b2b75c3d88b930615e98893b899b9352a27ea"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3566bfb8c55ed9100afe1ba6f0f12265cd63a1387b9661eb6031a1578a28bad1"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:35e10ddbc129cf61775d58a14f2d44121178d89874d32cae1eac722e687d9019"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-win32.whl", hash = "sha256:cc8140baf9fa8f9b903f2b393a6c413a220fa990264b215bf48484f3d0bf8710"},
|
||||
{file = "fonttools-4.50.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ccc85fd96373ab73c59833b824d7a73846670a0cb1f3afbaee2b2c426a8f931"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e270a406219af37581d96c810172001ec536e29e5593aa40d4c01cca3e145aa6"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac2463de667233372e9e1c7e9de3d914b708437ef52a3199fdbf5a60184f190c"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47abd6669195abe87c22750dbcd366dc3a0648f1b7c93c2baa97429c4dc1506e"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:074841375e2e3d559aecc86e1224caf78e8b8417bb391e7d2506412538f21adc"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0743fd2191ad7ab43d78cd747215b12033ddee24fa1e088605a3efe80d6984de"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3d7080cce7be5ed65bee3496f09f79a82865a514863197ff4d4d177389e981b0"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-win32.whl", hash = "sha256:a467ba4e2eadc1d5cc1a11d355abb945f680473fbe30d15617e104c81f483045"},
|
||||
{file = "fonttools-4.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:f77e048f805e00870659d6318fd89ef28ca4ee16a22b4c5e1905b735495fc422"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6245eafd553c4e9a0708e93be51392bd2288c773523892fbd616d33fd2fda59"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a4062cc7e8de26f1603323ef3ae2171c9d29c8a9f5e067d555a2813cd5c7a7e0"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34692850dfd64ba06af61e5791a441f664cb7d21e7b544e8f385718430e8f8e4"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678dd95f26a67e02c50dcb5bf250f95231d455642afbc65a3b0bcdacd4e4dd38"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f2ce7b0b295fe64ac0a85aef46a0f2614995774bd7bc643b85679c0283287f9"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d346f4dc2221bfb7ab652d1e37d327578434ce559baf7113b0f55768437fe6a0"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-win32.whl", hash = "sha256:a51eeaf52ba3afd70bf489be20e52fdfafe6c03d652b02477c6ce23c995222f4"},
|
||||
{file = "fonttools-4.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:8639be40d583e5d9da67795aa3eeeda0488fb577a1d42ae11a5036f18fb16d93"},
|
||||
{file = "fonttools-4.50.0-py3-none-any.whl", hash = "sha256:48fa36da06247aa8282766cfd63efff1bb24e55f020f29a335939ed3844d20d3"},
|
||||
{file = "fonttools-4.50.0.tar.gz", hash = "sha256:fa5cf61058c7dbb104c2ac4e782bf1b2016a8cf2f69de6e4dd6a865d2c969bb5"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
|
||||
graphite = ["lz4 (>=1.7.4.2)"]
|
||||
interpolatable = ["munkres", "pycairo", "scipy"]
|
||||
lxml = ["lxml (>=4.0)"]
|
||||
pathops = ["skia-pathops (>=0.5.0)"]
|
||||
plot = ["matplotlib"]
|
||||
repacker = ["uharfbuzz (>=0.23.0)"]
|
||||
symfont = ["sympy"]
|
||||
type1 = ["xattr"]
|
||||
ufo = ["fs (>=2.2.0,<3)"]
|
||||
unicode = ["unicodedata2 (>=15.1.0)"]
|
||||
woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "fqdn"
|
||||
version = "1.5.1"
|
||||
|
|
@@ -2525,6 +2668,119 @@ test-functional = ["jupytext[test]"]
|
|||
test-integration = ["ipykernel", "jupyter-server (!=2.11)", "jupytext[test-functional]", "nbconvert"]
|
||||
test-ui = ["calysto-bash"]
|
||||
|
||||
[[package]]
|
||||
name = "kiwisolver"
|
||||
version = "1.4.5"
|
||||
description = "A fast implementation of the Cassowary constraint solver"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"},
|
||||
{file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"},
|
||||
{file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"},
|
||||
{file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"},
|
||||
{file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"},
|
||||
{file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"},
|
||||
{file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"},
|
||||
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"},
|
||||
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"},
|
||||
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"},
|
||||
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"},
|
||||
{file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"},
|
||||
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"},
|
||||
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"},
|
||||
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"},
|
||||
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"},
|
||||
{file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"},
|
||||
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"},
|
||||
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"},
|
||||
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"},
|
||||
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"},
|
||||
{file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"},
|
||||
{file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "loguru"
|
||||
version = "0.7.2"
|
||||
|
|
@@ -2662,6 +2918,54 @@ files = [
|
|||
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matplotlib"
|
||||
version = "3.8.3"
|
||||
description = "Python plotting package"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cf60138ccc8004f117ab2a2bad513cc4d122e55864b4fe7adf4db20ca68a078f"},
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f557156f7116be3340cdeef7f128fa99b0d5d287d5f41a16e169819dcf22357"},
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f386cf162b059809ecfac3bcc491a9ea17da69fa35c8ded8ad154cd4b933d5ec"},
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c5f96f57b0369c288bf6f9b5274ba45787f7e0589a34d24bdbaf6d3344632f"},
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:83e0f72e2c116ca7e571c57aa29b0fe697d4c6425c4e87c6e994159e0c008635"},
|
||||
{file = "matplotlib-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c5c8290074ba31a41db1dc332dc2b62def469ff33766cbe325d32a3ee291aea"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5184e07c7e1d6d1481862ee361905b7059f7fe065fc837f7c3dc11eeb3f2f900"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7e7e0993d0758933b1a241a432b42c2db22dfa37d4108342ab4afb9557cbe3e"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b36ad07eac9740fc76c2aa16edf94e50b297d6eb4c081e3add863de4bb19a7"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c42dae72a62f14982f1474f7e5c9959fc4bc70c9de11cc5244c6e766200ba65"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf5932eee0d428192c40b7eac1399d608f5d995f975cdb9d1e6b48539a5ad8d0"},
|
||||
{file = "matplotlib-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:40321634e3a05ed02abf7c7b47a50be50b53ef3eaa3a573847431a545585b407"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:09074f8057917d17ab52c242fdf4916f30e99959c1908958b1fc6032e2d0f6d4"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5745f6d0fb5acfabbb2790318db03809a253096e98c91b9a31969df28ee604aa"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97653d869a71721b639714b42d87cda4cfee0ee74b47c569e4874c7590c55c5"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:242489efdb75b690c9c2e70bb5c6550727058c8a614e4c7716f363c27e10bba1"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:83c0653c64b73926730bd9ea14aa0f50f202ba187c307a881673bad4985967b7"},
|
||||
{file = "matplotlib-3.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef6c1025a570354297d6c15f7d0f296d95f88bd3850066b7f1e7b4f2f4c13a39"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c4af3f7317f8a1009bbb2d0bf23dfaba859eb7dd4ccbd604eba146dccaaaf0a4"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c6e00a65d017d26009bac6808f637b75ceade3e1ff91a138576f6b3065eeeba"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7b49ab49a3bea17802df6872f8d44f664ba8f9be0632a60c99b20b6db2165b7"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6728dde0a3997396b053602dbd907a9bd64ec7d5cf99e728b404083698d3ca01"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:813925d08fb86aba139f2d31864928d67511f64e5945ca909ad5bc09a96189bb"},
|
||||
{file = "matplotlib-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:cd3a0c2be76f4e7be03d34a14d49ded6acf22ef61f88da600a18a5cd8b3c5f3c"},
|
||||
{file = "matplotlib-3.8.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fa93695d5c08544f4a0dfd0965f378e7afc410d8672816aff1e81be1f45dbf2e"},
|
||||
{file = "matplotlib-3.8.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9764df0e8778f06414b9d281a75235c1e85071f64bb5d71564b97c1306a2afc"},
|
||||
{file = "matplotlib-3.8.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5e431a09e6fab4012b01fc155db0ce6dccacdbabe8198197f523a4ef4805eb26"},
|
||||
{file = "matplotlib-3.8.3.tar.gz", hash = "sha256:7b416239e9ae38be54b028abbf9048aff5054a9aba5416bef0bd17f9162ce161"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
contourpy = ">=1.0.1"
|
||||
cycler = ">=0.10"
|
||||
fonttools = ">=4.22.0"
|
||||
kiwisolver = ">=1.3.1"
|
||||
numpy = ">=1.21,<2"
|
||||
packaging = ">=20.0"
|
||||
pillow = ">=8"
|
||||
pyparsing = ">=2.3.1"
|
||||
python-dateutil = ">=2.7"
|
||||
|
||||
[[package]]
|
||||
name = "matplotlib-inline"
|
||||
version = "0.1.6"
|
||||
|
|
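Aside on the entry above: the lock records matplotlib 3.8.3 together with its resolved requirements, notably the `numpy = ">=1.21,<2"` bound. A minimal sketch (illustration only, not code from this commit) of checking that an installed environment actually satisfies that bound, using the `packaging` library:

```python
# Illustrative check, not shipped in this commit: confirm the installed
# numpy falls inside the ">=1.21,<2" range recorded for matplotlib above.
from importlib.metadata import version
from packaging.specifiers import SpecifierSet

numpy_bound = SpecifierSet(">=1.21,<2")   # constraint copied from the lock entry
installed = version("numpy")              # e.g. "1.26.4"
print(f"numpy {installed} satisfies bound: {installed in numpy_bound}")
```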
@@ -4057,13 +4361,13 @@ numpy = ">=1.16.6,<2"
 
 [[package]]
 name = "pycparser"
-version = "2.21"
+version = "2.22"
 description = "C parser in Python"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.8"
 files = [
-    {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
-    {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
 ]
 
 [[package]]
@@ -4233,6 +4537,20 @@ pyyaml = "*"
 
 [package.extras]
 extra = ["pygments (>=2.12)"]
 
+[[package]]
+name = "pyparsing"
+version = "3.1.2"
+description = "pyparsing module - Classes and methods to define and execute parsing grammars"
+optional = false
+python-versions = ">=3.6.8"
+files = [
+    {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
+    {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
+]
+
+[package.extras]
+diagrams = ["jinja2", "railroad-diagrams"]
+
 [[package]]
 name = "pypdf"
 version = "4.1.0"
@@ -6122,4 +6440,4 @@ weaviate = ["weaviate-client"]
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.11"
-content-hash = "d556cc86b6310e0c6020d900d4f958e067fd235c40a1df9e3139ca80625c923c"
+content-hash = "9640f8b1809db36e455d285e3e338d572669154be7a1ed74a501d6c9044ab1ae"
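The `content-hash` swap above is expected churn: Poetry derives this value from the dependency specification in pyproject.toml, so adding packages forces a new hash when the lock file is regenerated. The exact inputs and canonicalization are Poetry internals; the sketch below only illustrates the general idea under that assumption:

```python
# Illustration only -- Poetry's real content-hash derivation is internal.
# The point: a digest over the canonicalized dependency spec changes whenever
# the spec does, which is what marks a stale lock file.
import hashlib
import json

spec = {"python": "~3.11", "jinja2": "^3.1.3", "matplotlib": "^3.8.3"}
digest = hashlib.sha256(json.dumps(spec, sort_keys=True).encode()).hexdigest()
print(digest)  # edit any entry in `spec` and the digest changes completely
```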
pyproject.toml

@@ -52,6 +52,8 @@ fastembed = "^0.2.5"
 pypdf = "^4.1.0"
 anthropic = "^0.21.3"
 xmltodict = "^0.13.0"
+jinja2 = "^3.1.3"
+matplotlib = "^3.8.3"
 
 [tool.poetry.extras]
 dbt = ["dbt-core", "dbt-redshift", "dbt-bigquery", "dbt-duckdb", "dbt-snowflake", "dbt-athena-community", "dbt-databricks"]
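Both new pins use Poetry's caret operator: `^3.1.3` and `^3.8.3` permit any compatible release below the next major version, i.e. `>=3.1.3,<4.0.0` and `>=3.8.3,<4.0.0` respectively. A small sketch of that expansion (an illustration using the `packaging` library, not code from this commit):

```python
# Illustration: Poetry's caret pin "^3.8.3" is equivalent to ">=3.8.3,<4.0.0".
from packaging.specifiers import SpecifierSet

caret = SpecifierSet(">=3.8.3,<4.0.0")
for candidate in ("3.8.3", "3.9.0", "3.12.1", "4.0.0"):
    print(candidate, candidate in caret)
# -> 3.8.3 True, 3.9.0 True, 3.12.1 True, 4.0.0 False
```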