Merge branch 'main' of github.com:topoteretes/cognee into COG-170-PGvector-adapter

This commit is contained in:
Igor Ilic 2024-10-22 12:41:18 +02:00
commit 88ded6e1a8
28 changed files with 391 additions and 49 deletions

View file

@ -8,14 +8,14 @@ GRAPHISTRY_PASSWORD=
SENTRY_REPORTING_URL=
# "neo4j" or "networkx"
GRAPH_DATABASE_PROVIDER="neo4j"
GRAPH_DATABASE_PROVIDER="neo4j"
# Not needed if using networkx
GRAPH_DATABASE_URL=
GRAPH_DATABASE_USERNAME=
GRAPH_DATABASE_PASSWORD=
# "qdrant", "pgvector", "weaviate" or "lancedb"
VECTOR_ENGINE_PROVIDER="qdrant"
VECTOR_DB_PROVIDER="qdrant"
# Not needed if using "lancedb" or "pgvector"
VECTOR_DB_URL=
VECTOR_DB_KEY=

2
.gitignore vendored
View file

@ -177,3 +177,5 @@ cognee/cache/
# Default cognee system directory, used in development
.cognee_system/
.data_storage/
node_modules/

View file

@ -8,9 +8,17 @@ ENV DEBUG=${DEBUG}
ENV PIP_NO_CACHE_DIR=true
ENV PATH="${PATH}:/root/.poetry/bin"
RUN apt-get update && apt-get install
RUN apt-get install -y \
gcc \
libpq-dev
WORKDIR /app
COPY pyproject.toml poetry.lock /app/
RUN pip install poetry
# Don't create virtualenv since docker is already isolated
@ -18,15 +26,16 @@ RUN poetry config virtualenvs.create false
# Install the dependencies
RUN poetry install --no-root --no-dev
# Set the PYTHONPATH environment variable to include the /app directory
ENV PYTHONPATH=/app
COPY cognee/ cognee/
COPY cognee/ /app/cognee
# Copy Alembic configuration
COPY alembic.ini ./
COPY alembic/ alembic/
COPY alembic.ini /app/alembic.ini
COPY alembic/ /app/alembic
COPY entrypoint.sh /app/entrypoint.sh
RUN chmod +x /app/entrypoint.sh

View file

@ -55,7 +55,7 @@ cognee.config.llm_api_key = "YOUR_OPENAI_API_KEY"
You can also set the variables by creating .env file, here is our <a href="https://github.com/topoteretes/cognee/blob/main/.env.template">template.</a>
To use different LLM providers, for more info check out our <a href="https://topoteretes.github.io/cognee">documentation</a>
If you are using Networkx, create an account on Graphistry to visualize results:
If you are using NetworkX, create an account on Graphistry to visualize results:
```
cognee.config.set_graphistry_config({
"username": "YOUR_USERNAME",
@ -162,7 +162,7 @@ async def chunk_naive_llm_classifier(
```
We have a large number of tasks that can be used in your pipelines, and you can also create your own tasks to fit your business logic.
We have many tasks that can be used in your pipelines, and you can also create your own tasks to fit your business logic.
3. Once we have our tasks, it is time to group them into a pipeline.

View file

@ -92,6 +92,8 @@ if db_engine.engine.dialect.name == "sqlite":
db_config = get_relational_config()
LocalStorage.ensure_directory_exists(db_config.db_path)
print("Using database:", db_engine.db_uri)
config.set_section_option(
config.config_ini_section,
"SQLALCHEMY_DATABASE_URI",

View file

@ -16,7 +16,7 @@ from cognee.modules.users.methods import create_default_user, delete_user
revision: str = '482cd6517ce4'
down_revision: Union[str, None] = '8057ae7329c2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = "8057ae7329c2"
def upgrade() -> None:

View file

@ -43,13 +43,13 @@ async def lifespan(app: FastAPI):
# from cognee.modules.data.deletion import prune_system, prune_data
# await prune_data()
# await prune_system(metadata = True)
if app_environment == "local" or app_environment == "dev":
from cognee.infrastructure.databases.relational import get_relational_engine
db_engine = get_relational_engine()
await db_engine.create_database()
# if app_environment == "local" or app_environment == "dev":
from cognee.infrastructure.databases.relational import get_relational_engine
db_engine = get_relational_engine()
await db_engine.create_database()
from cognee.modules.users.methods import get_default_user
await get_default_user()
from cognee.modules.users.methods import get_default_user
await get_default_user()
yield

View file

@ -21,7 +21,7 @@ class config():
graph_config.graph_file_path = os.path.join(databases_directory_path, "cognee.graph")
vector_config = get_vectordb_config()
if vector_config.vector_engine_provider == "lancedb":
if vector_config.vector_db_provider == "lancedb":
vector_config.vector_db_url = os.path.join(databases_directory_path, "cognee.lancedb")
@staticmethod
@ -126,9 +126,9 @@ class config():
@staticmethod
def set_vector_engine_provider(vector_engine_provider: str):
def set_vector_db_provider(vector_db_provider: str):
vector_db_config = get_vectordb_config()
vector_db_config.vector_engine_provider = vector_engine_provider
vector_db_config.vector_db_provider = vector_db_provider
@staticmethod
def set_relational_db_config(config_dict: dict):

View file

@ -9,7 +9,7 @@ class VectorConfig(BaseSettings):
"cognee.lancedb"
)
vector_db_key: str = ""
vector_engine_provider: str = "lancedb"
vector_db_provider: str = "lancedb"
model_config = SettingsConfigDict(env_file = ".env", extra = "allow")
@ -17,7 +17,7 @@ class VectorConfig(BaseSettings):
return {
"vector_db_url": self.vector_db_url,
"vector_db_key": self.vector_db_key,
"vector_db_provider": self.vector_engine_provider,
"vector_db_provider": self.vector_db_provider,
}
@lru_cache

View file

@ -108,11 +108,12 @@ class WeaviateAdapter(VectorDBInterface):
filters = Filter.by_id().contains_any(data_point_ids)
)
for data_point in data_points:
for data_point in data_points.objects:
data_point.payload = data_point.properties
data_point.id = data_point.uuid
del data_point.properties
future.set_result(data_points)
future.set_result(data_points.objects)
return await future

View file

@ -4,7 +4,6 @@ import os
from pathlib import Path
from typing import List, Type
import aiofiles
import openai
import instructor
from pydantic import BaseModel
@ -13,9 +12,7 @@ from tenacity import retry, stop_after_attempt
from cognee.base_config import get_base_config
from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.shared.data_models import MonitoringTool
import logging
logging.basicConfig(level=logging.DEBUG)
# from cognee.shared.data_models import MonitoringTool
class OpenAIAdapter(LLMInterface):
name = "OpenAI"

View file

@ -103,8 +103,8 @@ def get_settings() -> SettingsDict:
},
vector_db = {
"provider": {
"label": vector_config.vector_engine_provider,
"value": vector_config.vector_engine_provider.lower(),
"label": vector_config.vector_db_provider,
"value": vector_config.vector_db_provider.lower(),
},
"url": vector_config.vector_db_url,
"api_key": vector_config.vector_db_key,

View file

@ -12,4 +12,4 @@ async def save_vector_db_config(vector_db_config: VectorDBConfig):
vector_config.vector_db_url = vector_db_config.url
vector_config.vector_db_key = vector_db_config.api_key
vector_config.vector_engine_provider = vector_db_config.provider
vector_config.vector_db_provider = vector_db_config.provider

View file

@ -25,7 +25,8 @@ class User(SQLAlchemyBaseUserTableUUID, Principal):
from fastapi_users import schemas
class UserRead(schemas.BaseUser[uuid_UUID]):
groups: list[uuid_UUID] # Add groups attribute
# groups: list[uuid_UUID] # Add groups attribute
pass
class UserCreate(schemas.BaseUserCreate):
pass

View file

@ -14,7 +14,7 @@ async def chunk_update_check(data_chunks: list[DocumentChunk], collection_name:
[str(chunk.chunk_id) for chunk in data_chunks],
)
existing_chunks_map = {chunk.id: chunk.payload for chunk in existing_chunks}
existing_chunks_map = {str(chunk.id): chunk.payload for chunk in existing_chunks}
affected_data_chunks = []

View file

@ -23,7 +23,7 @@ async def query_graph_connections(query: str, exploration_levels = 1) -> list[(s
exact_node = await graph_engine.extract_node(node_id)
if exact_node is not None and "uuid" in exact_node:
node_connections = await graph_engine.get_connections(exact_node["uuid"])
node_connections = await graph_engine.get_connections(str(exact_node["uuid"]))
else:
vector_engine = get_vector_engine()
results = await asyncio.gather(
@ -37,7 +37,7 @@ async def query_graph_connections(query: str, exploration_levels = 1) -> list[(s
return []
node_connections_results = await asyncio.gather(
*[graph_engine.get_connections(result.payload["uuid"]) for result in relevant_results]
*[graph_engine.get_connections(str(result.payload["uuid"])) for result in relevant_results]
)
node_connections = []

View file

@ -9,7 +9,7 @@ from cognee.api.v1.search import SearchType
logging.basicConfig(level=logging.DEBUG)
async def main():
cognee.config.set_vector_engine_provider("qdrant")
cognee.config.set_vector_db_provider("qdrant")
data_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_qdrant")).resolve())
cognee.config.data_root_directory(data_directory_path)
cognee_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_qdrant")).resolve())

View file

@ -7,7 +7,7 @@ from cognee.api.v1.search import SearchType
logging.basicConfig(level=logging.DEBUG)
async def main():
cognee.config.set_vector_engine_provider("weaviate")
cognee.config.set_vector_db_provider("weaviate")
data_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_weaviate")).resolve())
cognee.config.data_root_directory(data_directory_path)
cognee_directory_path = str(pathlib.Path(os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_weaviate")).resolve())

View file

@ -3,8 +3,19 @@
echo "Debug mode: $DEBUG"
echo "Environment: $ENVIRONMENT"
# Run migrations
poetry run alembic upgrade head
# # Run Alembic migrations
# echo "Running database migrations..."
# poetry run alembic upgrade head
# # Check if the migrations were successful
# if [ $? -eq 0 ]; then
# echo "Migrations completed successfully."
# else
# echo "Migration failed, exiting."
# exit 1
# fi
echo "Starting Gunicorn"

Binary file not shown.

14
examples/node/fetch.js Normal file
View file

@ -0,0 +1,14 @@
import nodeFetch from 'node-fetch';
import handleServerErrors from './handleServerErrors.js';
// Thin wrapper around node-fetch that targets the local Cognee API,
// attaches the bearer token, and routes both success and failure
// through the shared server-error handler.
export default function fetch(url, options = {}, token) {
  const requestOptions = {
    ...options,
    headers: {
      ...options.headers,
      'Authorization': `Bearer ${token}`,
    },
  };

  const responsePromise = nodeFetch('http://127.0.0.1:8000/api' + url, requestOptions);

  return responsePromise
    .then(handleServerErrors)
    .catch(handleServerErrors);
}

View file

@ -0,0 +1,16 @@
// Normalize a fetch Response into a resolved value (on success) or a
// rejection carrying the most useful error information available.
export default function handleServerErrors(response) {
  return new Promise((resolve, reject) => {
    // 401 means the auth token is missing or expired.
    if (response.status === 401) {
      reject(new Error('Unauthorized'));
      return;
    }

    if (response.ok) {
      resolve(response);
      return;
    }

    // Non-OK response: prefer the parsed JSON error body when the
    // object exposes a json() method; otherwise fall back to whatever
    // detail is attached to the response itself.
    if (response.json) {
      response.json().then((error) => reject(error));
      return;
    }

    reject(response.detail || response.body || response);
  });
}

122
examples/node/main.js Normal file
View file

@ -0,0 +1,122 @@
import fs from 'fs';
import FormData from 'form-data';
import fetch from './fetch.js';
// End-to-end walkthrough of the Cognee HTTP API: authenticate as the
// default user, upload a PDF into the "main" dataset, run cognify over
// it, fetch the graph visualization URL, then exercise the three search
// modes (SUMMARIES, CHUNKS, INSIGHTS). All results are logged to the
// console; any failure is caught and logged at the bottom.
async function run() {
try {
// Default user is created automatically, you can create a new user if needed.
// const registerResponse = await fetch('/v1/auth/register', {
// method: 'POST',
// body: {
// email: 'default_user@example.com',
// password: 'default_password',
// is_active: true,
// is_superuser: true,
// is_verified: true
// },
// headers: {
// 'Content-Type': 'application/json',
// },
// });
// const user = await registerResponse.json();
// OAuth2 password flow: the login endpoint expects form-encoded
// username/password fields, not JSON.
const authCredentials = new FormData();
authCredentials.append('username', 'default_user@example.com');
authCredentials.append('password', 'default_password');
const loginResponse = await fetch('/v1/auth/login', {
method: 'POST',
body: authCredentials,
});
// The token from the login response authenticates every later call.
const bearer = await loginResponse.json();
const token = bearer.access_token;
// List existing datasets (also verifies the token works).
const response = await fetch('/v1/datasets', {}, token);
const datasets = await response.json();
console.log(datasets);
const files = [
fs.createReadStream('../data/artificial_intelligence.pdf'),
];
// Upload the file(s) as multipart form data into the "main" dataset.
const addData = new FormData();
files.forEach((file) => {
addData.append('data', file, file.name);
})
addData.append('datasetId', 'main');
await fetch('/v1/add', {
method: 'POST',
body: addData,
headers: addData.getHeaders(),
}, token);
// Run the cognify pipeline over the uploaded dataset; must complete
// before the graph and search endpoints return useful results.
await fetch('/v1/cognify', {
method: 'POST',
body: JSON.stringify({
datasets: ['main'],
}),
headers: {
'Content-Type': 'application/json',
}
}, token);
// The graph endpoint returns a plain-text visualization URL.
const graphResponse = await fetch('/v1/datasets/main/graph', {
method: 'GET',
}, token);
const graphUrl = await graphResponse.text();
console.log('Graph URL:', graphUrl);
// Search for summaries
const summariesResponse = await fetch('/v1/search', {
method: 'POST',
body: JSON.stringify({
searchType: 'SUMMARIES',
query: 'Artificial Intelligence',
}),
headers: {
'Content-Type': 'application/json',
}
}, token);
const summariesResults = await summariesResponse.json();
console.log('Summaries Results:', summariesResults);
// Search for chunks
const chunksResponse = await fetch('/v1/search', {
method: 'POST',
body: JSON.stringify({
searchType: 'CHUNKS',
query: 'Artificial Intelligence',
}),
headers: {
'Content-Type': 'application/json',
}
}, token);
const chunksResults = await chunksResponse.json();
console.log('Chunks Results:', chunksResults);
// Search for insights
const insightsResponse = await fetch('/v1/search', {
method: 'POST',
body: JSON.stringify({
searchType: 'INSIGHTS',
query: 'Artificial Intelligence',
}),
headers: {
'Content-Type': 'application/json',
}
}, token);
const insightsResults = await insightsResponse.json();
console.log('Insights Results:', insightsResults);
} catch (error) {
console.error('Error:', error);
}
}
run();

156
examples/node/package-lock.json generated Normal file
View file

@ -0,0 +1,156 @@
{
"name": "node-example",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "node-example",
"version": "1.0.0",
"dependencies": {
"form-data": "^4.0.1",
"node-fetch": "^3.3.2"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/data-uri-to-buffer": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz",
"integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==",
"engines": {
"node": ">= 12"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/fetch-blob": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "paypal",
"url": "https://paypal.me/jimmywarting"
}
],
"dependencies": {
"node-domexception": "^1.0.0",
"web-streams-polyfill": "^3.0.3"
},
"engines": {
"node": "^12.20 || >= 14.13"
}
},
"node_modules/form-data": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz",
"integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/formdata-polyfill": {
"version": "4.0.10",
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
"dependencies": {
"fetch-blob": "^3.1.2"
},
"engines": {
"node": ">=12.20.0"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/node-domexception": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "github",
"url": "https://paypal.me/jimmywarting"
}
],
"engines": {
"node": ">=10.5.0"
}
},
"node_modules/node-fetch": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz",
"integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==",
"dependencies": {
"data-uri-to-buffer": "^4.0.0",
"fetch-blob": "^3.1.4",
"formdata-polyfill": "^4.0.10"
},
"engines": {
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/node-fetch"
}
},
"node_modules/web-streams-polyfill": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz",
"integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==",
"engines": {
"node": ">= 8"
}
}
}
}

View file

@ -0,0 +1,14 @@
{
"type": "module",
"name": "node-example",
"version": "1.0.0",
"description": "Node example calling Cognee API",
"main": "main.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"dependencies": {
"form-data": "^4.0.1",
"node-fetch": "^3.3.2"
}
}

View file

@ -548,7 +548,7 @@
"#GRAPH_DATABASE_USERNAME=\"\"\n",
"#GRAPH_DATABASE_PASSWORD=\"\"\n",
"\n",
"os.environ[\"VECTOR_ENGINE_PROVIDER\"]=\"lancedb\" # \"qdrant\", \"weaviate\" or \"lancedb\"\n",
"os.environ[\"VECTOR_DB_PROVIDER\"]=\"lancedb\" # \"qdrant\", \"weaviate\" or \"lancedb\"\n",
"# Not needed if using \"lancedb\"\n",
"# os.environ[\"VECTOR_DB_URL\"]=\"\"\n",
"# os.environ[\"VECTOR_DB_KEY\"]=\"\"\n",

12
poetry.lock generated
View file

@ -1350,13 +1350,13 @@ files = [
[[package]]
name = "dlt"
version = "1.0.0"
version = "1.2.0"
description = "dlt is an open-source python-first scalable data loading library that does not require any backend to run."
optional = false
python-versions = "<3.13,>=3.8.1"
files = [
{file = "dlt-1.0.0-py3-none-any.whl", hash = "sha256:730474cadcbc8151854d2c2999099225df3fe2b03fcfe716bc42e0b1a7707484"},
{file = "dlt-1.0.0.tar.gz", hash = "sha256:757ca3b1fe19d47720f22ad45d0642077ccafe2e64094ef30da478ca50a392c4"},
{file = "dlt-1.2.0-py3-none-any.whl", hash = "sha256:85256c0f87fe3cc1eedc390e6e3a31820250ac1f75bb9510bcf4085d069427ce"},
{file = "dlt-1.2.0.tar.gz", hash = "sha256:3e3c8604ea2fb213f0901cecab018909570824e5addbb45954c2c274f1439b2c"},
]
[package.dependencies]
@ -1397,12 +1397,12 @@ clickhouse = ["adlfs (>=2022.4.0)", "clickhouse-connect (>=0.7.7)", "clickhouse-
databricks = ["databricks-sql-connector (>=2.9.3)"]
deltalake = ["deltalake (>=0.19.0)", "pyarrow (>=12.0.0)"]
dremio = ["pyarrow (>=12.0.0)"]
duckdb = ["duckdb (>=0.6.1,<0.11)"]
duckdb = ["duckdb (>=0.9)"]
filesystem = ["botocore (>=1.28)", "s3fs (>=2022.4.0)"]
gcp = ["gcsfs (>=2022.4.0)", "google-cloud-bigquery (>=2.26.0)", "grpcio (>=1.50.0)"]
gs = ["gcsfs (>=2022.4.0)"]
lancedb = ["lancedb (>=0.8.2)", "pyarrow (>=12.0.0)", "tantivy (>=0.22.0)"]
motherduck = ["duckdb (>=0.6.1,<0.11)", "pyarrow (>=12.0.0)"]
motherduck = ["duckdb (>=0.9)", "pyarrow (>=12.0.0)"]
mssql = ["pyodbc (>=4.0.39)"]
parquet = ["pyarrow (>=12.0.0)"]
postgres = ["psycopg2-binary (>=2.9.1)", "psycopg2cffi (>=2.9.0)"]
@ -7765,4 +7765,4 @@ weaviate = ["weaviate-client"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9.0,<3.12"
content-hash = "9ce846c0cdd6f980fac43d22ea1f046f485fb42270a5cfe04fed061ea74a4f8c"
content-hash = "34f873038262904af283d31d4c3c68fd535ca7b36c6837fc97c0d1cc31cf89cb"

View file

@ -38,7 +38,7 @@ greenlet = "^3.0.3"
ruff = "^0.2.2"
filetype = "^1.2.0"
nltk = "^3.8.1"
dlt = {extras = ["sqlalchemy"], version = "^1.0.0"}
dlt = {extras = ["sqlalchemy"], version = "^1.2.0"}
overrides = "^7.7.0"
aiofiles = "^23.2.1"
qdrant-client = "^1.9.0"
@ -70,9 +70,6 @@ sentry-sdk = {extras = ["fastapi"], version = "^2.9.0"}
fastapi-users = { version = "*", extras = ["sqlalchemy"] }
asyncpg = "^0.29.0"
alembic = "^1.13.3"
pgvector = "^0.3.5"
[tool.poetry.extras]
filesystem = ["s3fs", "botocore"]
@ -99,7 +96,6 @@ mkdocs-jupyter = "^0.24.6"
mkdocs-minify-plugin = "^0.8.0"
mkdocs-redirects = "^1.2.1"
[tool.poetry.group.test-docs.dependencies]
fastapi = "^0.109.2"
diskcache = "^5.6.3"
@ -111,6 +107,7 @@ optional = true
[tool.poetry.group.postgres.dependencies]
psycopg2 = "^2.9.10"
pgvector = "^0.3.5"
[tool.ruff] # https://beta.ruff.rs/docs/
line-length = 100