fix: add python module resolution root and remove loading of env variables

Boris Arzentar 2024-02-19 18:43:55 +01:00
parent 99b1073560
commit f2832fa6cd
20 changed files with 158 additions and 194 deletions

api.py
View file

@ -1,23 +1,9 @@
import json
import logging
import os
from enum import Enum
from typing import Dict, Any
import json
import uvicorn
from fastapi import FastAPI, BackgroundTasks, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from cognitive_architecture.database.relationaldb.database import AsyncSessionLocal
from cognitive_architecture.database.relationaldb.database_crud import session_scope
from cognitive_architecture.vectorstore_manager import Memory
from dotenv import load_dotenv
from main import add_documents_to_graph_db, user_context_enrichment
from cognitive_architecture.config import Config
from fastapi import Depends
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
import logging
# Set up logging
logging.basicConfig(
@ -27,11 +13,23 @@ logging.basicConfig(
logger = logging.getLogger(__name__)
load_dotenv()
from cognitive_architecture.config import Config
config = Config()
config.load()
from typing import Dict, Any
from fastapi import FastAPI, BackgroundTasks, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel
from cognitive_architecture.database.relationaldb.database import AsyncSessionLocal
from cognitive_architecture.database.relationaldb.database_crud import session_scope
from cognitive_architecture.vectorstore_manager import Memory
from main import add_documents_to_graph_db, user_context_enrichment
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
app = FastAPI(debug=True)
#
# from auth.cognito.JWTBearer import JWTBearer

View file

@ -29,7 +29,7 @@ class Config:
db_path = Path(__file__).resolve().parent / "database/data"
vectordb: str = os.getenv("VECTORDB", "weaviate")
db_type: str = os.getenv("DB_TYPE", "postgres")
db_type: str = os.getenv("DB_TYPE", "sqlite")
db_name: str = os.getenv("DB_NAME", "cognee.db")
db_host: str = os.getenv("DB_HOST", "localhost")
db_port: str = os.getenv("DB_PORT", "5432")

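This hunk flips the relational database default from postgres to sqlite (the standalone postgres service is commented out of docker-compose further below). A minimal sketch, not part of the diff, of how the fallback resolves when no DB_TYPE is exported:

import os

db_type = os.getenv("DB_TYPE", "sqlite")     # -> "sqlite" when DB_TYPE is unset
db_name = os.getenv("DB_NAME", "cognee.db")  # -> "cognee.db" unless overridden
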
View file

@ -1,72 +0,0 @@
"""This module provides functionalities for creating and managing databases."""
import asyncio
import os
import logging
from contextlib import asynccontextmanager
from sqlalchemy.ext.asyncio import create_async_engine
from relationaldb.models import memory, metadatas, operation, sessions, user, docs
from sqlalchemy import create_engine, text
from dotenv import load_dotenv
from relationaldb.database import (
Base,get_sqlalchemy_database_url)
from cognitive_architecture.config import Config
config = Config()
config.load()
load_dotenv()
logger = logging.getLogger(__name__)
class DatabaseManager:
"""Manages database creation, deletion, and table initialization."""
def __init__(self):
"""Initialize the Database Url with a given configuration."""
self.engine = create_async_engine(get_sqlalchemy_database_url(), echo=True)
self.db_type = config.db_type
@asynccontextmanager
async def get_connection(self):
"""Initialize the DatabaseManager with a given configuration."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, the engine itself manages connections
yield self.engine
else:
async with self.engine.connect() as connection:
yield connection
async def database_exists(self, db_name):
"""Check if a database exists."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, check if the database file exists
return os.path.exists(db_name)
else:
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
async with self.get_connection() as connection:
result = await connection.execute(query)
return await result.fetchone() is not None
async def create_database(self, db_name):
"""Create a new database."""
if self.db_type not in ["sqlite", "duckdb"]:
# For databases like PostgreSQL, create the database explicitly
async with self.get_connection() as connection:
await connection.execute(text(f"CREATE DATABASE {db_name}"))
async def drop_database(self, db_name):
"""Drop an existing database."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, simply remove the database file
os.remove(db_name)
else:
async with self.get_connection() as connection:
await connection.execute(text(f"DROP DATABASE IF EXISTS {db_name}"))
async def create_tables(self):
"""Create tables based on the SQLAlchemy Base metadata."""
async with self.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)

Binary file not shown.

View file

@ -1,30 +1,67 @@
"""This module is used to create the database and tables for the cognitive architecture."""
import asyncio
import logging
"""This module provides functionalities for creating and managing databases."""
from dotenv import load_dotenv
from cognitive_architecture.config import Config
from cognitive_architecture.database.create_database import DatabaseManager
import os
from contextlib import asynccontextmanager
from sqlalchemy import create_engine, text
from sqlalchemy.ext.asyncio import create_async_engine
from config import Config
from database.relationaldb.database import Base, get_sqlalchemy_database_url
from database.relationaldb.models import memory, metadatas, operation, sessions, user, docs
globalConfig = Config()
class DatabaseManager:
"""Manages database creation, deletion, and table initialization."""
def __init__(self):
"""Initialize the Database Url with a given configuration."""
self.engine = create_async_engine(get_sqlalchemy_database_url("sqlite"), echo = True)
self.db_type = globalConfig.db_type
@asynccontextmanager
async def get_connection(self):
"""Initialize the DatabaseManager with a given configuration."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, the engine itself manages connections
yield self.engine
else:
async with self.engine.connect() as connection:
yield connection
async def database_exists(self, db_name):
"""Check if a database exists."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, check if the database file exists
return os.path.exists(db_name)
else:
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
async with self.get_connection() as connection:
result = await connection.execute(query)
return await result.fetchone() is not None
async def create_database(self, db_name):
"""Create a new database."""
if self.db_type not in ["sqlite", "duckdb"]:
# For databases like PostgreSQL, create the database explicitly
async with self.get_connection() as connection:
await connection.execute(text(f"CREATE DATABASE {db_name}"))
async def drop_database(self, db_name):
"""Drop an existing database."""
if self.db_type in ["sqlite", "duckdb"]:
# For SQLite and DuckDB, simply remove the database file
os.remove(db_name)
else:
async with self.get_connection() as connection:
await connection.execute(text(f"DROP DATABASE IF EXISTS {db_name}"))
async def create_tables(self):
"""Create tables based on the SQLAlchemy Base metadata."""
try:
async with self.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
except Exception as e:
print(e)
raise e
config = Config()
config.load()
load_dotenv()
logger = logging.getLogger(__name__)
async def main():
"""Runs as a part of startup docker scripts to create the database and tables."""
db_manager = DatabaseManager()
database_name = config.db_name
if not await db_manager.database_exists(database_name):
print(f"Database {database_name} does not exist. Creating...")
await db_manager.create_database(database_name)
print(f"Database {database_name} created successfully.")
await db_manager.create_tables()
if __name__ == "__main__":
asyncio.run(main())

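The DatabaseManager class now lives next to the bootstrap main() and builds its engine from get_sqlalchemy_database_url("sqlite"). A rough usage sketch, assuming the cognitive_architecture directory is importable (see PYTHONPATH=. in docker-compose below); it mirrors what the new setup_database.py further down does:

import asyncio

from database.database_manager import DatabaseManager  # module path as used by setup_database.py

async def bootstrap(db_name: str = "cognee.db") -> None:
    manager = DatabaseManager()
    if not await manager.database_exists(db_name):  # file check for sqlite/duckdb, pg_database query otherwise
        await manager.create_database(db_name)      # explicit CREATE DATABASE only for server databases
    await manager.create_tables()                   # Base.metadata.create_all via run_sync

asyncio.run(bootstrap())
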
View file

@ -10,7 +10,6 @@ import networkx as nx
from langchain.graphs import Neo4jGraph
import os
from dotenv import load_dotenv
import openai
import instructor
@ -41,7 +40,6 @@ from typing import Any, Dict, Optional, List
DEFAULT_PRESET = "promethai_chat"
preset_options = [DEFAULT_PRESET]
PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".")
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
from ...config import Config

View file

@ -1,21 +1,25 @@
"""Database configuration and connection."""
from pathlib import Path
from contextlib import asynccontextmanager
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
from sqlalchemy.orm import declarative_base, sessionmaker
from dotenv import load_dotenv
# from contextlib import asynccontextmanager
from sqlalchemy.ext.asyncio import create_async_engine, async_sessionmaker, AsyncSession
from sqlalchemy.orm import declarative_base
from cognitive_architecture.config import Config
config = Config()
load_dotenv()
globalConfig = Config()
# in seconds
MAX_RETRIES = 3
RETRY_DELAY = 5
def get_sqlalchemy_database_url(db_type='sqlite', db_name=config.db_name, base_path=config.db_path, user=config.db_user, password=config.db_password, host=config.db_host, port=config.db_port):
def get_sqlalchemy_database_url(
db_type = globalConfig.db_type,
db_name = globalConfig.db_name,
base_path = globalConfig.db_path,
user = globalConfig.db_user,
password = globalConfig.db_password,
host = globalConfig.db_host,
port = globalConfig.db_port,
):
"""Get the SQLAlchemy database URL based on parameters."""
db_path = (Path(base_path) / db_name).absolute()
if db_type == "sqlite":
@ -39,11 +43,11 @@ SQLALCHEMY_DATABASE_URL = get_sqlalchemy_database_url()
engine = create_async_engine(
SQLALCHEMY_DATABASE_URL,
pool_recycle=3600,
echo=config.sqlalchemy_logging,
echo=globalConfig.sqlalchemy_logging,
)
AsyncSessionLocal = sessionmaker(
AsyncSessionLocal = async_sessionmaker(
bind=engine,
class_=AsyncSession,
expire_on_commit=False,
@ -51,13 +55,13 @@ AsyncSessionLocal = sessionmaker(
Base = declarative_base()
@asynccontextmanager
async def get_db():
"""Provide a database session to the context."""
db = AsyncSessionLocal()
try:
yield db
finally:
await db.close()
# @asynccontextmanager
# async def get_db():
# """Provide a database session to the context."""
# db = AsyncSessionLocal()
# try:
# yield db
# finally:
# await db.close()

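Two changes in this file: get_sqlalchemy_database_url now takes every default from the loaded Config (globalConfig) instead of dotenv-backed values, and the session factory moves from sessionmaker(class_=AsyncSession) to SQLAlchemy 2.0's async_sessionmaker, with the old get_db context manager commented out. A minimal usage sketch of the new factory, assuming the sqlite defaults:

import asyncio

from sqlalchemy import text

from cognitive_architecture.database.relationaldb.database import AsyncSessionLocal

async def ping() -> None:
    async with AsyncSessionLocal() as session:   # async_sessionmaker yields an AsyncSession and closes it on exit
        result = await session.execute(text("SELECT 1"))
        print(result.scalar_one())               # -> 1

asyncio.run(ping())
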
View file

@ -1,9 +1,6 @@
# memory.py
from datetime import datetime
from sqlalchemy import Column, String, DateTime, ForeignKey
from sqlalchemy.orm import relationship
import os
import sys
from ..database import Base

View file

@ -11,7 +11,6 @@ print(os.getcwd())
logging.basicConfig(level=logging.INFO)
# import marvin
import requests
from dotenv import load_dotenv
from langchain.document_loaders import PyPDFLoader
from langchain.retrievers import WeaviateHybridSearchRetriever
from weaviate.gql.get import HybridFusion
@ -24,7 +23,6 @@ from cognitive_architecture.database.relationaldb.models.docs import DocsModel
from sqlalchemy.orm import sessionmaker
from cognitive_architecture.database.relationaldb.database import engine
load_dotenv()
from typing import Optional
import time
import tracemalloc

View file

@ -16,16 +16,9 @@ import tracemalloc
tracemalloc.start()
import os
from langchain.embeddings.openai import OpenAIEmbeddings
from dotenv import load_dotenv
from langchain.schema import Document
import weaviate
load_dotenv()
from ...config import Config
config = Config()
config.load()
LTM_MEMORY_ID_DEFAULT = "00000"
ST_MEMORY_ID_DEFAULT = "0000"
BUFFER_ID_DEFAULT = "0000"
@ -371,11 +364,6 @@ class WeaviateVectorDB(VectorDB):
.with_hybrid(query=observation, fusion_type=HybridFusion.RELATIVE_SCORE)
)
query_output = base_query.do()
# from weaviate.classes import Filter
# client = weaviate.connect_to_wcs(
# cluster_url=config.weaviate_url,
# auth_credentials=weaviate.AuthApiKey(config.weaviate_api_key)
# )
return query_output
elif search_type == "generate":

View file

@ -53,7 +53,7 @@ ENV_FILE_PATH = os.path.abspath("../.env")
if os.path.exists(ENV_FILE_PATH):
# Load default environment variables (.env)
load_dotenv()
print("Cognee is already running...")
print("Environment variables are already loaded.")
else:
fetch_secret(
f"promethai-{environment}-backend-secretso-promethaijs-dotenv",

View file

@ -1,7 +1,5 @@
import os
from dotenv import load_dotenv
from ..shared.data_models import Node, Edge, KnowledgeGraph, GraphQLQuery, MemorySummary
from ..config import Config
import instructor
@ -17,7 +15,6 @@ OPENAI_API_KEY = config.openai_key
aclient = instructor.patch(OpenAI())
load_dotenv()
import logging

View file

@ -0,0 +1,27 @@
"""This module is used to create the database and tables for the cognitive architecture."""
import logging
logger = logging.getLogger(__name__)
async def main():
"""Runs as a part of startup docker scripts to create the database and tables."""
from config import Config
config = Config()
config.load()
from database.database_manager import DatabaseManager
db_manager = DatabaseManager()
database_name = config.db_name
if not await db_manager.database_exists(database_name):
print(f"Database {database_name} does not exist. Creating...")
await db_manager.create_database(database_name)
print(f"Database {database_name} created successfully.")
await db_manager.create_tables()
if __name__ == "__main__":
import asyncio
asyncio.run(main())

View file

@ -2,9 +2,6 @@ import boto3
from botocore.exceptions import BotoCoreError, ClientError
from langdetect import detect, LangDetectException
import iso639
from dotenv import load_dotenv
load_dotenv()
import logging

View file

@ -13,10 +13,6 @@ from cognitive_architecture.database.relationaldb.models.user import User
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
import logging
from cognitive_architecture.database.relationaldb.database import AsyncSessionLocal
from dotenv import load_dotenv
load_dotenv()
class Node:

View file

@ -1,21 +1,19 @@
import logging
from sqlalchemy.future import select
logging.basicConfig(level=logging.INFO)
from sqlalchemy.future import select
from cognitive_architecture.database.relationaldb.models.user import User
from cognitive_architecture.database.relationaldb.models.memory import MemoryModel
import ast
import tracemalloc
from cognitive_architecture.database.relationaldb.database_crud import add_entity
tracemalloc.start()
from dotenv import load_dotenv
import uuid
from cognitive_architecture.database.vectordb.basevectordb import BaseMemory
from cognitive_architecture.config import Config
load_dotenv()
config = Config()
config.load()
globalConfig = Config()
class DynamicBaseMemory(BaseMemory):
def __init__(
@ -121,7 +119,7 @@ class Memory:
user_id: str = "676",
session=None,
index_name: str = None,
db_type: str = config.vectordb,
db_type: str = globalConfig.vectordb,
namespace: str = None,
memory_id: str = None,
memory_class=None,
@ -142,9 +140,8 @@ class Memory:
# )
def load_environment_variables(self) -> None:
load_dotenv()
self.OPENAI_TEMPERATURE = config.openai_temperature
self.OPENAI_API_KEY = config.openai_key
self.OPENAI_TEMPERATURE = globalConfig.openai_temperature
self.OPENAI_API_KEY = globalConfig.openai_key
@classmethod
async def create_memory(
@ -194,7 +191,7 @@ class Memory:
user_id,
str(memory_id),
index_name=memory_label,
db_type=config.vectordb,
db_type=globalConfig.vectordb,
**kwargs,
)

View file

@ -25,6 +25,7 @@ services:
environment:
- HOST=0.0.0.0
- ENVIRONMENT=local
- PYTHONPATH=.
profiles: ["exclude-from-up"]
ports:
- 8000:8000
@ -32,8 +33,9 @@ services:
- 80:80
- 50051:50051
- 5678:5678
- 5432:5432
depends_on:
- postgres
# - postgres
- neo4j
deploy:
resources:
@ -41,18 +43,18 @@ services:
cpus: "4.0"
memory: 8GB
postgres:
image: postgres
container_name: postgres
environment:
- POSTGRES_HOST_AUTH_METHOD=trust
- POSTGRES_USER=bla
- POSTGRES_PASSWORD=bla
- POSTGRES_DB=bubu
networks:
- cognee_backend
ports:
- "5432:5432"
# postgres:
# image: postgres
# container_name: postgres
# environment:
# - POSTGRES_HOST_AUTH_METHOD=trust
# - POSTGRES_USER=bla
# - POSTGRES_PASSWORD=bla
# - POSTGRES_DB=bubu
# networks:
# - cognee_backend
# ports:
# - "5432:5432"
networks:
cognee_backend:

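PYTHONPATH=. is the "python module resolution root" from the commit title: with the repository root on the module path, absolute imports such as cognitive_architecture.config resolve inside the container without installing the package, while the standalone postgres service moves into comments now that sqlite is the default. A rough local equivalent for running scripts outside compose (illustrative, not part of the diff):

import sys
from pathlib import Path

repo_root = Path(__file__).resolve().parent           # assumes this snippet sits in the repository root
sys.path.insert(0, str(repo_root))                    # mirrors PYTHONPATH=. in the compose service
from cognitive_architecture.config import Config      # now importable as a top-level package
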
View file

@ -17,11 +17,11 @@ else
echo '"local" environment is active, skipping fetch_secret.py'
fi
echo "Running create_database.py"
echo "Creating database..."
python cognitive_architecture/database/create_database.py
python cognitive_architecture/setup_database.py
if [ $? -ne 0 ]; then
echo "Error: create_database.py failed"
echo "Error: setup_database.py failed"
exit 1
fi

View file

@ -5,7 +5,6 @@ from pydantic import BaseModel, Field
from cognitive_architecture.database.graphdb.graph import Neo4jGraphDB
from cognitive_architecture.database.relationaldb.models.memory import MemoryModel
import os
from dotenv import load_dotenv
from cognitive_architecture.database.relationaldb.database_crud import (
session_scope,
update_entity_graph_summary,
@ -38,12 +37,12 @@ aclient = instructor.patch(OpenAI())
DEFAULT_PRESET = "promethai_chat"
preset_options = [DEFAULT_PRESET]
PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".")
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
from cognitive_architecture.config import Config
config = Config()
config.load()
from cognitive_architecture.utils import get_document_names
from sqlalchemy.orm import selectinload, joinedload, contains_eager
import logging

View file

@ -63,6 +63,7 @@ debugpy = "^1.8.0"
lancedb = "^0.5.5"
pyarrow = "^15.0.0"
pylint = "^3.0.3"
aiosqlite = "^0.19.0"
[build-system]
requires = ["poetry-core"]
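aiosqlite is the async driver that lets create_async_engine talk to the new sqlite default. A minimal sketch of the URL form it enables; the path shown is illustrative, the real one comes from get_sqlalchemy_database_url:

from sqlalchemy.ext.asyncio import create_async_engine

# sqlite+aiosqlite is the dialect/driver pair for an async, file-backed SQLite engine
engine = create_async_engine("sqlite+aiosqlite:///cognitive_architecture/database/data/cognee.db", echo=False)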