chore: add debugpy and update readme

This commit is contained in:
Boris Arzentar 2024-02-15 10:13:19 +01:00
parent 817df19bc7
commit 47c3463406
16 changed files with 191 additions and 370 deletions

1
.gitignore vendored
View file

@ -162,3 +162,4 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder. # option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/ #.idea/
.vscode/

View file

@ -1,13 +1,15 @@
FROM python:3.11 FROM python:3.11
# Set build argument # Set build argument
ARG DEBUG
ARG API_ENABLED ARG API_ENABLED
# Set environment variable based on the build argument # Set environment variable based on the build argument
ENV API_ENABLED=${API_ENABLED} \ ENV DEBUG=${DEBUG}
PIP_NO_CACHE_DIR=true ENV API_ENABLED=${API_ENABLED}
ENV PIP_NO_CACHE_DIR=true
ENV PATH="${PATH}:/root/.poetry/bin" ENV PATH="${PATH}:/root/.poetry/bin"
RUN pip install poetry RUN pip install poetry
WORKDIR /app WORKDIR /app
@ -15,6 +17,7 @@ COPY pyproject.toml poetry.lock /app/
# Install the dependencies # Install the dependencies
RUN poetry config virtualenvs.create false && \ RUN poetry config virtualenvs.create false && \
poetry lock --no-update && \
poetry install --no-root --no-dev poetry install --no-root --no-dev
RUN apt-get update -q && \ RUN apt-get update -q && \
@ -37,15 +40,13 @@ RUN apt-get update -q && \
/tmp/* \ /tmp/* \
/var/tmp/* /var/tmp/*
WORKDIR /app WORKDIR /app
COPY cognitive_architecture/ /app/cognitive_architecture COPY cognitive_architecture/ /app/cognitive_architecture
COPY main.py /app COPY main.py /app
COPY api.py /app COPY api.py /app
COPY entrypoint.sh /app/entrypoint.sh COPY entrypoint.sh /app/entrypoint.sh
RUN chmod +x /app/entrypoint.sh RUN chmod +x /app/entrypoint.sh
ENTRYPOINT ["/app/entrypoint.sh"] ENTRYPOINT ["/app/entrypoint.sh"]

188
README.md
View file

@ -2,107 +2,114 @@
AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps
<p>
<a href="https://cognee.ai" target="_blank">
<p align="left"> <img src="assets/cognee-logo.png" width="160px" alt="Cognee logo" />
<a href="https://prometh.ai//#gh-light-mode-only">
<img src="assets/topoteretes_logo.png" width="5%" alt="promethAI logo" />
</a> </a>
</p> </p>
<p align="left"><i>Open-source framework for building and testing RAGs and Cognitive Architectures, designed for accuracy, transparency, and control.</i></p> <p>
<i>Open-source framework for building and testing RAGs and Cognitive Architectures, designed for accuracy, transparency, and control.</i>
<p align="left">
<a href="https://github.com/topoteretes/cognee/fork" target="blank">
<img src="https://img.shields.io/github/forks/topoteretes/cognee?style=for-the-badge" alt="cognee forks"/>
</a>
<a href="https://github.com/topoteretes/cognee/stargazers" target="blank">
<img src="https://img.shields.io/github/stars/topoteretes/cognee?style=for-the-badge" alt="cognee stars"/>
</a>
<a href="https://github.com/topoteretes/cognee/pulls" target="blank">
<img src="https://img.shields.io/github/issues-pr/topoteretes/cognee?style=for-the-badge" alt="cognee pull-requests"/>
</a>
<a href='https://github.com/topoteretes/cognee/releases'>
<img src='https://img.shields.io/github/release/topoteretes/cognee?&label=Latest&style=for-the-badge'>
</a>
</p> </p>
[//]: # (<p align="center"><b>Follow PromethAI </b></p>) <p>
<a href="https://github.com/topoteretes/cognee/fork">
[//]: # (<p align="center">) <img src="https://img.shields.io/github/forks/topoteretes/cognee?style=for-the-badge" alt="cognee forks"/>
</a>
[//]: # (<a href="https://twitter.com/_promethAI" target="blank">) <a href="https://github.com/topoteretes/cognee/stargazers">
<img src="https://img.shields.io/github/stars/topoteretes/cognee?style=for-the-badge" alt="cognee stars"/>
[//]: # (<img src="https://img.shields.io/twitter/follow/_promethAI?label=Follow: _promethAI&style=social" alt="Follow _promethAI"/>) </a>
<a href="https://github.com/topoteretes/cognee/pulls">
[//]: # (</a>) <img src="https://img.shields.io/github/issues-pr/topoteretes/cognee?style=for-the-badge" alt="cognee pull-requests"/>
</a>
[//]: # (<p align="center">) <a href="https://github.com/topoteretes/cognee/releases">
<img src="https://img.shields.io/github/release/topoteretes/cognee?&label=Latest&style=for-the-badge" alt="cognee releases"> [//]: # (<a href="https://prometh.ai" target="_blank"><img src="https://img.shields.io/twitter/url?label=promethAI Website&logo=website&style=social&url=https://github.com/topoteretes/PromethAI-Memory"/></a>) </a>
[//]: # (<a href="https://prometh.ai" target="_blank"><img src="https://img.shields.io/twitter/url?label=promethAI Website&logo=website&style=social&url=https://github.com/topoteretes/PromethAI-Memory"/></a>) </a>
</p>
[//]: # (<p align="center">)
[//]: # (<a href="https://www.youtube.com/@_promethAI" target="_blank"><img src="https://img.shields.io/twitter/url?label=Youtube&logo=youtube&style=social&url=https://github.com/topoteretes/PromethAI-Memory"/></a>)
[//]: # (</p>)
<p align="left"><b>Share cognee Repository</b></p>
<p align="left">
<a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee&hashtags=promethAI,AGI,Autonomics,future" target="blank">
<img src="https://img.shields.io/twitter/follow/_promethAI?label=Share Repo on Twitter&style=social" alt="Follow _promethAI"/></a>
<a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee" target="_blank"><img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Telegram"/></a>
<a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%20https://github.com/topoteretes/cognee"><img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/topoteretes/cognee" /></a> <a href="https://www.reddit.com/submit?url=https://github.com/topoteretes/cognee&title=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.
" target="blank">
<img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Reddit"/>
</a> <a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.&body=promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%3A%0Ahttps://github.com/topoteretes/cognee" target="_blank"><img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/topoteretes/cognee"/></a> <a href="https://www.buymeacoffee.com/promethAI" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px"></a>
<p>
<b>Share cognee Repository</b>
</p>
<p>
<a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee&hashtags=AGI,Autonomics,Cognee,future" target="_blank">
<img src="https://img.shields.io/twitter/follow/_promethAI?label=Share Repo on Twitter&style=social" alt="Follow Cognee"/>
</a>
<a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Telegram"/>
</a>
<a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%20https://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/topoteretes/cognee" />
</a>
<a href="https://www.reddit.com/submit?url=https://github.com/topoteretes/cognee&title=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.
" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Reddit"/>
</a>
<a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.&body=Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%3A%0Ahttps://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/topoteretes/cognee"/>
</a>
<a href="https://www.buymeacoffee.com/promethAI" target="_blank">
<img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px">
</a>
</p> </p>
<hr> <hr>
[Star us on Github!](https://www.github.com/topoteretes/cognee) [Star us on Github!](https://www.github.com/topoteretes/cognee)
Jump into the world of RAG architecture, inspired by human cognitive processes, using Python. Jump into the world of RAG architecture, inspired by human cognitive processes, using Python.
[cognee](www.cognee.ai) runs in iterations, from POC towards production ready code. <a href="https://www.cognee.ai" target="_blank">Cognee</a> runs in iterations, from POC towards production ready code.
To read more about the approach and details on cognitive architecture, see the blog post: [AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps](https://topoteretes.notion.site/Going-beyond-Langchain-Weaviate-and-towards-a-production-ready-modern-data-platform-7351d77a1eba40aab4394c24bef3a278?pvs=4) To read more about the approach and details on cognitive architecture, see the blog post: <a href="https://topoteretes.notion.site/Going-beyond-Langchain-Weaviate-and-towards-a-production-ready-modern-data-platform-7351d77a1eba40aab4394c24bef3a278?pvs=4" target="_blank">AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps</a>
Try it yourself on Whatsapp with one of our [partners](www.keepi.ai) by typing /save _content_ followed by /query _content_ Try it yourself on Whatsapp with one of our <a href="https://keepi.ai">partners</a> by typing `/save {content you want to save}` followed by `/query {knowledge you saved previously}`
## Getting started
### Get Started in Moments In order to run cognee you need to have <a href="https://docs.docker.com/get-docker" target="_blank">Docker</a> installed on your machine.
Running [cognee](www.cognee.ai) is a breeze. Simply run `cp env.example .env` and `docker compose up cognee` in your terminal. Run <a href="https://www.cognee.ai" target="_blank">Cognee</a> in a couple of steps:
Send API requests add-memory, user-query-to-graph, document-to-graph-db, user-query-processor to the localhost:8000 - Run `cp .env.template .env` in your terminal and set all the environment variables
- Run `docker compose up` in order to start graph and relational databases
- Run `docker compose up cognee` in order to start Cognee
### Current Focus <!-- Send API requests add-memory, user-query-to-graph, document-to-graph-db, user-query-processor to the localhost:8000 -->
#### Integration to keepi.ai and other apps ## Debugging
Uses Neo4j to map user preferences into a graph structure consisting of semantic, episodic, and procedural memory. In order to run Cognee with debugger attached you need to build the Cognee image with the `DEBUG` flag set to true.
Fetches information and stores information and files on Whatsapp chatbot using [keepi.ai](www.keepi.ai) - `docker compose build cognee --no-cache --build-arg DEBUG=true`
- `docker compose up cognee`
Uses the graph to answer user queries and store new information in the graph.
### Visual Studio Code debugger
### Architecture Add the following configuration to VSCode `Run and Debug` configurations array:
```json
![Image](https://github.com/topoteretes/PromethAI-Memory/blob/main/assets/img.png) {
"name": "Attach (remote debug)",
"type": "python",
"request": "attach",
"port": 5678,
"host": "127.0.0.1",
"pathMappings": [{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}]
}
```
It should look like this:
<img src="assets/vscode-debug-config.png" width="500px" />
## Current Focus
### Integration with keepi.ai and other apps
- Cognee uses Neo4j graph database to map user data into a graph structure consisting of semantic, episodic, and procedural memory.
- Stores data and files through the WhatsApp chatbot <a href="https://keepi.ai">keepi.ai</a>
- Uses the graph to answer user queries and store new information in the graph.
## Architecture
### How Cognee Enhances Your Contextual Memory ### How Cognee Enhances Your Contextual Memory
@ -112,15 +119,6 @@ Our framework for the OpenAI, Graph (Neo4j) and Vector (Weaviate) databases intr
- Document Topology: Structure and store documents in public and private domains. - Document Topology: Structure and store documents in public and private domains.
- Personalized Context: Provide a context object to the LLM for a better response. - Personalized Context: Provide a context object to the LLM for a better response.
</br>
![Image](assets/architecture.png)

41
api.py
View file

@ -15,6 +15,9 @@ from cognitive_architecture.vectorstore_manager import Memory
from dotenv import load_dotenv from dotenv import load_dotenv
from main import add_documents_to_graph_db, user_context_enrichment from main import add_documents_to_graph_db, user_context_enrichment
from cognitive_architecture.config import Config from cognitive_architecture.config import Config
from fastapi import Depends
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
# Set up logging # Set up logging
logging.basicConfig( logging.basicConfig(
@ -25,7 +28,10 @@ logging.basicConfig(
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
load_dotenv() load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
config = Config()
config.load()
app = FastAPI(debug=True) app = FastAPI(debug=True)
# #
# from auth.cognito.JWTBearer import JWTBearer # from auth.cognito.JWTBearer import JWTBearer
@ -33,31 +39,12 @@ app = FastAPI(debug=True)
# #
# auth = JWTBearer(jwks) # auth = JWTBearer(jwks)
from fastapi import Depends @app.get("/")
config = Config()
config.load()
@app.get(
"/",
)
"""
Root endpoint that returns a welcome message.
"""
async def root():
class ImageResponse(BaseModel):
success: bool
message: str
@app.get(
"/",
)
async def root(): async def root():
""" """
Root endpoint that returns a welcome message. Root endpoint that returns a welcome message.
""" """
return {"message": "Hello, World, I am alive!"} return { "message": "Hello, World, I am alive!" }
@app.get("/health") @app.get("/health")
@ -234,10 +221,11 @@ async def drop_db(payload: Payload):
else: else:
pass pass
from cognitive_architecture.database.create_database_tst import drop_database, create_admin_engine from cognitive_architecture.database.create_database import drop_database, create_admin_engine
engine = create_admin_engine(username, password, host, database_name) engine = create_admin_engine(username, password, host, database_name)
drop_database(engine) connection = engine.raw_connection()
drop_database(connection, database_name)
return JSONResponse(content={"response": "DB dropped"}, status_code=200) return JSONResponse(content={"response": "DB dropped"}, status_code=200)
else: else:
@ -249,10 +237,11 @@ async def drop_db(payload: Payload):
else: else:
pass pass
from cognitive_architecture.database.create_database_tst import create_database, create_admin_engine from cognitive_architecture.database.create_database import create_database, create_admin_engine
engine = create_admin_engine(username, password, host, database_name) engine = create_admin_engine(username, password, host, database_name)
create_database(engine) connection = engine.raw_connection()
create_database(connection, database_name)
return JSONResponse(content={"response": " DB drop"}, status_code=200) return JSONResponse(content={"response": " DB drop"}, status_code=200)

View file

Before

Width:  |  Height:  |  Size: 174 KiB

After

Width:  |  Height:  |  Size: 174 KiB

BIN
assets/cognee-logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.4 KiB

View file

Before

Width:  |  Height:  |  Size: 161 KiB

After

Width:  |  Height:  |  Size: 161 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View file

@ -1,6 +1,7 @@
import os import os
import json import json
import configparser import configparser
import logging
import uuid import uuid
from typing import Optional, List, Dict, Any from typing import Optional, List, Dict, Any
from dataclasses import dataclass, field from dataclasses import dataclass, field

View file

@ -1,110 +1,63 @@
# this is needed to import classes from other modules import os
# script_dir = os.path.dirname(os.path.abspath(__file__)) import logging
# # Get the parent directory of your script and add it to sys.path import psycopg2
# parent_dir = os.path.dirname(script_dir) from dotenv import load_dotenv
# sys.path.append(parent_dir) from postgres.database import Base
from sqlalchemy import create_engine, text
from postgres.models import memory from postgres.models import memory
from postgres.models import metadatas from postgres.models import metadatas
from postgres.models import operation from postgres.models import operation
from postgres.models import sessions from postgres.models import sessions
from postgres.models import user from postgres.models import user
from postgres.models import docs from postgres.models import docs
# from cognitive_architecture.config import Config
# config = Config()
# config.load()
from postgres.database import Base
from sqlalchemy import create_engine, text
import psycopg2
from dotenv import load_dotenv
load_dotenv() load_dotenv()
import os logger = logging.getLogger(__name__)
import os
if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
elif os.environ.get('AWS_ENV') == 'local':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
else:
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
def create_admin_engine(username, password, host, database_name): def create_admin_engine(username, password, host, database_name):
admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}" admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}"
return create_engine(admin_url) return create_engine(admin_url)
def database_exists(connection, db_name):
def database_exists(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.connect()
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'") query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
result = connection.execute(query).fetchone() result = connection.execute(query).fetchone()
connection.close()
engine.dispose()
return result is not None return result is not None
def create_database(connection, db_name):
def create_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor() cursor = connection.cursor()
cursor.execute(f"CREATE DATABASE {db_name}") cursor.execute(f"CREATE DATABASE {db_name}")
cursor.close() cursor.close()
connection.close()
engine.dispose()
def drop_database(connection, db_name):
def drop_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor() cursor = connection.cursor()
cursor.execute(f"DROP DATABASE IF EXISTS {db_name}") cursor.execute(f"DROP DATABASE IF EXISTS {db_name}")
cursor.close() cursor.close()
connection.close()
engine.dispose()
print(f"Database {db_name} dropped successfully.")
def create_tables(engine): def create_tables(engine):
Base.metadata.create_all(bind=engine) Base.metadata.create_all(bind = engine)
if __name__ == "__main__": if __name__ == "__main__":
# host = os.environ.get('POSTGRES_HOST')
# username = os.getenv('POSTGRES_USER') username = os.environ.get('POSTGRES_USER')
# password = os.getenv('POSTGRES_PASSWORD') password = os.environ.get('POSTGRES_PASSWORD')
# database_name = os.getenv('POSTGRES_DB') database_name = os.environ.get('POSTGRES_DB')
# environment = os.environ.get("ENV")
# host = config.postgres_host
# username = config.postgres_user
# password = config.postgres_password
# database_name = config.postgres_db
engine = create_admin_engine(username, password, host, database_name) engine = create_admin_engine(username, password, host, database_name)
connection = engine.connect()
print(Base.metadata.tables) # print(Base.metadata.tables)
if not database_exists(username, password, host, database_name): if not database_exists(connection, database_name):
print(f"Database {database_name} does not exist. Creating...") logger.info(f"Database {database_name} does not exist. Creating...")
create_database(username, password, host, database_name) create_database(connection, database_name)
print(f"Database {database_name} created successfully.") logger.info(f"Database {database_name} created successfully.")
connection.close()
engine.dispose()
create_tables(engine) create_tables(engine)

View file

@ -1,112 +0,0 @@
# this is needed to import classes from other modules
# script_dir = os.path.dirname(os.path.abspath(__file__))
# # Get the parent directory of your script and add it to sys.path
# parent_dir = os.path.dirname(script_dir)
# sys.path.append(parent_dir)
from cognitive_architecture.database.postgres.models import memory
from cognitive_architecture.database.postgres.models import metadatas
from cognitive_architecture.database.postgres.models import operation
from cognitive_architecture.database.postgres.models import sessions
from cognitive_architecture.database.postgres.models import user
from cognitive_architecture.database.postgres.models import docs
# from cognitive_architecture.config import Config
# config = Config()
# config.load()
from postgres.database import Base
from sqlalchemy import create_engine, text
import psycopg2
from dotenv import load_dotenv
load_dotenv()
import os
import os
if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
elif os.environ.get('AWS_ENV') == 'local':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
else:
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
def create_admin_engine(username, password, host, database_name):
admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}"
return create_engine(admin_url)
def database_exists(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.connect()
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
result = connection.execute(query).fetchone()
connection.close()
engine.dispose()
return result is not None
def create_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"CREATE DATABASE {db_name}")
cursor.close()
connection.close()
engine.dispose()
def drop_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"DROP DATABASE IF EXISTS {db_name}")
cursor.close()
connection.close()
engine.dispose()
print(f"Database {db_name} dropped successfully.")
def create_tables(engine):
Base.metadata.create_all(bind=engine)
if __name__ == "__main__":
#
# username = os.getenv('POSTGRES_USER')
# password = os.getenv('POSTGRES_PASSWORD')
# database_name = os.getenv('POSTGRES_DB')
# environment = os.environ.get("ENV")
# host = config.postgres_host
# username = config.postgres_user
# password = config.postgres_password
# database_name = config.postgres_db
engine = create_admin_engine(username, password, host, database_name)
print(Base.metadata.tables)
if not database_exists(username, password, host, database_name):
print(f"Database {database_name} does not exist. Creating...")
create_database(username, password, host, database_name)
print(f"Database {database_name} created successfully.")
create_tables(engine)

View file

@ -1,7 +1,7 @@
import os import os
from dotenv import load_dotenv
import os
import sys import sys
import boto3
from dotenv import load_dotenv
# Get the directory that contains your script # Get the directory that contains your script
current_dir = os.path.dirname(os.path.abspath(__file__)) current_dir = os.path.dirname(os.path.abspath(__file__))
@ -12,23 +12,19 @@ parent_dir = os.path.dirname(current_dir)
# Add the parent directory to sys.path # Add the parent directory to sys.path
sys.path.insert(0, parent_dir) sys.path.insert(0, parent_dir)
# API_ENABLED = os.environ.get("API_ENABLED", "False").lower() == "true" # API_ENABLED = os.environ.get("API_ENABLED", "False").lower() == "true"
import boto3
environment = os.getenv("AWS_ENV", "dev") environment = os.getenv("AWS_ENV", "dev")
def fetch_secret(secret_name, region_name, env_file_path): def fetch_secret(secret_name, region_name, env_file_path):
print("Initializing session") print("Initializing session")
session = boto3.session.Session() session = boto3.session.Session()
print("Session initialized") print("Session initialized")
client = session.client(service_name="secretsmanager", region_name=region_name) client = session.client(service_name="secretsmanager", region_name = region_name)
print("Client initialized") print("Client initialized")
try: try:
response = client.get_secret_value(SecretId=secret_name) response = client.get_secret_value(SecretId = secret_name)
except Exception as e: except Exception as e:
print(f"Error retrieving secret: {e}") print(f"Error retrieving secret: {e}")
return None return None
@ -38,52 +34,27 @@ def fetch_secret(secret_name, region_name, env_file_path):
else: else:
secret = response["SecretBinary"] secret = response["SecretBinary"]
with open(env_file_path, "w") as env_file:
env_file.write(secret)
if os.path.exists(env_file_path): if os.path.exists(env_file_path):
print(f"The .env file is located at: {os.path.abspath(env_file_path)}") print(f"The .env file is located at: {env_file_path}")
with open(env_file_path, "w") as env_file:
env_file.write(secret)
print("Secrets are added to the .env file.")
load_dotenv() load_dotenv()
PINECONE_API_KEY = os.getenv("PINECONE_API_KEY", "") print("The .env file is loaded.")
print("LEN OF PINECONE_API_KEY", len(PINECONE_API_KEY))
else: else:
print("The .env file was not found.") print(f"The .env file was not found at: {env_file_path}.")
return "Success in loading env files"
ENV_FILE_PATH = os.path.abspath("../.env")
env_file = "../.env" if os.path.exists(ENV_FILE_PATH):
if os.path.exists(env_file):
# Load default environment variables (.env) # Load default environment variables (.env)
load_dotenv() load_dotenv()
print("cognee is running") print("Cognee is already running...")
else: else:
secrets = fetch_secret( fetch_secret(
f"promethai-{environment}-backend-secretso-promethaijs-dotenv", f"promethai-{environment}-backend-secretso-promethaijs-dotenv",
"eu-west-1", "eu-west-1",
"../.env", ENV_FILE_PATH,
) )
if secrets:
print(secrets)
load_dotenv()
# Check if "dev" is present in the task ARN
if "dev" in environment:
# Fetch the secret
secrets = fetch_secret(
f"promethai-dev-backend-secretso-promethaijs-dotenv",
"eu-west-1",
"../.env",
)
load_dotenv()
elif "prd" in environment:
# Fetch the secret
secrets = fetch_secret(
f"promethai-prd-backend-secretso-promethaijs-dotenv",
"eu-west-1",
"../.env",
)
load_dotenv()

View file

@ -14,6 +14,7 @@ services:
- cognee_backend - cognee_backend
cognee: cognee:
container_name: cognee
networks: networks:
- cognee_backend - cognee_backend
build: build:
@ -30,6 +31,7 @@ services:
- 443:443 - 443:443
- 80:80 - 80:80
- 50051:50051 - 50051:50051
- 5678:5678
depends_on: depends_on:
- postgres - postgres
- neo4j - neo4j
@ -38,6 +40,7 @@ services:
limits: limits:
cpus: "4.0" cpus: "4.0"
memory: 8GB memory: 8GB
postgres: postgres:
image: postgres image: postgres
container_name: postgres container_name: postgres
@ -50,6 +53,7 @@ services:
- cognee_backend - cognee_backend
ports: ports:
- "5432:5432" - "5432:5432"
networks: networks:
cognee_backend: cognee_backend:
name: cognee_backend name: cognee_backend

View file

@ -1,24 +1,35 @@
#!/bin/bash #!/bin/bash
export ENVIRONMENT # export ENVIRONMENT
# Run Python scripts with error handling
echo "Running fetch_secret.py" echo $DEBUG
echo $ENVIRONMENT
if [ "$ENVIRONMENT" != "local" ]; then if [ "$ENVIRONMENT" != "local" ]; then
echo "Running fetch_secret.py" echo "Running fetch_secret.py"
python cognitive_architecture/fetch_secret.py python cognitive_architecture/fetch_secret.py
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
echo "Error: fetch_secret.py failed" echo "Error: fetch_secret.py failed"
exit 1 exit 1
fi fi
else else
echo "ENVIRONMENT ($ENVIRONMENT) is active, skipping fetch_secret.py" echo '"local" environment is active, skipping fetch_secret.py'
fi fi
echo "Running create_database.py" echo "Running create_database.py"
python cognitive_architecture/database/create_database.py python cognitive_architecture/database/create_database.py
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
echo "Error: create_database.py failed" echo "Error: create_database.py failed"
exit 1 exit 1
fi fi
# Start Gunicorn
echo "Starting Gunicorn" echo "Starting Gunicorn"
gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
if [ "$DEBUG" = true ]; then
echo "Waiting for the debugger to attach..."
python -m debugpy --wait-for-client --listen 0.0.0.0:5678 -m gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
else
gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
fi

11
main.py
View file

@ -183,10 +183,17 @@ async def user_query_to_graph_db(session: AsyncSession, user_id: str, query_inpu
) )
detected_language = detect_language(query_input) detected_language = detect_language(query_input)
translated_query = translate_text(query_input, detected_language, "en")
if detected_language is not "en":
translated_query = translate_text(query_input, detected_language, "en")
else:
translated_query = query_input
neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, password=config.graph_database_password) neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, password=config.graph_database_password)
cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id,translated_query)
cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id, translated_query)
result = neo4j_graph_db.query(cypher_query) result = neo4j_graph_db.query(cypher_query)
neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="SemanticMemory", similarity_threshold=0.8) neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="SemanticMemory", similarity_threshold=0.8)
neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="EpisodicMemory", similarity_threshold=0.8) neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="EpisodicMemory", similarity_threshold=0.8)
neo4j_graph_db.close() neo4j_graph_db.close()

View file

@ -59,10 +59,7 @@ neo4j = "^5.14.1"
grpcio = "^1.60.0" grpcio = "^1.60.0"
langdetect = "^1.0.9" langdetect = "^1.0.9"
iso639 = "^0.1.4" iso639 = "^0.1.4"
debugpy = "^1.8.0"
[build-system] [build-system]
requires = ["poetry-core"] requires = ["poetry-core"]