Merge pull request #41 from topoteretes/chore/debugging

chore: add debugpy and update readme
This commit is contained in:
Vasilije 2024-02-15 10:59:00 +01:00 committed by GitHub
commit fdb16dc5a6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
15 changed files with 190 additions and 370 deletions

1
.gitignore vendored
View file

@ -162,3 +162,4 @@ cython_debug/
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.vscode/

View file

@ -1,13 +1,15 @@
FROM python:3.11
# Set build argument
ARG DEBUG
ARG API_ENABLED
# Set environment variable based on the build argument
ENV API_ENABLED=${API_ENABLED} \
PIP_NO_CACHE_DIR=true
ENV DEBUG=${DEBUG}
ENV API_ENABLED=${API_ENABLED}
ENV PIP_NO_CACHE_DIR=true
ENV PATH="${PATH}:/root/.poetry/bin"
RUN pip install poetry
WORKDIR /app
@ -15,6 +17,7 @@ COPY pyproject.toml poetry.lock /app/
# Install the dependencies
RUN poetry config virtualenvs.create false && \
poetry lock --no-update && \
poetry install --no-root --no-dev
RUN apt-get update -q && \
@ -37,15 +40,13 @@ RUN apt-get update -q && \
/tmp/* \
/var/tmp/*
WORKDIR /app
COPY cognitive_architecture/ /app/cognitive_architecture
COPY main.py /app
COPY api.py /app
COPY entrypoint.sh /app/entrypoint.sh
RUN chmod +x /app/entrypoint.sh
ENTRYPOINT ["/app/entrypoint.sh"]
ENTRYPOINT ["/app/entrypoint.sh"]

188
README.md
View file

@ -2,107 +2,114 @@
AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps
<p align="left">
<a href="https://prometh.ai//#gh-light-mode-only">
<img src="assets/topoteretes_logo.png" width="5%" alt="promethAI logo" />
<p>
<a href="https://cognee.ai" target="_blank">
<img src="assets/cognee-logo.png" width="160px" alt="Cognee logo" />
</a>
</p>
<p align="left"><i>Open-source framework for building and testing RAGs and Cognitive Architectures, designed for accuracy, transparency, and control.</i></p>
<p align="left">
<a href="https://github.com/topoteretes/cognee/fork" target="blank">
<img src="https://img.shields.io/github/forks/topoteretes/cognee?style=for-the-badge" alt="cognee forks"/>
</a>
<a href="https://github.com/topoteretes/cognee/stargazers" target="blank">
<img src="https://img.shields.io/github/stars/topoteretes/cognee?style=for-the-badge" alt="cognee stars"/>
</a>
<a href="https://github.com/topoteretes/cognee/pulls" target="blank">
<img src="https://img.shields.io/github/issues-pr/topoteretes/cognee?style=for-the-badge" alt="cognee pull-requests"/>
</a>
<a href='https://github.com/topoteretes/cognee/releases'>
<img src='https://img.shields.io/github/release/topoteretes/cognee?&label=Latest&style=for-the-badge'>
</a>
<p>
<i>Open-source framework for building and testing RAGs and Cognitive Architectures, designed for accuracy, transparency, and control.</i>
</p>
[//]: # (<p align="center"><b>Follow PromethAI </b></p>)
[//]: # (<p align="center">)
[//]: # (<a href="https://twitter.com/_promethAI" target="blank">)
[//]: # (<img src="https://img.shields.io/twitter/follow/_promethAI?label=Follow: _promethAI&style=social" alt="Follow _promethAI"/>)
[//]: # (</a>)
[//]: # (<p align="center">)
[//]: # (<a href="https://prometh.ai" target="_blank"><img src="https://img.shields.io/twitter/url?label=promethAI Website&logo=website&style=social&url=https://github.com/topoteretes/PromethAI-Memory"/></a>)
[//]: # (<p align="center">)
[//]: # (<a href="https://www.youtube.com/@_promethAI" target="_blank"><img src="https://img.shields.io/twitter/url?label=Youtube&logo=youtube&style=social&url=https://github.com/topoteretes/PromethAI-Memory"/></a>)
[//]: # (</p>)
<p align="left"><b>Share cognee Repository</b></p>
<p align="left">
<a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee&hashtags=promethAI,AGI,Autonomics,future" target="blank">
<img src="https://img.shields.io/twitter/follow/_promethAI?label=Share Repo on Twitter&style=social" alt="Follow _promethAI"/></a>
<a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee" target="_blank"><img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Telegram"/></a>
<a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%20https://github.com/topoteretes/cognee"><img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/topoteretes/cognee" /></a> <a href="https://www.reddit.com/submit?url=https://github.com/topoteretes/cognee&title=Check%20this%20GitHub%20repository%20out.%20promethAI%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.
" target="blank">
<img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Reddit"/>
</a> <a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.&body=promethAI%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%3A%0Ahttps://github.com/topoteretes/cognee" target="_blank"><img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/topoteretes/cognee"/></a> <a href="https://www.buymeacoffee.com/promethAI" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px"></a>
<p>
<a href="https://github.com/topoteretes/cognee/fork">
<img src="https://img.shields.io/github/forks/topoteretes/cognee?style=for-the-badge" alt="cognee forks"/>
</a>
<a href="https://github.com/topoteretes/cognee/stargazers">
<img src="https://img.shields.io/github/stars/topoteretes/cognee?style=for-the-badge" alt="cognee stars"/>
</a>
<a href="https://github.com/topoteretes/cognee/pulls">
<img src="https://img.shields.io/github/issues-pr/topoteretes/cognee?style=for-the-badge" alt="cognee pull-requests"/>
</a>
<a href="https://github.com/topoteretes/cognee/releases">
<img src="https://img.shields.io/github/release/topoteretes/cognee?&label=Latest&style=for-the-badge" alt="cognee releases">
</a>
</p>
<p>
<b>Share cognee Repository</b>
</p>
<p>
<a href="https://twitter.com/intent/tweet?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee&hashtags=AGI,Autonomics,Cognee,future" target="_blank">
<img src="https://img.shields.io/twitter/follow/_promethAI?label=Share Repo on Twitter&style=social" alt="Follow Cognee"/>
</a>
<a href="https://t.me/share/url?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.&url=https://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Telegram&logo=Telegram&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Telegram"/>
</a>
<a href="https://api.whatsapp.com/send?text=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%20https://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=whatsapp&logo=whatsapp&style=social&url=https://github.com/topoteretes/cognee" />
</a>
<a href="https://www.reddit.com/submit?url=https://github.com/topoteretes/cognee&title=Check%20this%20GitHub%20repository%20out.%20Cognee%20-%20Let's%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.
" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Reddit&logo=Reddit&style=social&url=https://github.com/topoteretes/cognee" alt="Share on Reddit"/>
</a>
<a href="mailto:?subject=Check%20this%20GitHub%20repository%20out.&body=Cognee%20-%20Let%27s%20you%20easily%20build,%20manage%20and%20run%20useful%20autonomous%20AI%20agents.%3A%0Ahttps://github.com/topoteretes/cognee" target="_blank">
<img src="https://img.shields.io/twitter/url?label=Gmail&logo=Gmail&style=social&url=https://github.com/topoteretes/cognee"/>
</a>
<a href="https://www.buymeacoffee.com/promethAI" target="_blank">
<img src="https://cdn.buymeacoffee.com/buttons/default-orange.png" alt="Buy Me A Coffee" height="23" width="100" style="border-radius:1px">
</a>
</p>
<hr>
[Star us on Github!](https://www.github.com/topoteretes/cognee)
Jump into the world of RAG architecture, inspired by human cognitive processes, using Python.
[cognee](https://www.cognee.ai) runs in iterations, from POC towards production ready code.
<a href="https://www.cognee.ai" target="_blank">Cognee</a> runs in iterations, from POC towards production ready code.
To read more about the approach and details on cognitive architecture, see the blog post: [AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps](https://topoteretes.notion.site/Going-beyond-Langchain-Weaviate-and-towards-a-production-ready-modern-data-platform-7351d77a1eba40aab4394c24bef3a278?pvs=4)
To read more about the approach and details on cognitive architecture, see the blog post: <a href="https://topoteretes.notion.site/Going-beyond-Langchain-Weaviate-and-towards-a-production-ready-modern-data-platform-7351d77a1eba40aab4394c24bef3a278?pvs=4" target="_blank">AI Applications and RAGs - Cognitive Architecture, Testability, Production Ready Apps</a>
Try it yourself on WhatsApp with one of our [partners](https://www.keepi.ai) by typing /save _content_ followed by /query _content_
### Get Started in Moments
Running [cognee](https://www.cognee.ai) is a breeze. Simply run `cp env.example .env` and `docker compose up cognee` in your terminal.
Send API requests add-memory, user-query-to-graph, document-to-graph-db, user-query-processor to localhost:8000
### Current Focus
#### Integration to keepi.ai and other apps
Uses Neo4j to map user preferences into a graph structure consisting of semantic, episodic, and procedural memory.
Fetches information and stores information and files on the WhatsApp chatbot using [keepi.ai](https://www.keepi.ai)
Uses the graph to answer user queries and store new information in the graph.
### Architecture
![Image](https://github.com/topoteretes/PromethAI-Memory/blob/main/assets/img.png)
Try it yourself on Whatsapp with one of our <a href="https://keepi.ai">partners</a> by typing `/save {content you want to save}` followed by `/query {knowledge you saved previously}`
## Getting started
In order to run cognee you need to have <a href="https://docs.docker.com/get-docker" target="_blank">Docker</a> installed on your machine.
Run <a href="https://www.cognee.ai" target="_blank">Cognee</a> in a couple of steps:
- Run `cp .env.template .env` in your terminal and set all the environment variables
- Run `docker compose up` in order to start graph and relational databases
- Run `docker compose up cognee` in order to start Cognee
<!-- Send API requests add-memory, user-query-to-graph, document-to-graph-db, user-query-processor to localhost:8000 -->
## Debugging
In order to run Cognee with debugger attached you need to build the Cognee image with the `DEBUG` flag set to true.
- `docker compose build cognee --no-cache --build-arg DEBUG=true`
- `docker compose up cognee`
### Visual Studio Code debugger
Add the following configuration to VSCode `Run and Debug` configurations array:
```json
{
"name": "Attach (remote debug)",
"type": "python",
"request": "attach",
"port": 5678,
"host": "127.0.0.1",
"pathMappings": [{
"localRoot": "${workspaceFolder}",
"remoteRoot": "."
}]
}
```
It should look like this:
<img src="assets/vscode-debug-config.png" width="500px" />
## Current Focus
### Integration with keepi.ai and other apps
- Cognee uses Neo4j graph database to map user data into a graph structure consisting of semantic, episodic, and procedural memory.
- Stores data and files through the WhatsApp chatbot <a href="https://keepi.ai">keepi.ai</a>
- Uses the graph to answer user queries and store new information in the graph.
## Architecture
### How Cognee Enhances Your Contextual Memory
@ -112,15 +119,6 @@ Our framework for the OpenAI, Graph (Neo4j) and Vector (Weaviate) databases intr
- Document Topology: Structure and store documents in public and private domains.
- Personalized Context: Provide a context object to the LLM for a better response.
</br>
![Image](assets/architecture.png)

41
api.py
View file

@ -15,6 +15,9 @@ from cognitive_architecture.vectorstore_manager import Memory
from dotenv import load_dotenv
from main import add_documents_to_graph_db, user_context_enrichment
from cognitive_architecture.config import Config
from fastapi import Depends
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
# Set up logging
logging.basicConfig(
@ -25,7 +28,10 @@ logging.basicConfig(
logger = logging.getLogger(__name__)
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
config = Config()
config.load()
app = FastAPI(debug=True)
#
# from auth.cognito.JWTBearer import JWTBearer
@ -33,31 +39,12 @@ app = FastAPI(debug=True)
#
# auth = JWTBearer(jwks)
from fastapi import Depends
config = Config()
config.load()
@app.get(
"/",
)
"""
Root endpoint that returns a welcome message.
"""
async def root():
class ImageResponse(BaseModel):
success: bool
message: str
@app.get(
"/",
)
@app.get("/")
async def root():
"""
Root endpoint that returns a welcome message.
"""
return {"message": "Hello, World, I am alive!"}
return { "message": "Hello, World, I am alive!" }
@app.get("/health")
@ -234,10 +221,11 @@ async def drop_db(payload: Payload):
else:
pass
from cognitive_architecture.database.create_database_tst import drop_database, create_admin_engine
from cognitive_architecture.database.create_database import drop_database, create_admin_engine
engine = create_admin_engine(username, password, host, database_name)
drop_database(engine)
connection = engine.raw_connection()
drop_database(connection, database_name)
return JSONResponse(content={"response": "DB dropped"}, status_code=200)
else:
@ -249,10 +237,11 @@ async def drop_db(payload: Payload):
else:
pass
from cognitive_architecture.database.create_database_tst import create_database, create_admin_engine
from cognitive_architecture.database.create_database import create_database, create_admin_engine
engine = create_admin_engine(username, password, host, database_name)
create_database(engine)
connection = engine.raw_connection()
create_database(connection, database_name)
return JSONResponse(content={"response": " DB drop"}, status_code=200)

View file

Before

Width:  |  Height:  |  Size: 174 KiB

After

Width:  |  Height:  |  Size: 174 KiB

BIN
assets/cognee-logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.4 KiB

View file

Before

Width:  |  Height:  |  Size: 161 KiB

After

Width:  |  Height:  |  Size: 161 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View file

@ -1,110 +1,63 @@
# this is needed to import classes from other modules
# script_dir = os.path.dirname(os.path.abspath(__file__))
# # Get the parent directory of your script and add it to sys.path
# parent_dir = os.path.dirname(script_dir)
# sys.path.append(parent_dir)
import os
import logging
import psycopg2
from dotenv import load_dotenv
from postgres.database import Base
from sqlalchemy import create_engine, text
from postgres.models import memory
from postgres.models import metadatas
from postgres.models import operation
from postgres.models import sessions
from postgres.models import user
from postgres.models import docs
# from cognitive_architecture.config import Config
# config = Config()
# config.load()
from postgres.database import Base
from sqlalchemy import create_engine, text
import psycopg2
from dotenv import load_dotenv
load_dotenv()
import os
import os
if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
elif os.environ.get('AWS_ENV') == 'local':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
else:
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
logger = logging.getLogger(__name__)
def create_admin_engine(username, password, host, database_name):
admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}"
return create_engine(admin_url)
def database_exists(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.connect()
def database_exists(connection, db_name):
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
result = connection.execute(query).fetchone()
connection.close()
engine.dispose()
return result is not None
def create_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.raw_connection()
def create_database(connection, db_name):
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"CREATE DATABASE {db_name}")
cursor.close()
connection.close()
engine.dispose()
def drop_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host)
connection = engine.raw_connection()
def drop_database(connection, db_name):
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"DROP DATABASE IF EXISTS {db_name}")
cursor.close()
connection.close()
engine.dispose()
print(f"Database {db_name} dropped successfully.")
def create_tables(engine):
Base.metadata.create_all(bind=engine)
Base.metadata.create_all(bind = engine)
if __name__ == "__main__":
#
# username = os.getenv('POSTGRES_USER')
# password = os.getenv('POSTGRES_PASSWORD')
# database_name = os.getenv('POSTGRES_DB')
# environment = os.environ.get("ENV")
# host = config.postgres_host
# username = config.postgres_user
# password = config.postgres_password
# database_name = config.postgres_db
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
engine = create_admin_engine(username, password, host, database_name)
connection = engine.connect()
print(Base.metadata.tables)
# print(Base.metadata.tables)
if not database_exists(username, password, host, database_name):
print(f"Database {database_name} does not exist. Creating...")
create_database(username, password, host, database_name)
print(f"Database {database_name} created successfully.")
if not database_exists(connection, database_name):
logger.info(f"Database {database_name} does not exist. Creating...")
create_database(connection, database_name)
logger.info(f"Database {database_name} created successfully.")
connection.close()
engine.dispose()
create_tables(engine)

View file

@ -1,112 +0,0 @@
# this is needed to import classes from other modules
# script_dir = os.path.dirname(os.path.abspath(__file__))
# # Get the parent directory of your script and add it to sys.path
# parent_dir = os.path.dirname(script_dir)
# sys.path.append(parent_dir)
from cognitive_architecture.database.postgres.models import memory
from cognitive_architecture.database.postgres.models import metadatas
from cognitive_architecture.database.postgres.models import operation
from cognitive_architecture.database.postgres.models import sessions
from cognitive_architecture.database.postgres.models import user
from cognitive_architecture.database.postgres.models import docs
# from cognitive_architecture.config import Config
# config = Config()
# config.load()
from postgres.database import Base
from sqlalchemy import create_engine, text
import psycopg2
from dotenv import load_dotenv
load_dotenv()
import os
import os
if os.environ.get('AWS_ENV') == 'prd' or os.environ.get('AWS_ENV') == 'dev':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
elif os.environ.get('AWS_ENV') == 'local':
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
else:
host = os.environ.get('POSTGRES_HOST')
username = os.environ.get('POSTGRES_USER')
password = os.environ.get('POSTGRES_PASSWORD')
database_name = os.environ.get('POSTGRES_DB')
def create_admin_engine(username, password, host, database_name):
admin_url = f"postgresql://{username}:{password}@{host}:5432/{database_name}"
return create_engine(admin_url)
def database_exists(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.connect()
query = text(f"SELECT 1 FROM pg_database WHERE datname='{db_name}'")
result = connection.execute(query).fetchone()
connection.close()
engine.dispose()
return result is not None
def create_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host, db_name)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"CREATE DATABASE {db_name}")
cursor.close()
connection.close()
engine.dispose()
def drop_database(username, password, host, db_name):
engine = create_admin_engine(username, password, host)
connection = engine.raw_connection()
connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cursor = connection.cursor()
cursor.execute(f"DROP DATABASE IF EXISTS {db_name}")
cursor.close()
connection.close()
engine.dispose()
print(f"Database {db_name} dropped successfully.")
def create_tables(engine):
Base.metadata.create_all(bind=engine)
if __name__ == "__main__":
#
# username = os.getenv('POSTGRES_USER')
# password = os.getenv('POSTGRES_PASSWORD')
# database_name = os.getenv('POSTGRES_DB')
# environment = os.environ.get("ENV")
# host = config.postgres_host
# username = config.postgres_user
# password = config.postgres_password
# database_name = config.postgres_db
engine = create_admin_engine(username, password, host, database_name)
print(Base.metadata.tables)
if not database_exists(username, password, host, database_name):
print(f"Database {database_name} does not exist. Creating...")
create_database(username, password, host, database_name)
print(f"Database {database_name} created successfully.")
create_tables(engine)

View file

@ -1,7 +1,7 @@
import os
from dotenv import load_dotenv
import os
import sys
import boto3
from dotenv import load_dotenv
# Get the directory that contains your script
current_dir = os.path.dirname(os.path.abspath(__file__))
@ -12,23 +12,19 @@ parent_dir = os.path.dirname(current_dir)
# Add the parent directory to sys.path
sys.path.insert(0, parent_dir)
# API_ENABLED = os.environ.get("API_ENABLED", "False").lower() == "true"
import boto3
environment = os.getenv("AWS_ENV", "dev")
def fetch_secret(secret_name, region_name, env_file_path):
print("Initializing session")
session = boto3.session.Session()
print("Session initialized")
client = session.client(service_name="secretsmanager", region_name=region_name)
client = session.client(service_name="secretsmanager", region_name = region_name)
print("Client initialized")
try:
response = client.get_secret_value(SecretId=secret_name)
response = client.get_secret_value(SecretId = secret_name)
except Exception as e:
print(f"Error retrieving secret: {e}")
return None
@ -38,52 +34,27 @@ def fetch_secret(secret_name, region_name, env_file_path):
else:
secret = response["SecretBinary"]
with open(env_file_path, "w") as env_file:
env_file.write(secret)
if os.path.exists(env_file_path):
print(f"The .env file is located at: {os.path.abspath(env_file_path)}")
print(f"The .env file is located at: {env_file_path}")
with open(env_file_path, "w") as env_file:
env_file.write(secret)
print("Secrets are added to the .env file.")
load_dotenv()
PINECONE_API_KEY = os.getenv("PINECONE_API_KEY", "")
print("LEN OF PINECONE_API_KEY", len(PINECONE_API_KEY))
print("The .env file is loaded.")
else:
print("The .env file was not found.")
return "Success in loading env files"
print(f"The .env file was not found at: {env_file_path}.")
ENV_FILE_PATH = os.path.abspath("../.env")
env_file = "../.env"
if os.path.exists(env_file):
if os.path.exists(ENV_FILE_PATH):
# Load default environment variables (.env)
load_dotenv()
print("cognee is running")
print("Cognee is already running...")
else:
secrets = fetch_secret(
fetch_secret(
f"promethai-{environment}-backend-secretso-promethaijs-dotenv",
"eu-west-1",
"../.env",
ENV_FILE_PATH,
)
if secrets:
print(secrets)
load_dotenv()
# Check if "dev" is present in the task ARN
if "dev" in environment:
# Fetch the secret
secrets = fetch_secret(
f"promethai-dev-backend-secretso-promethaijs-dotenv",
"eu-west-1",
"../.env",
)
load_dotenv()
elif "prd" in environment:
# Fetch the secret
secrets = fetch_secret(
f"promethai-prd-backend-secretso-promethaijs-dotenv",
"eu-west-1",
"../.env",
)
load_dotenv()

View file

@ -14,6 +14,7 @@ services:
- cognee_backend
cognee:
container_name: cognee
networks:
- cognee_backend
build:
@ -30,6 +31,7 @@ services:
- 443:443
- 80:80
- 50051:50051
- 5678:5678
depends_on:
- postgres
- neo4j
@ -38,6 +40,7 @@ services:
limits:
cpus: "4.0"
memory: 8GB
postgres:
image: postgres
container_name: postgres
@ -50,6 +53,7 @@ services:
- cognee_backend
ports:
- "5432:5432"
networks:
cognee_backend:
name: cognee_backend

View file

@ -1,24 +1,35 @@
#!/bin/bash
export ENVIRONMENT
# Run Python scripts with error handling
echo "Running fetch_secret.py"
# export ENVIRONMENT
echo "Debug mode: $DEBUG"
echo "Environment: $ENVIRONMENT"
if [ "$ENVIRONMENT" != "local" ]; then
echo "Running fetch_secret.py"
python cognitive_architecture/fetch_secret.py
if [ $? -ne 0 ]; then
echo "Error: fetch_secret.py failed"
exit 1
fi
else
echo "ENVIRONMENT ($ENVIRONMENT) is active, skipping fetch_secret.py"
echo '"local" environment is active, skipping fetch_secret.py'
fi
echo "Running create_database.py"
python cognitive_architecture/database/create_database.py
if [ $? -ne 0 ]; then
echo "Error: create_database.py failed"
exit 1
fi
# Start Gunicorn
echo "Starting Gunicorn"
gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
if [ "$DEBUG" = true ]; then
echo "Waiting for the debugger to attach..."
python -m debugpy --wait-for-client --listen 0.0.0.0:5678 -m gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
else
gunicorn -w 3 -k uvicorn.workers.UvicornWorker -t 30000 --bind=0.0.0.0:8000 --bind=0.0.0.0:443 --log-level debug api:app
fi

11
main.py
View file

@ -183,10 +183,17 @@ async def user_query_to_graph_db(session: AsyncSession, user_id: str, query_inpu
)
detected_language = detect_language(query_input)
translated_query = translate_text(query_input, detected_language, "en")
if detected_language != "en":
translated_query = translate_text(query_input, detected_language, "en")
else:
translated_query = query_input
neo4j_graph_db = Neo4jGraphDB(url=config.graph_database_url, username=config.graph_database_username, password=config.graph_database_password)
cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id,translated_query)
cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id, translated_query)
result = neo4j_graph_db.query(cypher_query)
neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="SemanticMemory", similarity_threshold=0.8)
neo4j_graph_db.run_merge_query(user_id=user_id, memory_type="EpisodicMemory", similarity_threshold=0.8)
neo4j_graph_db.close()

View file

@ -59,10 +59,7 @@ neo4j = "^5.14.1"
grpcio = "^1.60.0"
langdetect = "^1.0.9"
iso639 = "^0.1.4"
debugpy = "^1.8.0"
[build-system]
requires = ["poetry-core"]