Added initial API logic, crud for graph, connected vectordb and graph

parent ceba0d31e7
commit fe78d4e711

30 changed files with 7099 additions and 296 deletions
@@ -43,9 +43,8 @@ RUN apt-get update -q && \

WORKDIR /app
COPY cognitive_architecture/ /app
COPY cognitive_architecture/scripts/ /app

COPY cognitive_architecture/entrypoint.sh /app/entrypoint.sh
COPY cognitive_architecture/scripts/create_database.py /app/create_database.py
RUN chmod +x /app/entrypoint.sh

ENTRYPOINT ["/app/entrypoint.sh"]

521 level_4/cognitive_architecture/api.py Normal file
@@ -0,0 +1,521 @@
import json
import logging
import os
from enum import Enum
from typing import Dict, Any

import uvicorn
from fastapi import FastAPI, BackgroundTasks, HTTPException
from fastapi.responses import JSONResponse
from pydantic import BaseModel

from database.postgres.database import AsyncSessionLocal
from database.postgres.database_crud import session_scope
from vectorstore_manager import Memory
from dotenv import load_dotenv


# Set up logging
logging.basicConfig(
    level=logging.INFO,  # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL)
    format="%(asctime)s [%(levelname)s] %(message)s",  # Set the log message format
)

logger = logging.getLogger(__name__)


load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
app = FastAPI(debug=True)
#
# from auth.cognito.JWTBearer import JWTBearer
# from auth.auth import jwks
#
# auth = JWTBearer(jwks)

from fastapi import Depends

from config import Config

config = Config()
config.load()

class ImageResponse(BaseModel):
|
||||
success: bool
|
||||
message: str
|
||||
|
||||
|
||||
@app.get(
|
||||
"/",
|
||||
)
|
||||
async def root():
|
||||
"""
|
||||
Root endpoint that returns a welcome message.
|
||||
"""
|
||||
return {"message": "Hello, World, I am alive!"}
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
def health_check():
|
||||
"""
|
||||
Health check endpoint that returns the server status.
|
||||
"""
|
||||
return {"status": "OK"}
|
||||
|
||||
|
||||
class Payload(BaseModel):
|
||||
payload: Dict[str, Any]
|
||||
|
||||
|
||||
def memory_factory(memory_type):
|
||||
load_dotenv()
|
||||
|
||||
class Payload(BaseModel):
|
||||
payload: Dict[str, Any]
|
||||
|
||||
@app.post("/{memory_type}/add-memory", response_model=dict)
|
||||
async def add_memory(
|
||||
payload: Payload,
|
||||
# files: List[UploadFile] = File(...),
|
||||
):
|
||||
try:
|
||||
logging.info(" Adding to Memory ")
|
||||
decoded_payload = payload.payload
|
||||
async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
memory = await Memory.create_memory(
|
||||
decoded_payload["user_id"], session,"SEMANTICMEMORY", namespace="SEMANTICMEMORY"
|
||||
)
|
||||
|
||||
# Adding a memory instance
|
||||
await memory.add_memory_instance(decoded_payload["memory_object"])
|
||||
|
||||
# Managing memory attributes
|
||||
existing_user = await Memory.check_existing_user(
|
||||
decoded_payload["user_id"], session
|
||||
)
|
||||
await memory.manage_memory_attributes(existing_user)
|
||||
await memory.add_dynamic_memory_class(
|
||||
decoded_payload["memory_object"],
|
||||
decoded_payload["memory_object"].upper(),
|
||||
)
|
||||
memory_class = decoded_payload["memory_object"] + "_class"
|
||||
dynamic_memory_class = getattr(memory, memory_class.lower(), None)
|
||||
|
||||
await memory.add_method_to_class(dynamic_memory_class, "add_memories")
|
||||
# await memory.add_method_to_class(memory.semanticmemory_class, 'fetch_memories')
|
||||
output = await memory.dynamic_method_call(
|
||||
dynamic_memory_class,
|
||||
"add_memories",
|
||||
observation="some_observation",
|
||||
params=decoded_payload["params"],
|
||||
loader_settings=decoded_payload["loader_settings"],
|
||||
)
|
||||
return JSONResponse(content={"response": output}, status_code=200)
|
||||
|
||||
except Exception as e:
|
||||
return JSONResponse(
|
||||
content={"response": {"error": str(e)}}, status_code=503
|
||||
)
|
||||
|
||||
@app.post("/user-to-graph-query")
|
||||
async def generate_cypher_query(payload: Payload,):
|
||||
try:
|
||||
|
||||
from database.graph_database.graph import Neo4jGraphDB
|
||||
neo4j_graph_db = Neo4jGraphDB(config.graph_database_url, config.graph_database_username, config.graph_database_password)
|
||||
decoded_payload = payload.payload
|
||||
cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(decoded_payload['user_id'],
|
||||
decoded_payload['prompt'])
|
||||
|
||||
# Execute the query - replace this with the actual execution method
|
||||
async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
# Assuming you have a method in Neo4jGraphDB to execute the query
|
||||
result = await neo4j_graph_db.query(cypher_query, session)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
@app.post("/user-to-document-summary")
|
||||
async def generate_document_summary(payload: Payload,):
|
||||
try:
|
||||
from database.graph_database.graph import Neo4jGraphDB
|
||||
neo4j_graph_db = Neo4jGraphDB(config.graph_database_url, config.graph_database_username, config.graph_database_password)
|
||||
decoded_payload = payload.payload
|
||||
call_of_the_wild_summary = {
|
||||
"user_id": decoded_payload["user_id"],
|
||||
"document_category": "Classic Literature",
|
||||
"title": "The Call of the Wild",
|
||||
"summary": (
|
||||
"'The Call of the Wild' is a novel by Jack London set in the Yukon during the 1890s Klondike "
|
||||
"Gold Rush—a period when strong sled dogs were in high demand. The novel's central character "
|
||||
"is a dog named Buck, a domesticated dog living at a ranch in the Santa Clara Valley of California "
|
||||
"as the story opens. Stolen from his home and sold into the brutal existence of an Alaskan sled dog, "
|
||||
"he reverts to atavistic traits. Buck is forced to adjust to, and survive, cruel treatments and fight "
|
||||
"to dominate other dogs in a harsh climate. Eventually, he sheds the veneer of civilization, relying "
|
||||
"on primordial instincts and lessons he learns, to emerge as a leader in the wild. London drew on his "
|
||||
"own experiences in the Klondike, and the book provides a snapshot of the epical gold rush and the "
|
||||
"harsh realities of life in the wilderness. The novel explores themes of morality versus instinct, "
|
||||
"the struggle for survival in the natural world, and the intrusion of civilization on the wilderness. "
|
||||
"As Buck's wild nature is awakened, he rises to become a respected and feared leader in the wild, "
|
||||
"answering the primal call of nature."
|
||||
)
|
||||
}
|
||||
cypher_query = neo4j_graph_db.create_document_node_cypher(call_of_the_wild_summary, decoded_payload['user_id'])
|
||||
|
||||
neo4j_graph_db.query(cypher_query, call_of_the_wild_summary)
|
||||
|
||||
# Execute the query - replace this with the actual execution method
|
||||
async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
# Assuming you have a method in Neo4jGraphDB to execute the query
|
||||
result = await neo4j_graph_db.query(cypher_query, session)
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@app.post("/user-document-vectordb")
|
||||
async def generate_document_to_vector_db(payload: Payload, ):
|
||||
try:
|
||||
from database.graph_database.graph import Neo4jGraphDB
|
||||
neo4j_graph_db = Neo4jGraphDB(config.graph_database_url, config.graph_database_username,
|
||||
config.graph_database_password)
|
||||
decoded_payload = payload.payload
|
||||
|
||||
|
||||
neo4j_graph_db.update_document_node_with_namespace(decoded_payload['user_id'], document_title="The Call of the Wild")
|
||||
|
||||
# Execute the query - replace this with the actual execution method
|
||||
# async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
# # Assuming you have a method in Neo4jGraphDB to execute the query
|
||||
# result = await neo4j_graph_db.query(cypher_query, session)
|
||||
#
|
||||
# return result
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
|
||||
@app.post("/{memory_type}/fetch-memory", response_model=dict)
|
||||
async def fetch_memory(
|
||||
payload: Payload,
|
||||
# files: List[UploadFile] = File(...),
|
||||
):
|
||||
try:
|
||||
logging.info(" Adding to Memory ")
|
||||
decoded_payload = payload.payload
|
||||
async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
memory = await Memory.create_memory(
|
||||
decoded_payload["user_id"], session, "SEMANTICMEMORY", namespace="SEMANTICMEMORY"
|
||||
)
|
||||
|
||||
# Adding a memory instance
|
||||
await memory.add_memory_instance(decoded_payload["memory_object"])
|
||||
|
||||
# Managing memory attributes
|
||||
existing_user = await Memory.check_existing_user(
|
||||
decoded_payload["user_id"], session
|
||||
)
|
||||
await memory.manage_memory_attributes(existing_user)
|
||||
await memory.add_dynamic_memory_class(
|
||||
decoded_payload["memory_object"],
|
||||
decoded_payload["memory_object"].upper(),
|
||||
)
|
||||
memory_class = decoded_payload["memory_object"] + "_class"
|
||||
dynamic_memory_class = getattr(memory, memory_class.lower(), None)
|
||||
|
||||
await memory.add_method_to_class(dynamic_memory_class, "fetch_memories")
|
||||
# await memory.add_method_to_class(memory.semanticmemory_class, 'fetch_memories')
|
||||
output = await memory.dynamic_method_call(
|
||||
dynamic_memory_class,
|
||||
"fetch_memories",
|
||||
observation=decoded_payload["observation"],
|
||||
)
|
||||
return JSONResponse(content={"response": output}, status_code=200)
|
||||
|
||||
except Exception as e:
|
||||
return JSONResponse(
|
||||
content={"response": {"error": str(e)}}, status_code=503
|
||||
)
|
||||
|
||||
@app.post("/{memory_type}/delete-memory", response_model=dict)
|
||||
async def delete_memory(
|
||||
payload: Payload,
|
||||
# files: List[UploadFile] = File(...),
|
||||
):
|
||||
try:
|
||||
logging.info(" Adding to Memory ")
|
||||
decoded_payload = payload.payload
|
||||
async with session_scope(session=AsyncSessionLocal()) as session:
|
||||
memory = await Memory.create_memory(
|
||||
decoded_payload["user_id"], session, namespace="SEMANTICMEMORY"
|
||||
)
|
||||
|
||||
# Adding a memory instance
|
||||
await memory.add_memory_instance(decoded_payload["memory_object"])
|
||||
|
||||
# Managing memory attributes
|
||||
existing_user = await Memory.check_existing_user(
|
||||
decoded_payload["user_id"], session
|
||||
)
|
||||
await memory.manage_memory_attributes(existing_user)
|
||||
await memory.add_dynamic_memory_class(
|
||||
decoded_payload["memory_object"],
|
||||
decoded_payload["memory_object"].upper(),
|
||||
)
|
||||
memory_class = decoded_payload["memory_object"] + "_class"
|
||||
dynamic_memory_class = getattr(memory, memory_class.lower(), None)
|
||||
|
||||
await memory.add_method_to_class(
|
||||
dynamic_memory_class, "delete_memories"
|
||||
)
|
||||
# await memory.add_method_to_class(memory.semanticmemory_class, 'fetch_memories')
|
||||
output = await memory.dynamic_method_call(
|
||||
dynamic_memory_class,
|
||||
"delete_memories",
|
||||
namespace=decoded_payload["memory_object"].upper(),
|
||||
)
|
||||
return JSONResponse(content={"response": output}, status_code=200)
|
||||
|
||||
except Exception as e:
|
||||
return JSONResponse(
|
||||
content={"response": {"error": str(e)}}, status_code=503
|
||||
)
|
||||
|
||||
|
||||
memory_list = [ "semantic"]
|
||||
for memory_type in memory_list:
|
||||
memory_factory(memory_type)
|
||||
class TestSetType(Enum):
|
||||
SAMPLE = "sample"
|
||||
MANUAL = "manual"
|
||||
|
||||
def get_test_set(test_set_type, folder_path="example_data", payload=None):
|
||||
if test_set_type == TestSetType.SAMPLE:
|
||||
file_path = os.path.join(folder_path, "test_set.json")
|
||||
if os.path.isfile(file_path):
|
||||
with open(file_path, "r") as file:
|
||||
return json.load(file)
|
||||
elif test_set_type == TestSetType.MANUAL:
|
||||
# Check if the manual test set is provided in the payload
|
||||
if payload and "manual_test_set" in payload:
|
||||
return payload["manual_test_set"]
|
||||
else:
|
||||
# Attempt to load the manual test set from a file
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class MetadataType(Enum):
|
||||
SAMPLE = "sample"
|
||||
MANUAL = "manual"
|
||||
|
||||
def get_metadata(metadata_type, folder_path="example_data", payload=None):
|
||||
if metadata_type == MetadataType.SAMPLE:
|
||||
file_path = os.path.join(folder_path, "metadata.json")
|
||||
if os.path.isfile(file_path):
|
||||
with open(file_path, "r") as file:
|
||||
return json.load(file)
|
||||
elif metadata_type == MetadataType.MANUAL:
|
||||
# Check if the manual metadata is provided in the payload
|
||||
if payload and "manual_metadata" in payload:
|
||||
return payload["manual_metadata"]
|
||||
else:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# @app.post("/rag-test/rag_test_run", response_model=dict)
|
||||
# async def rag_test_run(
|
||||
# payload: Payload,
|
||||
# background_tasks: BackgroundTasks,
|
||||
# ):
|
||||
# try:
|
||||
# logging.info("Starting RAG Test")
|
||||
# decoded_payload = payload.payload
|
||||
# test_set_type = TestSetType(decoded_payload['test_set'])
|
||||
#
|
||||
# metadata_type = MetadataType(decoded_payload['metadata'])
|
||||
#
|
||||
# metadata = get_metadata(metadata_type, payload=decoded_payload)
|
||||
# if metadata is None:
|
||||
# return JSONResponse(content={"response": "Invalid metadata value"}, status_code=400)
|
||||
#
|
||||
# test_set = get_test_set(test_set_type, payload=decoded_payload)
|
||||
# if test_set is None:
|
||||
# return JSONResponse(content={"response": "Invalid test_set value"}, status_code=400)
|
||||
#
|
||||
# async def run_start_test(data, test_set, user_id, params, metadata, retriever_type):
|
||||
# result = await start_test(data = data, test_set = test_set, user_id =user_id, params =params, metadata =metadata, retriever_type=retriever_type)
|
||||
#
|
||||
# logging.info("Retriever DATA type", type(decoded_payload['data']))
|
||||
#
|
||||
# background_tasks.add_task(
|
||||
# run_start_test,
|
||||
# decoded_payload['data'],
|
||||
# test_set,
|
||||
# decoded_payload['user_id'],
|
||||
# decoded_payload['params'],
|
||||
# metadata,
|
||||
# decoded_payload['retriever_type']
|
||||
# )
|
||||
#
|
||||
# logging.info("Retriever type", decoded_payload['retriever_type'])
|
||||
# return JSONResponse(content={"response": "Task has been started"}, status_code=200)
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(
|
||||
#
|
||||
# content={"response": {"error": str(e)}}, status_code=503
|
||||
#
|
||||
# )
|
||||
|
||||
|
||||
# @app.get("/rag-test/{task_id}")
|
||||
# async def check_task_status(task_id: int):
|
||||
# task_status = task_status_db.get(task_id, "not_found")
|
||||
#
|
||||
# if task_status == "not_found":
|
||||
# return {"status": "Task not found"}
|
||||
#
|
||||
# return {"status": task_status}
|
||||
|
||||
# @app.get("/available-buffer-actions", response_model=dict)
|
||||
# async def available_buffer_actions(
|
||||
# payload: Payload,
|
||||
# # files: List[UploadFile] = File(...),
|
||||
# ):
|
||||
# try:
|
||||
# decoded_payload = payload.payload
|
||||
#
|
||||
# Memory_ = Memory(user_id=decoded_payload["user_id"])
|
||||
#
|
||||
# await Memory_.async_init()
|
||||
#
|
||||
# # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
|
||||
# output = await Memory_._available_operations()
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
|
||||
|
||||
|
||||
# @app.post("/run-buffer", response_model=dict)
|
||||
# async def run_buffer(
|
||||
# payload: Payload,
|
||||
# # files: List[UploadFile] = File(...),
|
||||
# ):
|
||||
# try:
|
||||
# decoded_payload = payload.payload
|
||||
#
|
||||
# Memory_ = Memory(user_id=decoded_payload["user_id"])
|
||||
#
|
||||
# await Memory_.async_init()
|
||||
#
|
||||
# # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
|
||||
# output = await Memory_._run_main_buffer(
|
||||
# user_input=decoded_payload["prompt"], params=decoded_payload["params"], attention_modulators=decoded_payload["attention_modulators"]
|
||||
# )
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
|
||||
#
|
||||
#
|
||||
# @app.post("/buffer/create-context", response_model=dict)
|
||||
# async def create_context(
|
||||
# payload: Payload,
|
||||
# # files: List[UploadFile] = File(...),
|
||||
# ):
|
||||
# try:
|
||||
# decoded_payload = payload.payload
|
||||
#
|
||||
# Memory_ = Memory(user_id=decoded_payload["user_id"])
|
||||
#
|
||||
# await Memory_.async_init()
|
||||
#
|
||||
# # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
|
||||
# output = await Memory_._create_buffer_context(
|
||||
# user_input=decoded_payload["prompt"], params=decoded_payload["params"], attention_modulators=decoded_payload["attention_modulators"]
|
||||
# )
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
|
||||
#
|
||||
#
|
||||
# @app.post("/buffer/get-tasks", response_model=dict)
|
||||
# async def create_context(
|
||||
# payload: Payload,
|
||||
# # files: List[UploadFile] = File(...),
|
||||
# ):
|
||||
# try:
|
||||
# decoded_payload = payload.payload
|
||||
#
|
||||
# Memory_ = Memory(user_id=decoded_payload["user_id"])
|
||||
#
|
||||
# await Memory_.async_init()
|
||||
#
|
||||
# # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
|
||||
# output = await Memory_._get_task_list(
|
||||
# user_input=decoded_payload["prompt"], params=decoded_payload["params"], attention_modulators=decoded_payload["attention_modulators"]
|
||||
# )
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
|
||||
#
|
||||
#
|
||||
# @app.post("/buffer/provide-feedback", response_model=dict)
|
||||
# async def provide_feedback(
|
||||
# payload: Payload,
|
||||
# # files: List[UploadFile] = File(...),
|
||||
# ):
|
||||
# try:
|
||||
# decoded_payload = payload.payload
|
||||
#
|
||||
# Memory_ = Memory(user_id=decoded_payload["user_id"])
|
||||
#
|
||||
# await Memory_.async_init()
|
||||
#
|
||||
# # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
|
||||
# if decoded_payload["total_score"] is None:
|
||||
#
|
||||
# output = await Memory_._provide_feedback(
|
||||
# user_input=decoded_payload["prompt"], params=decoded_payload["params"], attention_modulators=None, total_score=decoded_payload["total_score"]
|
||||
# )
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
# else:
|
||||
# output = await Memory_._provide_feedback(
|
||||
# user_input=decoded_payload["prompt"], params=decoded_payload["params"], attention_modulators=decoded_payload["attention_modulators"], total_score=None
|
||||
# )
|
||||
# return JSONResponse(content={"response": output}, status_code=200)
|
||||
#
|
||||
#
|
||||
# except Exception as e:
|
||||
# return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
|
||||
def start_api_server(host: str = "0.0.0.0", port: int = 8000):
|
||||
"""
|
||||
Start the API server using uvicorn.
|
||||
|
||||
Parameters:
|
||||
host (str): The host for the server.
|
||||
port (int): The port for the server.
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Starting server at {host}:{port}")
|
||||
uvicorn.run(app, host=host, port=port)
|
||||
except Exception as e:
|
||||
logger.exception(f"Failed to start server: {e}")
|
||||
# Here you could add any cleanup code or error recovery code.
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
start_api_server()
|
||||
|
|
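As a quick check of the new surface, the add-memory route that memory_factory registers for the "semantic" memory type can be exercised with a plain HTTP client. A minimal sketch, assuming the server is running on the default 0.0.0.0:8000 from start_api_server and that the requests package is available; the payload keys mirror the handler above, and the user id and loader settings are the sample values used elsewhere in this commit:

    import requests

    # Hypothetical request body: add_memory reads user_id, memory_object, params and
    # loader_settings out of payload["payload"].
    body = {
        "payload": {
            "user_id": "user123",
            "memory_object": "SemanticMemory",
            "params": {},
            "loader_settings": {
                "format": "PDF",
                "source": "URL",
                "path": ["https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf"],
            },
        }
    }

    response = requests.post("http://localhost:8000/semantic/add-memory", json=body)
    print(response.status_code, response.json())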
@@ -23,6 +23,7 @@ class Config:
    model: str = 'gpt-4-1106-preview'
    model_endpoint: str = 'openai'
    openai_key: Optional[str] = os.getenv('OPENAI_API_KEY')
    openai_temperature: float = float(os.getenv("OPENAI_TEMPERATURE", 0.0))

    # Embedding parameters
    embedding_model: str = 'openai'

@ -1,25 +1,20 @@
|
|||
import sys
|
||||
import os
|
||||
|
||||
# this is needed to import classes from other modules
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
# Get the parent directory of your script and add it to sys.path
|
||||
parent_dir = os.path.dirname(script_dir)
|
||||
sys.path.append(parent_dir)
|
||||
# script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
# # Get the parent directory of your script and add it to sys.path
|
||||
# parent_dir = os.path.dirname(script_dir)
|
||||
# sys.path.append(parent_dir)
|
||||
|
||||
from database.postgres.database import Base
|
||||
|
||||
from database.database import Base, engine
|
||||
import models.memory
|
||||
import models.metadatas
|
||||
import models.operation
|
||||
import models.sessions
|
||||
import models.testoutput
|
||||
import models.testset
|
||||
import models.user
|
||||
import models.docs
|
||||
from sqlalchemy import create_engine, text
|
||||
import psycopg2
|
||||
from dotenv import load_dotenv
|
||||
load_dotenv()
|
||||
import os
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def create_admin_engine(username, password, host, database_name):
|
||||
|
|
@ -55,7 +50,15 @@ if __name__ == "__main__":
|
|||
username = os.getenv('POSTGRES_USER')
|
||||
password = os.getenv('POSTGRES_PASSWORD')
|
||||
database_name = os.getenv('POSTGRES_DB')
|
||||
host = os.getenv('POSTGRES_HOST')
|
||||
environment = os.environ.get("ENVIRONMENT")
|
||||
|
||||
if environment == "local":
|
||||
host = os.getenv('POSTGRES_HOST')
|
||||
|
||||
elif environment == "docker":
|
||||
host = os.getenv('POSTGRES_HOST_DOCKER')
|
||||
else:
|
||||
host = os.getenv('POSTGRES_HOST_DOCKER')
|
||||
|
||||
engine = create_admin_engine(username, password, host, database_name)
|
||||
|
||||
|
|
@ -1,13 +1,5 @@
|
|||
from pydantic import BaseModel
|
||||
from enum import Enum
|
||||
|
||||
import typer
|
||||
import os
|
||||
import uuid
|
||||
# import marvin
|
||||
# from pydantic_settings import BaseSettings
|
||||
from langchain.chains import GraphCypherQAChain
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
# from marvin import ai_classifier
|
||||
# marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY")
|
||||
|
||||
|
|
@ -15,54 +7,33 @@ import os
|
|||
|
||||
print(os.getcwd())
|
||||
|
||||
|
||||
from ..models.sessions import (Session)
|
||||
from ..models.testset import TestSet
|
||||
from ..models.testoutput import TestOutput
|
||||
from ..models.metadatas import MetaDatas
|
||||
from ..models.operation import Operation
|
||||
from ..models.docs import DocsModel
|
||||
from ..models.memory import MemoryModel
|
||||
|
||||
from pathlib import Path
|
||||
import networkx as nx
|
||||
|
||||
from langchain.document_loaders import TextLoader
|
||||
from langchain.embeddings.openai import OpenAIEmbeddings
|
||||
from langchain.graphs import Neo4jGraph
|
||||
from langchain.text_splitter import TokenTextSplitter
|
||||
from langchain.vectorstores import Neo4jVector
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
import uuid
|
||||
|
||||
from graphviz import Digraph
|
||||
|
||||
from ..database.database_crud import session_scope
|
||||
from ..database.database import AsyncSessionLocal
|
||||
|
||||
import openai
|
||||
import instructor
|
||||
|
||||
from openai import OpenAI
|
||||
from openai import AsyncOpenAI
|
||||
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import List
|
||||
|
||||
# Adds response_model to ChatCompletion
|
||||
# Allows the return of Pydantic model rather than raw JSON
|
||||
instructor.patch()
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List
|
||||
from ..utils import format_dict, append_uuid_to_variable_names, create_edge_variable_mapping, create_node_variable_mapping
|
||||
from ...utils import format_dict, append_uuid_to_variable_names, create_edge_variable_mapping, create_node_variable_mapping
|
||||
DEFAULT_PRESET = "promethai_chat"
|
||||
preset_options = [DEFAULT_PRESET]
|
||||
import questionary
|
||||
PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".")
|
||||
load_dotenv()
|
||||
|
||||
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
|
||||
from ..config import Config
|
||||
from ...config import Config
|
||||
|
||||
config = Config()
|
||||
config.load()
|
||||
|
|
@ -70,8 +41,10 @@ config.load()
|
|||
print(config.model)
|
||||
print(config.openai_key)
|
||||
|
||||
OPENAI_API_KEY = config.openai_key
|
||||
|
||||
aclient = instructor.patch(OpenAI())
|
||||
|
||||
import logging
|
||||
|
||||
#Execute Cypher queries to create the user and memory components if they don't exist
|
||||
#
|
||||
|
|
@ -144,7 +117,7 @@ class KnowledgeGraph(BaseModel):
|
|||
#
|
||||
|
||||
def generate_graph(input) -> KnowledgeGraph:
|
||||
return openai.ChatCompletion.create(
|
||||
out = aclient.chat.completions.create(
|
||||
model="gpt-4-1106-preview",
|
||||
messages=[
|
||||
{
|
||||
|
|
@ -189,6 +162,7 @@ def generate_graph(input) -> KnowledgeGraph:
|
|||
],
|
||||
response_model=KnowledgeGraph,
|
||||
)
|
||||
return out
|
||||
|
||||
class AbstractGraphDB(ABC):
|
||||
|
||||
|
|
@ -297,9 +271,9 @@ class Neo4jGraphDB(AbstractGraphDB):
|
|||
return create_statements
|
||||
|
||||
# Update the function to generate Cypher CREATE statements for edges with unique variable names
|
||||
def generate_create_statements_for_edges_with_uuid(self, edges, unique_mapping, base_node_mapping):
|
||||
def generate_create_statements_for_edges_with_uuid(self, user_id, edges, unique_mapping, base_node_mapping):
|
||||
create_statements = []
|
||||
with_statement = f"WITH {', '.join(unique_mapping.values())}, user, semantic, episodic, buffer"
|
||||
with_statement = f"WITH {', '.join(unique_mapping.values())}, {user_id}, semantic, episodic, buffer"
|
||||
create_statements.append(with_statement)
|
||||
|
||||
for edge in edges:
|
||||
|
|
@ -310,9 +284,9 @@ class Neo4jGraphDB(AbstractGraphDB):
|
|||
create_statements.append(f"CREATE ({source_variable})-[:{relationship}]->({target_variable})")
|
||||
return create_statements
|
||||
|
||||
def generate_memory_type_relationships_with_uuid_and_time_context(self, nodes, unique_mapping, base_node_mapping):
|
||||
def generate_memory_type_relationships_with_uuid_and_time_context(self, user_id, nodes, unique_mapping, base_node_mapping):
|
||||
create_statements = []
|
||||
with_statement = f"WITH {', '.join(unique_mapping.values())}, user, semantic, episodic, buffer"
|
||||
with_statement = f"WITH {', '.join(unique_mapping.values())}, {user_id}, semantic, episodic, buffer"
|
||||
create_statements.append(with_statement)
|
||||
|
||||
# Loop through each node and create relationships based on memory_type
|
||||
|
|
@ -332,9 +306,9 @@ class Neo4jGraphDB(AbstractGraphDB):
|
|||
|
||||
return create_statements
|
||||
|
||||
def generate_cypher_query_for_user_prompt_decomposition(self, user_id):
|
||||
async def generate_cypher_query_for_user_prompt_decomposition(self, user_id:str, query:str):
|
||||
|
||||
graph: KnowledgeGraph = generate_graph("I walked in the forest yesterday and added to my list I need to buy some milk in the store")
|
||||
graph: KnowledgeGraph = generate_graph(query)
|
||||
graph_dic = graph.dict()
|
||||
|
||||
node_variable_mapping = create_node_variable_mapping(graph_dic['nodes'])
|
||||
|
|
@ -343,9 +317,9 @@ class Neo4jGraphDB(AbstractGraphDB):
|
|||
unique_node_variable_mapping = append_uuid_to_variable_names(node_variable_mapping)
|
||||
unique_edge_variable_mapping = append_uuid_to_variable_names(edge_variable_mapping)
|
||||
create_nodes_statements = self.generate_create_statements_for_nodes_with_uuid(graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping)
|
||||
create_edges_statements =self.generate_create_statements_for_edges_with_uuid(graph_dic['edges'], unique_node_variable_mapping, node_variable_mapping)
|
||||
create_edges_statements =self.generate_create_statements_for_edges_with_uuid(user_id, graph_dic['edges'], unique_node_variable_mapping, node_variable_mapping)
|
||||
|
||||
memory_type_statements_with_uuid_and_time_context = self.generate_memory_type_relationships_with_uuid_and_time_context(
|
||||
memory_type_statements_with_uuid_and_time_context = self.generate_memory_type_relationships_with_uuid_and_time_context(user_id,
|
||||
graph_dic['nodes'], unique_node_variable_mapping, node_variable_mapping)
|
||||
|
||||
# # Combine all statements
|
||||
|
|
@@ -401,8 +375,8 @@ class Neo4jGraphDB(AbstractGraphDB):
        except Exception as e:
            return f"An error occurred: {str(e)}"
    def retrieve_semantic_memory(self, user_id: str):
        query = """
        MATCH (user:User {userId: $user_id})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)
        query = f"""
        MATCH (user:User {{userId: {user_id} }})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)
        MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge)
        RETURN knowledge
        """
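Note that the rewritten query above inlines user_id into the Cypher text, which leaves string ids unquoted and is open to injection. A minimal sketch that keeps the id as a bound parameter, assuming the class's query helper can forward a parameter dict to the driver (the document-node update below already prepares such a dict):

    def retrieve_semantic_memory(self, user_id: str):
        # Sketch only: userId stays a bound parameter instead of being formatted into the query.
        query = """
        MATCH (user:User {userId: $user_id})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)
        MATCH (semantic)-[:HAS_KNOWLEDGE]->(knowledge)
        RETURN knowledge
        """
        return self.query(query, {"user_id": user_id})  # assumes query() accepts a parameter dict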
@@ -509,12 +483,10 @@ class Neo4jGraphDB(AbstractGraphDB):

        return cypher_query


    def update_document_node_with_namespace(self, user_id: str, vectordb_namespace: str):
    def update_document_node_with_namespace(self, user_id: str, vectordb_namespace: str, document_title: str):
        # Generate the Cypher query
        cypher_query = f'''
        MATCH (user:User {{userId: $user_id}})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document)
        cypher_query = '''
        MATCH (user:User {userId: $user_id})-[:HAS_SEMANTIC_MEMORY]->(semantic:SemanticMemory)-[:HAS_DOCUMENT]->(document:Document {title: $document_title})
        SET document.vectordbNamespace = $vectordb_namespace
        RETURN document
        '''

@@ -522,7 +494,8 @@ class Neo4jGraphDB(AbstractGraphDB):
        # Parameters for the query
        parameters = {
            'user_id': user_id,
            'vectordb_namespace': vectordb_namespace
            'vectordb_namespace': vectordb_namespace,
            'document_title': document_title
        }

        # Execute the query with the provided parameters
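With the extra document_title parameter, callers now have to pass the title as well as the namespace; the call in api.py above supplies only user_id and document_title, so it would still need the namespace argument. A hypothetical call, reusing values that appear elsewhere in this commit:

    neo4j_graph_db.update_document_node_with_namespace(
        user_id="user123",
        vectordb_namespace="SEMANTICMEMORY",  # assumption: reuse the semantic-memory namespace
        document_title="The Call of the Wild",
    )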
@ -11,10 +11,10 @@ load_dotenv()
|
|||
|
||||
|
||||
# this is needed to import classes from other modules
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
# Get the parent directory of your script and add it to sys.path
|
||||
parent_dir = os.path.dirname(script_dir)
|
||||
sys.path.append(parent_dir)
|
||||
# script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
# # Get the parent directory of your script and add it to sys.path
|
||||
# parent_dir = os.path.dirname(script_dir)
|
||||
# sys.path.append(parent_dir)
|
||||
|
||||
|
||||
# in seconds
|
||||
|
|
@ -1,6 +1,16 @@
|
|||
|
||||
from contextlib import asynccontextmanager
|
||||
import logging
|
||||
from .models.sessions import Session
|
||||
from .models.memory import MemoryModel
|
||||
from .models.user import User
|
||||
from .models.operation import Operation
|
||||
from .models.testset import TestSet
|
||||
from .models.testoutput import TestOutput
|
||||
from .models.metadatas import MetaDatas
|
||||
from .models.docs import DocsModel
|
||||
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
@ -37,4 +47,18 @@ async def update_entity(session, model, entity_id, new_value):
|
|||
entity.operation_status = new_value
|
||||
return "Successfully updated entity"
|
||||
else:
|
||||
return "Entity not found"
|
||||
return "Entity not found"
|
||||
|
||||
|
||||
async def fetch_job_id(session, user_id=None, memory_id=None, job_id=None):
|
||||
try:
|
||||
result = await session.execute(
|
||||
session.query(Session.id)
|
||||
.filter_by(user_id=user_id, id=job_id)
|
||||
.order_by(Session.created_at)
|
||||
.first()
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
except Exception as e:
|
||||
logging.error(f"An error occurred: {str(e)}")
|
||||
return None
|
||||
|
|
@ -1,11 +1,10 @@
|
|||
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy import Column, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
from ..database import Base
|
||||
class DocsModel(Base):
|
||||
__tablename__ = 'docs'
|
||||
|
||||
|
|
@ -1,18 +1,17 @@
|
|||
# memory.py
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy import Column, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
from ..database import Base
|
||||
class MemoryModel(Base):
|
||||
__tablename__ = 'memories'
|
||||
|
||||
id = Column(String, primary_key=True)
|
||||
user_id = Column(String, ForeignKey('users.id'), index=True)
|
||||
operation_id = Column(String, ForeignKey('operations.id'), index=True)
|
||||
memory_name = Column(String, nullable=True)
|
||||
created_at = Column(DateTime, default=datetime.utcnow)
|
||||
updated_at = Column(DateTime, onupdate=datetime.utcnow)
|
||||
methods_list = Column(String , nullable=True)
|
||||
|
|
@ -1,13 +1,10 @@
|
|||
# metadata.py
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy import Column, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
|
||||
from ..database import Base
|
||||
class MetaDatas(Base):
|
||||
__tablename__ = 'metadatas'
|
||||
|
||||
|
|
@ -4,9 +4,7 @@ from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
|||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
from ..database import Base
|
||||
|
||||
class Operation(Base):
|
||||
__tablename__ = 'operations'
|
||||
|
|
@ -4,8 +4,8 @@ from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
|||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
from ..database import Base
|
||||
|
||||
|
||||
class Session(Base):
|
||||
|
|
@ -1,21 +1,14 @@
|
|||
# test_output.py
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from database.database import Base
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, String, DateTime, ForeignKey, JSON
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
from ..database import Base
|
||||
|
||||
class TestOutput(Base):
|
||||
"""
|
||||
|
|
@ -1,13 +1,10 @@
|
|||
# test_set.py
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy import Column, String, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
|
||||
from ..database import Base
|
||||
class TestSet(Base):
|
||||
__tablename__ = 'test_sets'
|
||||
|
||||
|
|
@ -1,12 +1,11 @@
|
|||
# user.py
|
||||
from datetime import datetime
|
||||
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
|
||||
from sqlalchemy import Column, String, DateTime
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..database.database import Base
|
||||
|
||||
from ..database import Base
|
||||
|
||||
|
||||
class User(Base):
|
||||
|
|
@ -3,23 +3,27 @@ import logging
|
|||
from io import BytesIO
|
||||
import os, sys
|
||||
# Add the parent directory to sys.path
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..vectordb.vectordb import PineconeVectorDB, WeaviateVectorDB
|
||||
# sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
import sqlalchemy as sa
|
||||
print(os.getcwd())
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
import marvin
|
||||
# import marvin
|
||||
import requests
|
||||
from dotenv import load_dotenv
|
||||
from langchain.document_loaders import PyPDFLoader
|
||||
from langchain.retrievers import WeaviateHybridSearchRetriever
|
||||
from weaviate.gql.get import HybridFusion
|
||||
from ..models.sessions import Session
|
||||
from ..models.testset import TestSet
|
||||
from ..models.testoutput import TestOutput
|
||||
from ..models.metadatas import MetaDatas
|
||||
from ..models.operation import Operation
|
||||
|
||||
|
||||
from database.postgres.models.sessions import Session
|
||||
from database.postgres.models.testset import TestSet
|
||||
from database.postgres.models.testoutput import TestOutput
|
||||
from database.postgres.models.metadatas import MetaDatas
|
||||
from database.postgres.models.operation import Operation
|
||||
from database.postgres.models.docs import DocsModel
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from ..database.database import engine
|
||||
from database.postgres.database import engine
|
||||
load_dotenv()
|
||||
from typing import Optional
|
||||
import time
|
||||
|
|
@ -29,7 +33,7 @@ tracemalloc.start()
|
|||
|
||||
from datetime import datetime
|
||||
from langchain.embeddings.openai import OpenAIEmbeddings
|
||||
|
||||
from database.vectordb.vectordb import PineconeVectorDB, WeaviateVectorDB
|
||||
from langchain.schema import Document
|
||||
import uuid
|
||||
import weaviate
|
||||
|
|
@ -38,7 +42,7 @@ import json
|
|||
|
||||
|
||||
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
|
||||
marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY")
|
||||
# marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY")
|
||||
|
||||
class VectorDBFactory:
|
||||
def __init__(self):
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
from langchain.document_loaders import PyPDFLoader
|
||||
import sys, os
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
from ..shared.chunk_strategy import ChunkStrategy
|
||||
|
||||
from shared.chunk_strategy import ChunkStrategy
|
||||
import re
|
||||
def chunk_data(chunk_strategy=None, source_data=None, chunk_size=None, chunk_overlap=None):
|
||||
|
||||
|
|
@ -2,10 +2,9 @@ from io import BytesIO
|
|||
import fitz
|
||||
import os
|
||||
import sys
|
||||
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
from vectordb.chunkers.chunkers import chunk_data
|
||||
from llama_hub.file.base import SimpleDirectoryReader
|
||||
from database.vectordb.chunkers.chunkers import chunk_data
|
||||
|
||||
from langchain.document_loaders import UnstructuredURLLoader
|
||||
from langchain.document_loaders import DirectoryLoader
|
||||
import logging
|
||||
|
|
@ -29,6 +28,7 @@ async def _document_loader( observation: str, loader_settings: dict):
|
|||
if loader_settings.get("source") == "URL":
|
||||
for file in list_of_docs:
|
||||
if document_format == "PDF":
|
||||
logging.info("File is %s", file)
|
||||
pdf_response = requests.get(file)
|
||||
pdf_stream = BytesIO(pdf_response.content)
|
||||
with fitz.open(stream=pdf_stream, filetype='pdf') as doc:
|
||||
|
|
@ -4,7 +4,7 @@ import logging
|
|||
|
||||
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
||||
from marshmallow import Schema, fields
|
||||
from loaders.loaders import _document_loader
|
||||
from database.vectordb.loaders.loaders import _document_loader
|
||||
# Add the parent directory to sys.path
|
||||
|
||||
|
||||
|
|
@ -3,15 +3,18 @@ import logging
|
|||
from sqlalchemy.future import select
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
import marvin
|
||||
from dotenv import load_dotenv
|
||||
from level_4.cognitive_architecture.models import User
|
||||
from level_4.cognitive_architecture.models.memory import MemoryModel
|
||||
|
||||
load_dotenv()
|
||||
import os
|
||||
|
||||
print(os.getcwd())
|
||||
|
||||
|
||||
from database.postgres.models.user import User
|
||||
from database.postgres.models.memory import MemoryModel
|
||||
|
||||
import ast
|
||||
import tracemalloc
|
||||
from level_4.cognitive_architecture.database.database_crud import add_entity
|
||||
from database.postgres.database_crud import add_entity
|
||||
|
||||
tracemalloc.start()
|
||||
|
||||
|
|
@ -21,9 +24,16 @@ import uuid
|
|||
|
||||
load_dotenv()
|
||||
|
||||
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
|
||||
marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY")
|
||||
from vectordb.basevectordb import BaseMemory
|
||||
|
||||
|
||||
from database.vectordb.basevectordb import BaseMemory
|
||||
|
||||
from config import Config
|
||||
|
||||
config = Config()
|
||||
config.load()
|
||||
|
||||
|
||||
|
||||
|
||||
class DynamicBaseMemory(BaseMemory):
|
||||
|
|
@ -130,11 +140,10 @@ class Memory:
|
|||
user_id: str = "676",
|
||||
session=None,
|
||||
index_name: str = None,
|
||||
knowledge_source: str = None,
|
||||
knowledge_type: str = None,
|
||||
db_type: str = "weaviate",
|
||||
namespace: str = None,
|
||||
memory_id: str = None,
|
||||
memory_class = None,
|
||||
) -> None:
|
||||
self.load_environment_variables()
|
||||
self.memory_id = memory_id
|
||||
|
|
@ -142,44 +151,51 @@ class Memory:
|
|||
self.session = session
|
||||
self.index_name = index_name
|
||||
self.db_type = db_type
|
||||
self.knowledge_source = knowledge_source
|
||||
self.knowledge_type = knowledge_type
|
||||
self.long_term_memory = None
|
||||
self.short_term_memory = None
|
||||
self.namespace = namespace
|
||||
self.memory_instances = []
|
||||
# inspect and fix this
|
||||
self.memory_class = DynamicBaseMemory(
|
||||
"Memory", user_id, str(self.memory_id), index_name, db_type, namespace
|
||||
)
|
||||
self.memory_class = memory_class
|
||||
# self.memory_class = DynamicBaseMemory(
|
||||
# "Memory", user_id, str(self.memory_id), index_name, db_type, namespace
|
||||
# )
|
||||
|
||||
|
||||
|
||||
def load_environment_variables(self) -> None:
|
||||
load_dotenv()
|
||||
self.OPENAI_TEMPERATURE = float(os.getenv("OPENAI_TEMPERATURE", 0.0))
|
||||
self.OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
|
||||
self.OPENAI_TEMPERATURE = config.openai_temperature
|
||||
self.OPENAI_API_KEY = config.openai_key
|
||||
|
||||
@classmethod
|
||||
async def create_memory(cls, user_id: str, session, **kwargs):
|
||||
async def create_memory(cls, user_id: str, session, memory_label:str, **kwargs):
|
||||
"""
|
||||
Class method that acts as a factory method for creating Memory instances.
|
||||
It performs necessary DB checks or updates before instance creation.
|
||||
"""
|
||||
existing_user = await cls.check_existing_user(user_id, session)
|
||||
logging.info(f"Existing user: {existing_user}")
|
||||
|
||||
if existing_user:
|
||||
# Handle existing user scenario...
|
||||
memory_id = await cls.check_existing_memory(user_id, session)
|
||||
memory_id = await cls.check_existing_memory(user_id,memory_label, session)
|
||||
if memory_id is None:
|
||||
memory_id = await cls.handle_new_memory(user_id, session, memory_name= memory_label)
|
||||
logging.info(
|
||||
f"Existing user {user_id} found in the DB. Memory ID: {memory_id}"
|
||||
)
|
||||
else:
|
||||
# Handle new user scenario...
|
||||
memory_id = await cls.handle_new_user(user_id, session)
|
||||
await cls.handle_new_user(user_id, session)
|
||||
|
||||
memory_id = await cls.handle_new_memory(user_id, session, memory_name= memory_label)
|
||||
logging.info(
|
||||
f"New user {user_id} created in the DB. Memory ID: {memory_id}"
|
||||
)
|
||||
|
||||
return cls(user_id=user_id, session=session, memory_id=memory_id, **kwargs)
|
||||
memory_class = DynamicBaseMemory(
|
||||
memory_label, user_id, str(memory_id), index_name=memory_label, db_type='weaviate', **kwargs
|
||||
)
|
||||
|
||||
return cls(user_id=user_id, session=session, memory_id=memory_id, memory_class=memory_class, **kwargs)
|
||||
|
||||
async def list_memory_classes(self):
|
||||
"""
|
||||
|
|
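With the new signature the memory label is a required positional argument ahead of the keyword options. A minimal call, mirroring the updated main() at the bottom of this file and the handlers in api.py:

    async with session_scope(AsyncSessionLocal()) as session:
        # "SEMANTICMEMORY" doubles as the memory label and the vector store namespace here.
        memory = await Memory.create_memory(
            "677", session, "SEMANTICMEMORY", namespace="SEMANTICMEMORY"
        )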
@ -195,42 +211,85 @@ class Memory:
|
|||
return result.scalar_one_or_none()
|
||||
|
||||
@staticmethod
|
||||
async def check_existing_memory(user_id: str, session):
|
||||
"""Check if a user memory exists in the DB and return it."""
|
||||
result = await session.execute(
|
||||
select(MemoryModel.id).where(MemoryModel.user_id == user_id)
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
async def check_existing_memory(user_id: str, memory_label:str, session):
|
||||
"""Check if a user memory exists in the DB and return it. Filters by user and label"""
|
||||
try:
|
||||
result = await session.execute(
|
||||
select(MemoryModel.id).where(MemoryModel.user_id == user_id)
|
||||
.filter_by(memory_name=memory_label)
|
||||
.order_by(MemoryModel.created_at)
|
||||
|
||||
)
|
||||
return result.scalar_one_or_none()
|
||||
except Exception as e:
|
||||
logging.error(f"An error occurred: {str(e)}")
|
||||
return None
|
||||
@staticmethod
|
||||
async def handle_new_user(user_id: str, session):
|
||||
"""Handle new user creation in the DB and return the new memory ID."""
|
||||
"""
|
||||
Handle new user creation in the database.
|
||||
|
||||
# handle these better in terms of retry and error handling
|
||||
memory_id = str(uuid.uuid4())
|
||||
new_user = User(id=user_id)
|
||||
await add_entity(session, new_user)
|
||||
Args:
|
||||
user_id (str): The unique identifier for the new user.
|
||||
session: The database session for the operation.
|
||||
|
||||
Returns:
|
||||
str: A success message or an error message.
|
||||
|
||||
Raises:
|
||||
Exception: If any error occurs during the user creation process.
|
||||
"""
|
||||
try:
|
||||
new_user = User(id=user_id)
|
||||
await add_entity(session, new_user)
|
||||
return "User creation successful."
|
||||
except Exception as e:
|
||||
return f"Error creating user: {str(e)}"
|
||||
|
||||
@staticmethod
|
||||
async def handle_new_memory(user_id: str, session, job_id: str = None, memory_name: str = None):
|
||||
"""
|
||||
Handle new memory creation associated with a user.
|
||||
|
||||
Args:
|
||||
user_id (str): The user's unique identifier.
|
||||
session: The database session for the operation.
|
||||
job_id (str, optional): The identifier of the associated job, if any.
|
||||
memory_name (str, optional): The name of the memory.
|
||||
|
||||
Returns:
|
||||
str: The unique memory ID if successful, or an error message.
|
||||
|
||||
Raises:
|
||||
Exception: If any error occurs during memory creation.
|
||||
"""
|
||||
try:
|
||||
memory_id = str(uuid.uuid4())
|
||||
memory = MemoryModel(
|
||||
id=memory_id,
|
||||
user_id=user_id,
|
||||
operation_id=job_id,
|
||||
memory_name=memory_name,
|
||||
methods_list=str(["Memory", "SemanticMemory", "EpisodicMemory"]),
|
||||
attributes_list=str(
|
||||
[
|
||||
"user_id",
|
||||
"index_name",
|
||||
"db_type",
|
||||
"knowledge_source",
|
||||
"knowledge_type",
|
||||
"memory_id",
|
||||
"long_term_memory",
|
||||
"short_term_memory",
|
||||
"namespace",
|
||||
]
|
||||
),
|
||||
)
|
||||
await add_entity(session, memory)
|
||||
return memory_id
|
||||
except Exception as e:
|
||||
return f"Error creating memory: {str(e)}"
|
||||
|
||||
memory = MemoryModel(
|
||||
id=memory_id,
|
||||
user_id=user_id,
|
||||
methods_list=str(["Memory", "SemanticMemory", "EpisodicMemory"]),
|
||||
attributes_list=str(
|
||||
[
|
||||
"user_id",
|
||||
"index_name",
|
||||
"db_type",
|
||||
"knowledge_source",
|
||||
"knowledge_type",
|
||||
"memory_id",
|
||||
"long_term_memory",
|
||||
"short_term_memory",
|
||||
"namespace",
|
||||
]
|
||||
),
|
||||
)
|
||||
await add_entity(session, memory)
|
||||
return memory_id
|
||||
|
||||
async def add_memory_instance(self, memory_class_name: str):
|
||||
"""Add a new memory instance to the memory_instances list."""
|
||||
|
|
@ -372,22 +431,24 @@ async def main():
|
|||
loader_settings = {
|
||||
"format": "PDF",
|
||||
"source": "URL",
|
||||
"path": "https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf",
|
||||
"path": ["https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf"],
|
||||
}
|
||||
# memory_instance = Memory(namespace='SEMANTICMEMORY')
|
||||
# sss = await memory_instance.dynamic_method_call(memory_instance.semantic_memory_class, 'fetch_memories', observation='some_observation')
|
||||
|
||||
from level_4.cognitive_architecture.database.database_crud import session_scope
|
||||
from level_4.cognitive_architecture.database.database import AsyncSessionLocal
|
||||
from database.postgres.database_crud import session_scope
|
||||
from database.postgres.database import AsyncSessionLocal
|
||||
|
||||
async with session_scope(AsyncSessionLocal()) as session:
|
||||
memory = await Memory.create_memory("676", session, namespace="SEMANTICMEMORY")
|
||||
memory = await Memory.create_memory("677", session, "SEMANTICMEMORY", namespace="SEMANTICMEMORY")
|
||||
ff = memory.memory_instances
|
||||
logging.info("ssss %s", ff)
|
||||
|
||||
# Adding a memory instance
|
||||
await memory.add_memory_instance("ExampleMemory")
|
||||
# await memory.add_memory_instance("ExampleMemory")
|
||||
|
||||
# Managing memory attributes
|
||||
existing_user = await Memory.check_existing_user("676", session)
|
||||
existing_user = await Memory.check_existing_user("677", session)
|
||||
print("here is the existing user", existing_user)
|
||||
await memory.manage_memory_attributes(existing_user)
|
||||
# aeehuvyq_semanticememory_class
|
||||
|
|
@ -401,7 +462,7 @@ async def main():
|
|||
susu = await memory.dynamic_method_call(
|
||||
memory.semanticmemory_class,
|
||||
"fetch_memories",
|
||||
observation="some_observation",
|
||||
observation="document summary",
|
||||
)
|
||||
print(susu)
|
||||
|
||||
|
|
|
|||
|
|
@ -13,29 +13,29 @@ services:
|
|||
networks:
|
||||
- promethai_mem_backend
|
||||
|
||||
# promethai_mem:
|
||||
# networks:
|
||||
# - promethai_mem_backend
|
||||
# build:
|
||||
# context: ./
|
||||
# volumes:
|
||||
# - "./:/app"
|
||||
# - ./.data:/app/.data
|
||||
#
|
||||
# environment:
|
||||
# - HOST=0.0.0.0
|
||||
# profiles: ["exclude-from-up"]
|
||||
# ports:
|
||||
# - 8000:8000
|
||||
# - 443:443
|
||||
# - 80:80
|
||||
# depends_on:
|
||||
# - postgres
|
||||
# deploy:
|
||||
# resources:
|
||||
# limits:
|
||||
# cpus: "4.0"
|
||||
# memory: 8GB
|
||||
promethai_mem:
|
||||
networks:
|
||||
- promethai_mem_backend
|
||||
build:
|
||||
context: ./
|
||||
volumes:
|
||||
- "./:/app"
|
||||
- ./.data:/app/.data
|
||||
|
||||
environment:
|
||||
- HOST=0.0.0.0
|
||||
profiles: ["exclude-from-up"]
|
||||
ports:
|
||||
- 8000:8000
|
||||
- 443:443
|
||||
- 80:80
|
||||
depends_on:
|
||||
- postgres
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "4.0"
|
||||
memory: 8GB
|
||||
|
||||
|
||||
postgres:
|
||||
|
|
|
|||
222
level_4/main.py
222
level_4/main.py
|
|
@ -1,50 +1,22 @@
|
|||
from enum import Enum
|
||||
|
||||
import typer
|
||||
import os
|
||||
import uuid
|
||||
# import marvin
|
||||
# from pydantic_settings import BaseSettings
|
||||
from langchain.chains import GraphCypherQAChain
|
||||
from langchain.chat_models import ChatOpenAI
|
||||
# from marvin import ai_classifier
|
||||
# marvin.settings.openai.api_key = os.environ.get("OPENAI_API_KEY")
|
||||
from cognitive_architecture.database.graph_database.graph import Neo4jGraphDB
|
||||
from cognitive_architecture.database.postgres.models.memory import MemoryModel
|
||||
|
||||
from cognitive_architecture.models.sessions import Session
|
||||
from cognitive_architecture.models.testset import TestSet
|
||||
from cognitive_architecture.models.testoutput import TestOutput
|
||||
from cognitive_architecture.models.metadatas import MetaDatas
|
||||
from cognitive_architecture.models.operation import Operation
|
||||
from cognitive_architecture.models.docs import DocsModel
|
||||
from cognitive_architecture.models.memory import MemoryModel
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
from langchain.document_loaders import TextLoader
|
||||
from langchain.embeddings.openai import OpenAIEmbeddings
|
||||
from langchain.graphs import Neo4jGraph
|
||||
from langchain.text_splitter import TokenTextSplitter
|
||||
from langchain.vectorstores import Neo4jVector
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
import uuid
|
||||
|
||||
from graphviz import Digraph
|
||||
from level_4.cognitive_architecture.database.postgres.database_crud import session_scope
|
||||
from cognitive_architecture.database.postgres.database import AsyncSessionLocal
|
||||
|
||||
from cognitive_architecture.database.database_crud import session_scope
|
||||
from cognitive_architecture.database.database import AsyncSessionLocal
|
||||
|
||||
import openai
|
||||
import instructor
|
||||
from openai import OpenAI
|
||||
|
||||
# Adds response_model to ChatCompletion
|
||||
# Allows the return of Pydantic model rather than raw JSON
|
||||
instructor.patch()
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List
|
||||
instructor.patch(OpenAI())
|
||||
DEFAULT_PRESET = "promethai_chat"
|
||||
preset_options = [DEFAULT_PRESET]
|
||||
import questionary
|
||||
PROMETHAI_DIR = os.path.join(os.path.expanduser("~"), ".")
|
||||
load_dotenv()
|
||||
|
||||
|
|
@ -60,24 +32,15 @@ print(config.openai_key)
|
|||
|
||||
import logging
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import asyncio
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
async def get_vectordb_namespace(session: AsyncSession, user_id: str):
|
||||
try:
|
||||
result = await session.execute(
|
||||
select(MemoryModel.id).where(MemoryModel.user_id == user_id).order_by(MemoryModel.created_at.desc()).limit(1)
|
||||
select(MemoryModel.memory_name).where(MemoryModel.user_id == user_id).order_by(MemoryModel.created_at.desc())
|
||||
)
|
||||
namespace = result.scalar_one_or_none()
|
||||
namespace = [row[0] for row in result.fetchall()]
|
||||
return namespace
|
||||
except Exception as e:
|
||||
logging.error(f"An error occurred while retrieving the Vectordb_namespace: {str(e)}")
|
||||
|
|
@ -119,10 +82,14 @@ async def update_document_vectordb_namespace(postgres_session: AsyncSession, use
|
|||
    if not vectordb_namespace:
        logging.error("Vectordb_namespace could not be retrieved.")
        return None

    # Update the Document node in Neo4j with the namespace
    update_result = update_document_node_with_namespace(user_id, vectordb_namespace)
    return update_result
    from cognitive_architecture.database.graph_database.graph import Neo4jGraphDB
    # Example initialization (replace with your actual connection details)
    neo4j_graph_db = Neo4jGraphDB(url='bolt://localhost:7687', username='neo4j', password='pleaseletmein')
    results = []
    for namespace in vectordb_namespace:
        update_result = neo4j_graph_db.update_document_node_with_namespace(user_id, namespace)
        results.append(update_result)
    return results
@@ -185,45 +152,134 @@ async def update_document_vectordb_namespace(postgres_session: AsyncSession, use
# }
#
# execute_cypher_query(rs, parameters)
#
# async def main():
#     user_id = "User1"
#
#     async with session_scope(AsyncSessionLocal()) as session:
#         await update_document_vectordb_namespace(session, user_id)
#
#     # print(rs)
#
# if __name__ == "__main__":
#     import asyncio
#
#     asyncio.run(main())
#
# # config = Config()
# # config.load()
# #
# # print(config.model)
# # print(config.openai_key)
from cognitive_architecture.database.postgres.database_crud import fetch_job_id
import uuid
from cognitive_architecture.database.postgres.models.sessions import Session
from cognitive_architecture.database.postgres.models.operation import Operation
from cognitive_architecture.database.postgres.database_crud import session_scope, add_entity, update_entity, fetch_job_id
from cognitive_architecture.database.postgres.models.metadatas import MetaDatas
from cognitive_architecture.database.postgres.models.testset import TestSet
from cognitive_architecture.database.postgres.models.testoutput import TestOutput
from cognitive_architecture.database.postgres.models.docs import DocsModel
from cognitive_architecture.database.postgres.models.memory import MemoryModel
async def main():
    user_id = "User1"
    from cognitive_architecture.graph_database.graph import Neo4jGraphDB
    # Example initialization (replace with your actual connection details)
    neo4j_graph_db = Neo4jGraphDB(url='bolt://localhost:7687', username='neo4j', password='pleaseletmein')
    # Generate the Cypher query for a specific user
    user_id = 'user123'  # Replace with the actual user ID
    cypher_query = neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id)
    # Execute the generated Cypher query
    result = neo4j_graph_db.query(cypher_query)
    user_id = "user"

    # async with session_scope(AsyncSessionLocal()) as session:
    #     await update_document_vectordb_namespace(session, user_id)
    async with session_scope(AsyncSessionLocal()) as session:
        # out = await get_vectordb_namespace(session, user_id)
        # print(out)
        # job_id = ""
        # job_id = await fetch_job_id(session, user_id=user_id, job_id=job_id)
        # if job_id is None:
        #     job_id = str(uuid.uuid4())
        #
        # await add_entity(
        #     session,
        #     Operation(
        #         id=job_id,
        #         user_id=user_id,
        #         operation_params="",
        #         number_of_files=2,
        #         operation_status = "RUNNING",
        #         operation_type="",
        #         test_set_id="",
        #     ),
        # )

        # await update_document_vectordb_namespace(session, user_id)
        # from cognitive_architecture.graph_database.graph import Neo4jGraphDB
        # # Example initialization (replace with your actual connection details)
        neo4j_graph_db = Neo4jGraphDB(url='bolt://localhost:7687', username='neo4j', password='pleaseletmein')
        # # Generate the Cypher query for a specific user
        # user_id = 'user123'  # Replace with the actual user ID
        cypher_query = await neo4j_graph_db.generate_cypher_query_for_user_prompt_decomposition(user_id, "I walked in the forest yesterday and added to my list I need to buy some milk in the store")
        # result = neo4j_graph_db.query(cypher_query)
        call_of_the_wild_summary = {
            "user_id": user_id,
            "document_category": "Classic Literature",
            "title": "The Call of the Wild",
            "summary": (
                "'The Call of the Wild' is a novel by Jack London set in the Yukon during the 1890s Klondike "
                "Gold Rush—a period when strong sled dogs were in high demand. The novel's central character "
                "is a dog named Buck, a domesticated dog living at a ranch in the Santa Clara Valley of California "
                "as the story opens. Stolen from his home and sold into the brutal existence of an Alaskan sled dog, "
                "he reverts to atavistic traits. Buck is forced to adjust to, and survive, cruel treatments and fight "
                "to dominate other dogs in a harsh climate. Eventually, he sheds the veneer of civilization, relying "
                "on primordial instincts and lessons he learns, to emerge as a leader in the wild. London drew on his "
                "own experiences in the Klondike, and the book provides a snapshot of the epical gold rush and the "
                "harsh realities of life in the wilderness. The novel explores themes of morality versus instinct, "
                "the struggle for survival in the natural world, and the intrusion of civilization on the wilderness. "
                "As Buck's wild nature is awakened, he rises to become a respected and feared leader in the wild, "
                "answering the primal call of nature."
            )
        }
        rs = neo4j_graph_db.create_document_node_cypher(call_of_the_wild_summary, user_id)

        neo4j_graph_db.query(rs, call_of_the_wild_summary)
        print(cypher_query)

        neo4j_graph_db.update_document_node_with_namespace(user_id, document_title="The Call of the Wild")

        # await update_document_vectordb_namespace(session, user_id)
        # # Execute the generated Cypher query
        # result = neo4j_graph_db.query(cypher_query)
        params = {
            "version": "1.0",
            "agreement_id": "AG123456",
            "privacy_policy": "https://example.com/privacy",
            "terms_of_service": "https://example.com/terms",
            "format": "json",
            "schema_version": "1.1",
            "checksum": "a1b2c3d4e5f6",
            "owner": "John Doe",
            "license": "MIT",
            "validity_start": "2023-08-01",
            "validity_end": "2024-07-31",
        }
        loader_settings = {
            "format": "PDF",
            "source": "URL",
            "path": "https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf",
        }
        # memory_instance = Memory(namespace='SEMANTICMEMORY')
        # sss = await memory_instance.dynamic_method_call(memory_instance.semantic_memory_class, 'fetch_memories', observation='some_observation')
        # from cognitive_architecture.vectorstore_manager import Memory
        #
        #
        # memory = await Memory.create_memory("676", session, namespace="SEMANTICMEMORY")
        #
        # # Adding a memory instance
        # await memory.add_memory_instance("ExampleMemory")
        #
        # # Managing memory attributes
        # existing_user = await Memory.check_existing_user("676", session)
        # print("here is the existing user", existing_user)
        # await memory.manage_memory_attributes(existing_user)
        # # aeehuvyq_semanticememory_class
        #
        # await memory.add_dynamic_memory_class("semanticmemory", "SEMANTICMEMORY")
        # await memory.add_method_to_class(memory.semanticmemory_class, "add_memories")
        # # await memory.add_method_to_class(memory.semanticmemory_class, "fetch_memories")
        # sss = await memory.dynamic_method_call(memory.semanticmemory_class, 'add_memories',
        #                                        observation='some_observation', params=params, loader_settings=loader_settings)

        # print(rs)
if __name__ == "__main__":
    import asyncio

    asyncio.run(main())
6127
level_4/poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
52
level_4/pyproject.toml
Normal file
@@ -0,0 +1,52 @@
[tool.poetry]
name = "PromethAI_memory"
version = "0.1.0"
description = "PromethAI memory manager"
authors = ["Vasilije Markovic"]
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.10"
langchain = "v0.0.308"

nltk = "3.8.1"
openai = "0.27.8"
pinecone-client = "2.2.2"
python-dotenv = "1.0.0"
pyyaml = "6.0"
fastapi = "^0.98.0"
uvicorn = "0.22.0"
pexpect = "^4.8.0"
boto3 = "^1.26.125"
gptcache = "^0.1.22"
gunicorn = "^20.1.0"
tiktoken = "^0.4.0"
spacy = "^3.5.3"
python-jose = "^3.3.0"
pypdf = "^3.12.0"
fastjsonschema = "^2.18.0"
marvin = "^1.3.0"
dlt = { version ="^0.3.8", extras = ["duckdb"]}
weaviate-client = "^3.22.1"
python-multipart = "^0.0.6"
deepeval = "^0.20.12"
pymupdf = "^1.23.3"
psycopg2 = "^2.9.8"
llama-index = "^0.8.39.post2"
llama-hub = "^0.0.34"
sqlalchemy = "^2.0.21"
asyncpg = "^0.28.0"
dash = "^2.14.0"
unstructured = {extras = ["pdf"], version = "^0.10.23"}
sentence-transformers = "2.2.2"
torch = "2.0.*"
segment-analytics-python = "^2.2.3"
pdf2image = "^1.16.3"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"