Updated the entire script and testing with open source books

parent e12a424ab4
commit 40b93f11ca

3 changed files with 813 additions and 515 deletions
@@ -44,10 +44,7 @@ Here is a payload example:
{
    "payload": {
        "user_id": "681",
        "session_id": "471",
        "model_speed": "slow",
        "prompt": "I want ",
        "pdf_url": "https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf",
        "params": {
            "version": "1.0",
            "agreement_id": "AG123456",
@@ -60,7 +57,47 @@ Here is a payload example:
            "license": "MIT",
            "validity_start": "2023-08-01",
            "validity_end": "2024-07-31"
        }
    },
    "loader_settings": {
        "format": "PDF", // or "HTML" // or 'DOCX' // or 'TXT'
        "source": "url", // or "file"
        "path": "https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf"
    },
    "attention_modulators": {
        "relevance": 0.5,
        "saliency": 0.5,
        "frequency": 0.5,
        "repetition": 0.5,
        "length": 0.5,
        "position": 0.5,
        "context": 0.5,
        "emotion": 0.5,
        "sentiment": 0.5,
        "perspective": 0.5,
        "style": 0.5,
        "grammar": 0.5,
        "spelling": 0.5,
        "logic": 0.5,
        "coherence": 0.5,
        "cohesion": 0.5,
        "plausibility": 0.5,
        "consistency": 0.5,
        "informativeness": 0.5,
        "specificity": 0.5,
        "detail": 0.5,
        "accuracy": 0.5,
        "topicality": 0.5,
        "focus": 0.5,
        "clarity": 0.5,
        "simplicity": 0.5,
        "naturalness": 0.5,
        "fluency": 0.5,
        "variety": 0.5,
        "vividness": 0.5,
        "originality": 0.5,
        "creativity": 0.5,
        "humor": 0.5,
    },
}
}
```
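For orientation, a payload shaped like the example above is what the add-memory routes in level_2/api.py below expect in the request body. The sketch that follows is illustrative only: the local address, the choice of the "buffer" memory type, and the trimmed-down field values are assumptions, not values taken from this commit.

```
# Illustrative sketch: POST the example payload to one of the add-memory routes.
# The base URL and the "buffer" memory type are assumptions for this example.
import requests

body = {
    "payload": {
        "user_id": "681",
        "session_id": "471",
        "model_speed": "slow",
        "prompt": "I want ",
        "params": {"version": "1.0", "agreement_id": "AG123456"},
        "loader_settings": {
            "format": "PDF",
            "source": "url",
            "path": "https://www.ibiblio.org/ebooks/London/Call%20of%20Wild.pdf",
        },
        "attention_modulators": {"relevance": 0.5, "saliency": 0.5},
    }
}

r = requests.post("http://127.0.0.1:8000/buffer/add-memory", json=body)
print(r.status_code, r.json())
```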
level_2/api.py (213 changed lines)
@@ -18,6 +18,7 @@ from fastapi import HTTPException
from fastapi import FastAPI, UploadFile, File
from typing import List
import requests

# Set up logging
logging.basicConfig(
    level=logging.INFO,  # Set the logging level (e.g., DEBUG, INFO, WARNING, ERROR, CRITICAL)
@@ -42,15 +43,16 @@ class ImageResponse(BaseModel):
    message: str


@app.get("/", )
@app.get(
    "/",
)
async def root():
    """
    Root endpoint that returns a welcome message.
    """
    return {"message": "Hello, World, I am alive!"}


@app.get("/health")
def health_check():
    """
@@ -59,16 +61,13 @@ def health_check():
    return {"status": "OK"}


#curl -X POST -H "Content-Type: application/json" -d '{"data": "YourPayload"}' -F "files=@/path/to/your/pdf/file.pdf" http://127.0.0.1:8000/upload/
# curl -X POST -H "Content-Type: application/json" -d '{"data": "YourPayload"}' -F "files=@/path/to/your/pdf/file.pdf" http://127.0.0.1:8000/upload/


class Payload(BaseModel):
    payload: Dict[str, Any]


# @app.post("/upload/", response_model=dict)
# async def upload_pdf_and_payload(
#     payload: Payload,
@@ -132,112 +131,94 @@ class Payload(BaseModel):

def memory_factory(memory_type):
    load_dotenv()

    class Payload(BaseModel):
        payload: Dict[str, Any]

    @app.post("/{memory_type}/add-memory", response_model=dict)
    async def add_memory(
        payload: Payload,
        # files: List[UploadFile] = File(...),
        payload: Payload,
        # files: List[UploadFile] = File(...),
    ):
        try:
            logging.info(" Init PDF processing")

            decoded_payload = payload.payload

            if 'pdf_url' in decoded_payload:
                pdf_response = requests.get(decoded_payload['pdf_url'])
                pdf_content = pdf_response.content
            Memory_ = Memory(user_id=decoded_payload["user_id"])

                logging.info("Downloaded PDF from URL")
            await Memory_.async_init()

                # Create an in-memory file-like object for the PDF content
                pdf_stream = BytesIO(pdf_content)

                contents = pdf_stream.read()

                tmp_location = os.path.join('/tmp', "tmp.pdf")
                with open(tmp_location, 'wb') as tmp_file:
                    tmp_file.write(contents)

                logging.info("Wrote PDF from URL")

                # Process the PDF using PyPDFLoader
                loader = PyPDFLoader(tmp_location)
                # pages = loader.load_and_split()
                logging.info(" PDF split into pages")

            Memory_ = Memory(user_id=decoded_payload['user_id'])

            await Memory_.async_init()

            memory_class = getattr(Memory_, f"_add_{memory_type}_memory", None)
            output= await memory_class(observation=str(loader), params =decoded_payload['params'])
            return JSONResponse(content={"response": output}, status_code=200)
            memory_class = getattr(Memory_, f"_add_{memory_type}_memory", None)
            output = await memory_class(
                observation=decoded_payload["prompt"],
                loader_settings=decoded_payload["loader_settings"],
                params=decoded_payload["params"],
            )
            return JSONResponse(content={"response": output}, status_code=200)

        except Exception as e:
            return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
            return JSONResponse(
                content={"response": {"error": str(e)}}, status_code=503
            )

    @app.post("/{memory_type}/fetch-memory", response_model=dict)
    async def fetch_memory(
        payload: Payload,
        # files: List[UploadFile] = File(...),
        payload: Payload,
        # files: List[UploadFile] = File(...),
    ):
        try:
            decoded_payload = payload.payload

            Memory_ = Memory(user_id=decoded_payload['user_id'])
            Memory_ = Memory(user_id=decoded_payload["user_id"])

            await Memory_.async_init()

            memory_class = getattr(Memory_, f"_fetch_{memory_type}_memory", None)
            output = memory_class(observation=decoded_payload['prompt'])
            output = memory_class(observation=decoded_payload["prompt"])
            return JSONResponse(content={"response": output}, status_code=200)

        except Exception as e:
            return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
            return JSONResponse(
                content={"response": {"error": str(e)}}, status_code=503
            )

    @app.post("/{memory_type}/delete-memory", response_model=dict)
    async def delete_memory(
        payload: Payload,
        # files: List[UploadFile] = File(...),
        payload: Payload,
        # files: List[UploadFile] = File(...),
    ):
        try:
            decoded_payload = payload.payload

            Memory_ = Memory(user_id=decoded_payload['user_id'])
            Memory_ = Memory(user_id=decoded_payload["user_id"])

            await Memory_.async_init()

            memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
            output = memory_class(observation=decoded_payload['prompt'])
            output = memory_class(observation=decoded_payload["prompt"])
            return JSONResponse(content={"response": output}, status_code=200)

        except Exception as e:
            return JSONResponse(
                content={"response": {"error": str(e)}}, status_code=503
            )

            return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)

memory_list = ["episodic", "buffer", "semantic"]
for memory_type in memory_list:
    memory_factory(memory_type)
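To make the effect of the loop above concrete: each entry in memory_list gets its own /add-memory, /fetch-memory and /delete-memory route. The sketch below shows one such call; the local address and the prompt text are assumptions added for illustration.

```
# Rough sketch only; the address and the prompt text are assumptions.
# After the loop runs, routes such as /episodic/add-memory, /buffer/fetch-memory
# and /semantic/delete-memory are registered on the app.
import requests

body = {"payload": {"user_id": "681", "prompt": "Who is Buck?"}}
r = requests.post("http://127.0.0.1:8000/semantic/fetch-memory", json=body)
print(r.json())
```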

@app.get("/available-buffer-actions", response_model=dict)
async def available_buffer_actions(
    payload: Payload,
    # files: List[UploadFile] = File(...),
    payload: Payload,
    # files: List[UploadFile] = File(...),
):
    try:
        decoded_payload = payload.payload

        Memory_ = Memory(user_id=decoded_payload['user_id'])
        Memory_ = Memory(user_id=decoded_payload["user_id"])

        await Memory_.async_init()
@@ -246,126 +227,76 @@ async def available_buffer_actions(
        return JSONResponse(content={"response": output}, status_code=200)

    except Exception as e:
        return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)


@app.post("/run-buffer", response_model=dict)
async def available_buffer_actions(
    payload: Payload,
    # files: List[UploadFile] = File(...),
    payload: Payload,
    # files: List[UploadFile] = File(...),
):
    try:
        decoded_payload = payload.payload

        Memory_ = Memory(user_id=decoded_payload['user_id'])
        Memory_ = Memory(user_id=decoded_payload["user_id"])

        await Memory_.async_init()

        # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
        output = await Memory_._run_buffer(user_input=decoded_payload['prompt'], params=decoded_payload['params'])
        output = await Memory_._run_buffer(
            user_input=decoded_payload["prompt"], params=decoded_payload["params"]
        )
        return JSONResponse(content={"response": output}, status_code=200)

    except Exception as e:
        return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)
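For reference, the /run-buffer handler above only reads user_id, prompt and params from the decoded payload, so a minimal request could look roughly like the sketch below; the address and the params contents are assumptions.

```
# Minimal sketch of a /run-buffer call; only user_id, prompt and params are used
# by the handler above. The address and the params values are assumptions.
import requests

body = {
    "payload": {
        "user_id": "681",
        "prompt": "Summarize what the buffer currently holds",
        "params": {"version": "1.0", "agreement_id": "AG123456"},
    }
}
r = requests.post("http://127.0.0.1:8000/run-buffer", json=body)
print(r.status_code, r.json())
```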

@app.post("/buffer/create-context", response_model=dict)
async def available_buffer_actions(
    payload: Payload,
    # files: List[UploadFile] = File(...),
    payload: Payload,
    # files: List[UploadFile] = File(...),
):
    try:
        decoded_payload = payload.payload

        Memory_ = Memory(user_id=decoded_payload['user_id'])
        Memory_ = Memory(user_id=decoded_payload["user_id"])

        await Memory_.async_init()

        # memory_class = getattr(Memory_, f"_delete_{memory_type}_memory", None)
        output = await Memory_._create_buffer_context(user_input=decoded_payload['prompt'], params=decoded_payload['params'])
        output = await Memory_._create_buffer_context(
            user_input=decoded_payload["prompt"], params=decoded_payload["params"]
        )
        return JSONResponse(content={"response": output}, status_code=200)

    except Exception as e:
        return JSONResponse(content={"response": {"error": str(e)}}, status_code=503)

#
# # Process each uploaded PDF file
# results = []
# for file in files:
# contents = await file.read()
# tmp_location = os.path.join('/tmp', "tmp.pdf")
# with open(tmp_location, 'wb') as tmp_file:
# tmp_file.write(contents)
# loader = PyPDFLoader(tmp_location)
# pages = loader.load_and_split()
#
# stm = ShortTermMemory(user_id=decoded_payload['user_id'])
# stm.episodic_buffer.main_buffer(prompt=decoded_payload['prompt'], pages=pages)
# # Here you can perform your processing on the PDF contents
# results.append({"filename": file.filename, "size": len(contents)})
#
# return {"message": "Upload successful", "results": results}
#
# except Exception as e:
# return {"error": str(e)}


# @app.post("/clear-cache", response_model=dict)
# async def clear_cache(request_data: Payload) -> dict:
# """
# Endpoint to clear the cache.
# # Process each uploaded PDF file
# results = []
# for file in files:
# contents = await file.read()
# tmp_location = os.path.join('/tmp', "tmp.pdf")
# with open(tmp_location, 'wb') as tmp_file:
# tmp_file.write(contents)
# loader = PyPDFLoader(tmp_location)
# pages = loader.load_and_split()
#
# Parameters:
# request_data (Payload): The request data containing the user and session IDs.
# stm = ShortTermMemory(user_id=decoded_payload['user_id'])
# stm.episodic_buffer.main_buffer(prompt=decoded_payload['prompt'], pages=pages)
# # Here you can perform your processing on the PDF contents
# results.append({"filename": file.filename, "size": len(contents)})
#
# Returns:
# dict: A dictionary with a message indicating the cache was cleared.
# """
# json_payload = request_data.payload
# agent = Agent()
# agent.set_user_session(json_payload["user_id"], json_payload["session_id"])
# try:
# agent.clear_cache()
# return JSONResponse(content={"response": "Cache cleared"}, status_code=200)
# except Exception as e:
# raise HTTPException(status_code=500, detail=str(e))
# return {"message": "Upload successful", "results": results}
#
# @app.post("/correct-prompt-grammar", response_model=dict)
# async def prompt_to_correct_grammar(request_data: Payload) -> dict:
# json_payload = request_data.payload
# agent = Agent()
# agent.set_user_session(json_payload["user_id"], json_payload["session_id"])
# logging.info("Correcting grammar %s", json_payload["prompt_source"])
# except Exception as e:
# return {"error": str(e)}


#
# output = agent.prompt_correction(json_payload["prompt_source"], model_speed= json_payload["model_speed"])
# return JSONResponse(content={"response": {"result": json.loads(output)}})


# @app.post("/action-add-zapier-calendar-action", response_model=dict,dependencies=[Depends(auth)])
# async def action_add_zapier_calendar_action(
# request: Request, request_data: Payload
# ) -> dict:
# json_payload = request_data.payload
# agent = Agent()
# agent.set_user_session(json_payload["user_id"], json_payload["session_id"])
# # Extract the bearer token from the header
# auth_header = request.headers.get("Authorization")
# if auth_header:
# bearer_token = auth_header.replace("Bearer ", "")
# else:
# bearer_token = None
# outcome = agent.add_zapier_calendar_action(
# prompt_base=json_payload["prompt_base"],
# token=bearer_token,
# model_speed=json_payload["model_speed"],
# )
# return JSONResponse(content={"response": outcome})


def start_api_server(host: str = "0.0.0.0", port: int = 8000):
    """
File diff suppressed because it is too large