From 2410feea4fd2a78ccd3ab1e1130701bd6c2b6435 Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Thu, 23 Jan 2025 18:06:09 +0100
Subject: [PATCH] feat: implements modal wrapper + dockerfile for modal containers

---
 Dockerfile_modal | 27 +++++++++++++++++++
 modal_wrapper.py | 78 ++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 105 insertions(+)
 create mode 100644 Dockerfile_modal
 create mode 100644 modal_wrapper.py

diff --git a/Dockerfile_modal b/Dockerfile_modal
new file mode 100644
index 000000000..f8ca663a8
--- /dev/null
+++ b/Dockerfile_modal
@@ -0,0 +1,27 @@
+FROM python:3.11-slim
+
+# Set environment variables
+ENV PIP_NO_CACHE_DIR=true
+ENV PATH="${PATH}:/root/.poetry/bin"
+ENV PYTHONPATH=/app
+ENV RUN_MODE=modal
+ENV SKIP_MIGRATIONS=true
+
+# System dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    libpq-dev \
+    git \
+    curl \
+    build-essential \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+# Install dependencies before copying sources so layer caching survives code edits.
+COPY pyproject.toml poetry.lock /app/
+RUN pip install poetry
+RUN poetry install --all-extras --no-root --without dev
+
+COPY cognee/ /app/cognee
+COPY README.md /app/README.md
diff --git a/modal_wrapper.py b/modal_wrapper.py
new file mode 100644
index 000000000..70a993a04
--- /dev/null
+++ b/modal_wrapper.py
@@ -0,0 +1,78 @@
+import modal
+import os
+import logging
+import asyncio
+import cognee
+import sentry_sdk
+import concurrent.futures
+import signal
+
+from cognee.api.v1.search import SearchType
+from cognee.shared.utils import setup_logging
+
+app = modal.App("cognee-runner")
+
+image = (
+    modal.Image.from_dockerfile(path="Dockerfile_modal", force_build=False)
+    .copy_local_file("pyproject.toml", "pyproject.toml")
+    .copy_local_file("poetry.lock", "poetry.lock")
+    .env({"ENV": os.getenv("ENV", ""), "LLM_API_KEY": os.getenv("LLM_API_KEY", "")})
+    .poetry_install_from_file(poetry_pyproject_toml="pyproject.toml")
+    .pip_install("protobuf", "h2")
+)
+
+
+@app.function(image=image, concurrency_limit=5)
+async def entry(text: str, query: str):
+    """Ingest *text* into a fresh cognee instance and answer *query* against it."""
+    try:
+        setup_logging(logging.ERROR)
+        sentry_sdk.init(dsn=None)  # dsn=None: disable Sentry reporting inside the container
+        # Reset cognee state so every container run starts from a clean slate.
+        await cognee.prune.prune_data()
+        await cognee.prune.prune_system(metadata=True)
+        await cognee.add(text)
+        await cognee.cognify()
+        search_results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
+        return {
+            "text": text,
+            "query": query,
+            "answer": search_results[0] if search_results else None,
+        }
+    finally:
+        await asyncio.sleep(1)  # brief grace period for pending async work before teardown
+
+
+@app.local_entrypoint()
+async def main():
+    """Fan the text/query pairs out to remote Modal workers and print each answer."""
+    text_queries = [
+        {
+            "text": "The Mars 2023 mission discovered 4.3% water content in soil samples from Jezero Crater.",
+            "query": "What percentage of water was found in Jezero Crater's soil based on the provided context?",
+        },
+        {
+            "text": "Bluefin tuna populations decreased by 72% in the Mediterranean between 2010-2022 according to WWF.",
+            "query": "By what percentage did Mediterranean bluefin tuna populations decrease between 2010-2022?",
+        },
+        {
+            "text": "Tesla's Q2 2024 report shows 412,000 Model Y vehicles produced with new 4680 battery cells.",
+            "query": "How many Model Y cars used the 4680 batteries in Q2 2024?",
+        },
+        {
+            "text": "A 2023 Johns Hopkins study found 23-minute daily naps improve cognitive performance by 18% in adults.",
+            "query": "What duration of daily naps boosts cognition according to the 2023 study?",
+        },
+    ]
+
+    # Launch all jobs concurrently; each runs in its own Modal container.
+    tasks = [entry.remote.aio(item["text"], item["query"]) for item in text_queries]
+    results = await asyncio.gather(*tasks)
+
+    print("\nFinal Results:")
+    for result in results:
+        print(result)
+        print("----")
+
+    # SIGKILL works around non-daemon threads that would otherwise hang a clean exit;
+    os.kill(os.getpid(), signal.SIGKILL)  # nothing after this line ever runs