From 2410feea4fd2a78ccd3ab1e1130701bd6c2b6435 Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Thu, 23 Jan 2025 18:06:09 +0100
Subject: [PATCH 1/4] feat: implements modal wrapper + dockerfile for modal containers
---
 Dockerfile_modal | 32 ++++++++++++++++++++
 modal_wrapper.py | 78 ++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 110 insertions(+)
 create mode 100644 Dockerfile_modal
 create mode 100644 modal_wrapper.py

diff --git a/Dockerfile_modal b/Dockerfile_modal
new file mode 100644
index 000000000..f8ca663a8
--- /dev/null
+++ b/Dockerfile_modal
@@ -0,0 +1,32 @@
+FROM python:3.11-slim
+
+# Set environment variables
+ENV PIP_NO_CACHE_DIR=true
+ENV PATH="${PATH}:/root/.poetry/bin"
+ENV PYTHONPATH=/app
+ENV RUN_MODE=modal
+ENV SKIP_MIGRATIONS=true
+
+# System dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    libpq-dev \
+    git \
+    curl \
+    build-essential \
+    && rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+
+
+ENV PYTHONPATH=/app
+WORKDIR /app
+COPY pyproject.toml poetry.lock /app/
+
+
+RUN pip install poetry
+
+RUN poetry install --all-extras --no-root --without dev
+
+COPY cognee/ /app/cognee
+COPY README.md /app/README.md
diff --git a/modal_wrapper.py b/modal_wrapper.py
new file mode 100644
index 000000000..70a993a04
--- /dev/null
+++ b/modal_wrapper.py
@@ -0,0 +1,78 @@
+import modal
+import os
+import logging
+import asyncio
+import cognee
+import sentry_sdk
+import concurrent.futures
+import signal
+
+from cognee.api.v1.search import SearchType
+from cognee.shared.utils import setup_logging
+
+app = modal.App("cognee-runner")
+
+image = (
+    modal.Image.from_dockerfile(path="Dockerfile_modal", force_build=False)
+    .copy_local_file("pyproject.toml", "pyproject.toml")
+    .copy_local_file("poetry.lock", "poetry.lock")
+    .env({"ENV": os.getenv("ENV"), "LLM_API_KEY": os.getenv("LLM_API_KEY")})
+    .poetry_install_from_file(poetry_pyproject_toml="pyproject.toml")
+    .pip_install("protobuf", "h2")
+)
+
+
+@app.function(image=image, concurrency_limit=5)
+async def entry(text: str, query: str):
+    try:
+        setup_logging(logging.ERROR)
+        sentry_sdk.init(dsn=None)
+        await cognee.prune.prune_data()
+        await cognee.prune.prune_system(metadata=True)
+        await cognee.add(text)
+        await cognee.cognify()
+        search_results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
+
+        return {
+            "text": text,
+            "query": query,
+            "answer": search_results[0] if search_results else None,
+        }
+    finally:
+        await asyncio.sleep(1)
+
+
+@app.local_entrypoint()
+async def main():
+    text_queries = [
+        {
+            "text": "The Mars 2023 mission discovered 4.3% water content in soil samples from Jezero Crater.",
+            "query": "What percentage of water was found in Jezero Crater's soil based on the provided context?",
+        },
+        {
+            "text": "Bluefin tuna populations decreased by 72% in the Mediterranean between 2010-2022 according to WWF.",
+            "query": "What percentage of water was found in Jezero Crater's soil based on the provided context?",
+        },
+        {
+            "text": "Tesla's Q2 2024 report shows 412,000 Model Y vehicles produced with new 4680 battery cells.",
+            "query": "How many Model Y cars used the 4680 batteries in Q2 2024?",
+        },
+        {
+            "text": "A 2023 Johns Hopkins study found 23-minute daily naps improve cognitive performance by 18% in adults.",
+            "query": "What duration of daily naps boosts cognition according to the 2023 study?",
+        },
+    ]
+
+    tasks = [entry.remote.aio(item["text"], item["query"]) for item in text_queries]
+
+    results = await asyncio.gather(*tasks)
+
+    print("\nFinal Results:")
+
+    for result in results:
+        print(result)
+        print("----")
+
+    os.kill(os.getpid(), signal.SIGKILL)
+
+    return 0

From 49e10832d6384e9c11ea928e166b055348af89fc Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Fri, 24 Jan 2025 10:17:48 +0100
Subject: [PATCH 2/4] chore: cleans the modal entrypoint
---
 modal_wrapper.py | 74 +++++++++++++++++++++++++++++-------------------
 1 file changed, 45 insertions(+), 29 deletions(-)

diff --git a/modal_wrapper.py b/modal_wrapper.py
index 70a993a04..5622f96e6 100644
--- a/modal_wrapper.py
+++ b/modal_wrapper.py
@@ -3,8 +3,6 @@ import os
 import logging
 import asyncio
 import cognee
-import sentry_sdk
-import concurrent.futures
 import signal
 
 from cognee.api.v1.search import SearchType
@@ -22,44 +20,64 @@ image = (
 )
 
 
-@app.function(image=image, concurrency_limit=5)
+@app.function(image=image, concurrency_limit=10)
 async def entry(text: str, query: str):
-    try:
-        setup_logging(logging.ERROR)
-        sentry_sdk.init(dsn=None)
-        await cognee.prune.prune_data()
-        await cognee.prune.prune_system(metadata=True)
-        await cognee.add(text)
-        await cognee.cognify()
-        search_results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
+    setup_logging(logging.ERROR)
+    await cognee.prune.prune_data()
+    await cognee.prune.prune_system(metadata=True)
+    await cognee.add(text)
+    await cognee.cognify()
+    search_results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
 
-        return {
-            "text": text,
-            "query": query,
-            "answer": search_results[0] if search_results else None,
-        }
-    finally:
-        await asyncio.sleep(1)
+    return {
+        "text": text,
+        "query": query,
+        "answer": search_results[0] if search_results else None,
+    }
 
 
 @app.local_entrypoint()
 async def main():
     text_queries = [
         {
-            "text": "The Mars 2023 mission discovered 4.3% water content in soil samples from Jezero Crater.",
-            "query": "What percentage of water was found in Jezero Crater's soil based on the provided context?",
+            "text": "NASA's Artemis program aims to return humans to the Moon by 2026, focusing on sustainable exploration and preparing for future Mars missions.",
+            "query": "When does NASA plan to return humans to the Moon under the Artemis program?",
         },
         {
-            "text": "Bluefin tuna populations decreased by 72% in the Mediterranean between 2010-2022 according to WWF.",
-            "query": "What percentage of water was found in Jezero Crater's soil based on the provided context?",
+            "text": "According to a 2022 UN report, global food waste amounts to approximately 931 million tons annually, with households contributing 61% of the total.",
+            "query": "How much food waste do households contribute annually according to the 2022 UN report?",
        },
         {
-            "text": "Tesla's Q2 2024 report shows 412,000 Model Y vehicles produced with new 4680 battery cells.",
-            "query": "How many Model Y cars used the 4680 batteries in Q2 2024?",
+            "text": "The 2021 census data revealed that Tokyo's population reached 14 million, reflecting a 2.1% increase compared to the previous census conducted in 2015.",
+            "query": "What was Tokyo's population according to the 2021 census data?",
         },
         {
-            "text": "A 2023 Johns Hopkins study found 23-minute daily naps improve cognitive performance by 18% in adults.",
-            "query": "What duration of daily naps boosts cognition according to the 2023 study?",
+            "text": "A recent study published in the Journal of Nutrition found that consuming 30 grams of almonds daily can lower LDL cholesterol levels by 7% over a 12-week period.",
+            "query": "How much can daily almond consumption lower LDL cholesterol according to the study?",
+        },
+        {
+            "text": "Amazon's Prime membership grew to 200 million subscribers in 2023, marking a 10% increase from the previous year, driven by exclusive content and faster delivery options.",
+            "query": "How many Prime members did Amazon have in 2023?",
+        },
+        {
+            "text": "A new report by the International Energy Agency states that global renewable energy capacity increased by 295 gigawatts in 2022, primarily driven by solar and wind power expansion.",
+            "query": "By how much did global renewable energy capacity increase in 2022 according to the report?",
+        },
+        {
+            "text": "The World Health Organization reported in 2023 that the global life expectancy has risen to 73.4 years, an increase of 5.5 years since the year 2000.",
+            "query": "What is the current global life expectancy according to the WHO's 2023 report?",
+        },
+        {
+            "text": "The FIFA World Cup 2022 held in Qatar attracted a record-breaking audience of 5 billion people across various digital and traditional broadcasting platforms.",
+            "query": "How many people watched the FIFA World Cup 2022?",
+        },
+        {
+            "text": "The European Space Agency's JUICE mission, launched in 2023, aims to explore Jupiter's icy moons, including Ganymede, Europa, and Callisto, over the next decade.",
+            "query": "Which moons is the JUICE mission set to explore?",
+        },
+        {
+            "text": "According to a report by the International Labour Organization, the global unemployment rate in 2023 was estimated at 5.4%, reflecting a slight decrease compared to the previous year.",
+            "query": "What was the global unemployment rate in 2023 according to the ILO?",
         },
     ]
 
@@ -73,6 +91,4 @@ async def main():
         print(result)
         print("----")
 
-    os.kill(os.getpid(), signal.SIGKILL)
-
-    return 0
+    os.kill(os.getpid(), signal.SIGTERM)

From 8e36b8be65e4fac398b8eca38945ee51ede7a119 Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Fri, 24 Jan 2025 15:45:16 +0100
Subject: [PATCH 3/4] feat: Updates Readme.md with modal deployment guide
---
 README.md                               | 21 +++++++++++++++++++++
 modal_wrapper.py => modal_deployment.py |  0
 2 files changed, 21 insertions(+)
 rename modal_wrapper.py => modal_deployment.py (100%)

diff --git a/README.md b/README.md
index a14ddebc2..46e06a472 100644
--- a/README.md
+++ b/README.md
@@ -241,6 +241,27 @@ Please see the cognee [Development Guide](https://docs.cognee.ai/quickstart/) fo
 ```bash
 pip install cognee
 ```
+### Deployment at Scale (Modal)
+
+Scale cognee in 4 simple steps to handle enterprise workloads using [Modal](https://modal.com)'s GPU-powered infrastructure
+
+**1. Install the Modal Python client**
+```bash
+pip install modal
+```
+**2. Create a free account on [Modal](https://modal.com)**
+
+**3. Set up your Modal API key**
+```bash
+modal token set --token-id TOKEN_ID --token-secret TOKEN_SECRET --profile=PROFILE
+modal profile activate PROFILE
+```
+**4. Run cognee example**
+
+This simple example will deploy separate cognee instances building their own memory stores and answering a list of questions at scale.
+```bash
+modal run -d modal_deployment.py
+```
 
 ## 💫 Contributors
diff --git a/modal_wrapper.py b/modal_deployment.py
similarity index 100%
rename from modal_wrapper.py
rename to modal_deployment.py

From 048823e7ae71e1113f629e5390ee23dd3d2b5bb6 Mon Sep 17 00:00:00 2001
From: hajdul88 <52442977+hajdul88@users.noreply.github.com>
Date: Fri, 24 Jan 2025 15:52:40 +0100
Subject: [PATCH 4/4] feat: changes Readme.md
---
 README.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 46e06a472..6bd5b30e0 100644
--- a/README.md
+++ b/README.md
@@ -243,7 +243,7 @@ pip install cognee
 ```
 ### Deployment at Scale (Modal)
 
-Scale cognee in 4 simple steps to handle enterprise workloads using [Modal](https://modal.com)'s GPU-powered infrastructure
+Scale cognee in 4(+1) simple steps to handle enterprise workloads using [Modal](https://modal.com)'s GPU-powered infrastructure.
 
 **1. Install the Modal Python client**
 ```bash
 pip install modal
 ```
 **2. Create a free account on [Modal](https://modal.com)**
 
 **3. Set up your Modal API key**
 ```bash
 modal token set --token-id TOKEN_ID --token-secret TOKEN_SECRET --profile=PROFILE
 modal profile activate PROFILE
 ```
 **4. Run cognee example**
 
 This simple example will deploy separate cognee instances building their own memory stores and answering a list of questions at scale.
 ```bash
 modal run -d modal_deployment.py
 ```
+**5. Adapt the modal_deployment.py script and develop your own AI memory at scale 🚀**
 
 ## 💫 Contributors
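For reference, `modal_deployment.py` reads `ENV` and `LLM_API_KEY` with `os.getenv()` and forwards them into the Modal image via `.env({...})`, so both should be set in the local shell before step 4 is run. A minimal sketch, where the exported values are placeholders (assumptions, not part of the patch):

```bash
# Placeholder values -- substitute your own. The deployment script reads these
# with os.getenv() and bakes them into the Modal image it builds.
export ENV="dev"
export LLM_API_KEY="sk-..."

# Run the example (the step 4 command from the README).
modal run -d modal_deployment.py
```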