Merge pull request #469 from topoteretes/feature/cog-1084-modal-example-for-cognee
feat: implements modal wrapper + dockerfile for modal containers
This commit is contained in:
commit
8793e4803f
3 changed files with 148 additions and 0 deletions
32
Dockerfile_modal
Normal file
32
Dockerfile_modal
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
FROM python:3.11-slim

# Environment configuration for the Modal container runtime.
ENV PIP_NO_CACHE_DIR=true
ENV PATH="${PATH}:/root/.poetry/bin"
ENV PYTHONPATH=/app
ENV RUN_MODE=modal
ENV SKIP_MIGRATIONS=true

# System dependencies needed to build/compile Python extensions (gcc,
# build-essential), talk to Postgres (libpq-dev), and fetch sources (git, curl).
RUN apt-get update && apt-get install -y \
    gcc \
    libpq-dev \
    git \
    curl \
    build-essential \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy only the dependency manifests first so the expensive install layer
# below stays cached until pyproject.toml / poetry.lock actually change.
COPY pyproject.toml poetry.lock /app/

RUN pip install poetry

# --no-root: the project source is copied in afterwards; --without dev keeps
# development-only dependencies out of the image.
RUN poetry install --all-extras --no-root --without dev

# Application source and the README referenced by packaging metadata.
COPY cognee/ /app/cognee
COPY README.md /app/README.md
|
||||||
22
README.md
22
README.md
|
|
@ -241,6 +241,28 @@ Please see the cognee [Development Guide](https://docs.cognee.ai/quickstart/) fo
|
||||||
```bash
|
```bash
|
||||||
pip install cognee
|
pip install cognee
|
||||||
```
|
```
|
||||||
|
### Deployment at Scale (Modal)
|
||||||
|
|
||||||
|
Scale cognee in 4(+1) simple steps to handle enterprise workloads using [Modal](https://modal.com)'s GPU-powered infrastructure.
|
||||||
|
|
||||||
|
**1. Install the modal python client**
|
||||||
|
```bash
|
||||||
|
pip install modal
|
||||||
|
```
|
||||||
|
**2. Create a free account on [Modal](https://modal.com)**
|
||||||
|
|
||||||
|
**3. Set Up Modal API Key**
|
||||||
|
```bash
|
||||||
|
modal token set --token-id TOKEN_ID --token-secret TOKEN_SECRET --profile=PROFILE
|
||||||
|
modal profile activate PROFILE
|
||||||
|
```
|
||||||
|
**4. Run cognee example**
|
||||||
|
|
||||||
|
This simple example will deploy separate cognee instances building their own memory stores and answering a list of questions at scale.
|
||||||
|
```bash
|
||||||
|
modal run -d modal_deployment.py
|
||||||
|
```
|
||||||
|
**5. Change the `modal_deployment.py` script and develop your own AI memory at scale 🚀**
|
||||||
|
|
||||||
## 💫 Contributors
|
## 💫 Contributors
|
||||||
|
|
||||||
|
|
|
||||||
94
modal_deployment.py
Normal file
94
modal_deployment.py
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
import modal
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import asyncio
|
||||||
|
import cognee
|
||||||
|
import signal
|
||||||
|
|
||||||
|
from cognee.api.v1.search import SearchType
|
||||||
|
from cognee.shared.utils import setup_logging
|
||||||
|
|
||||||
|
# Modal application handle; all remote functions in this script register on it.
app = modal.App("cognee-runner")

# Container image: start from the project's Dockerfile, ship the dependency
# manifests into the image, forward the relevant environment variables from
# the local shell, then install the project dependencies via poetry.
_img = modal.Image.from_dockerfile(path="Dockerfile_modal", force_build=False)
_img = _img.copy_local_file("pyproject.toml", "pyproject.toml")
_img = _img.copy_local_file("poetry.lock", "poetry.lock")
_img = _img.env({"ENV": os.getenv("ENV"), "LLM_API_KEY": os.getenv("LLM_API_KEY")})
_img = _img.poetry_install_from_file(poetry_pyproject_toml="pyproject.toml")
# Extra wheels not pinned in the lock file but needed at runtime.
image = _img.pip_install("protobuf", "h2")
|
||||||
|
|
||||||
|
|
||||||
|
@app.function(image=image, concurrency_limit=10)
async def entry(text: str, query: str):
    """Build a fresh cognee memory from *text* and answer *query* against it.

    Runs remotely in its own Modal container. Returns a dict carrying the
    original text, the query, and the first search result (None when the
    search produced nothing).
    """
    setup_logging(logging.ERROR)

    # Start from a clean slate: drop previously ingested data and system state.
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    # Ingest the document and build the knowledge graph over it.
    await cognee.add(text)
    await cognee.cognify()

    results = await cognee.search(SearchType.GRAPH_COMPLETION, query_text=query)
    answer = results[0] if results else None

    return {
        "text": text,
        "query": query,
        "answer": answer,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@app.local_entrypoint()
async def main():
    """Fan out one remote cognee run per (text, query) pair and print results.

    Each pair is dispatched to its own Modal container via ``entry``; all
    calls run concurrently and the answers are printed once every container
    has finished.
    """
    # (document text, question to ask about it) pairs.
    text_queries = [
        (
            "NASA's Artemis program aims to return humans to the Moon by 2026, focusing on sustainable exploration and preparing for future Mars missions.",
            "When does NASA plan to return humans to the Moon under the Artemis program?",
        ),
        (
            "According to a 2022 UN report, global food waste amounts to approximately 931 million tons annually, with households contributing 61% of the total.",
            "How much food waste do households contribute annually according to the 2022 UN report?",
        ),
        (
            "The 2021 census data revealed that Tokyo's population reached 14 million, reflecting a 2.1% increase compared to the previous census conducted in 2015.",
            "What was Tokyo's population according to the 2021 census data?",
        ),
        (
            "A recent study published in the Journal of Nutrition found that consuming 30 grams of almonds daily can lower LDL cholesterol levels by 7% over a 12-week period.",
            "How much can daily almond consumption lower LDL cholesterol according to the study?",
        ),
        (
            "Amazon's Prime membership grew to 200 million subscribers in 2023, marking a 10% increase from the previous year, driven by exclusive content and faster delivery options.",
            "How many Prime members did Amazon have in 2023?",
        ),
        (
            "A new report by the International Energy Agency states that global renewable energy capacity increased by 295 gigawatts in 2022, primarily driven by solar and wind power expansion.",
            "By how much did global renewable energy capacity increase in 2022 according to the report?",
        ),
        (
            "The World Health Organization reported in 2023 that the global life expectancy has risen to 73.4 years, an increase of 5.5 years since the year 2000.",
            "What is the current global life expectancy according to the WHO's 2023 report?",
        ),
        (
            "The FIFA World Cup 2022 held in Qatar attracted a record-breaking audience of 5 billion people across various digital and traditional broadcasting platforms.",
            "How many people watched the FIFA World Cup 2022?",
        ),
        (
            "The European Space Agency's JUICE mission, launched in 2023, aims to explore Jupiter's icy moons, including Ganymede, Europa, and Callisto, over the next decade.",
            "Which moons is the JUICE mission set to explore?",
        ),
        (
            "According to a report by the International Labour Organization, the global unemployment rate in 2023 was estimated at 5.4%, reflecting a slight decrease compared to the previous year.",
            "What was the global unemployment rate in 2023 according to the ILO?",
        ),
    ]

    # Dispatch every remote call at once; each runs in a separate container.
    pending = [entry.remote.aio(text, query) for text, query in text_queries]
    results = await asyncio.gather(*pending)

    print("\nFinal Results:")
    for result in results:
        print(result)
        print("----")

    # NOTE(review): hard self-termination presumably works around the local
    # runner hanging on teardown — confirm whether this is still required.
    os.kill(os.getpid(), signal.SIGTERM)
|
||||||
Loading…
Add table
Reference in a new issue