diff --git a/.env.template b/.env.template
index 77c845d4e..ff591c0a7 100644
--- a/.env.template
+++ b/.env.template
@@ -8,14 +8,14 @@ GRAPHISTRY_PASSWORD=
SENTRY_REPORTING_URL=
# "neo4j" or "networkx"
-GRAPH_DATABASE_PROVIDER="neo4j"
+GRAPH_DATABASE_PROVIDER="networkx"
# Not needed if using networkx
GRAPH_DATABASE_URL=
GRAPH_DATABASE_USERNAME=
GRAPH_DATABASE_PASSWORD=
# "qdrant", "pgvector", "weaviate" or "lancedb"
-VECTOR_DB_PROVIDER="qdrant"
+VECTOR_DB_PROVIDER="lancedb"
# Not needed if using "lancedb" or "pgvector"
VECTOR_DB_URL=
VECTOR_DB_KEY=
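With the new defaults above, a local run needs only an LLM key; the graph and vector database URLs and credentials can stay empty. A minimal sketch of that setup, assuming cognee reads these values from the process environment (or a `.env` file), as the pydantic `BaseSettings` config later in this diff suggests:

```python
# Sketch only: mirror the new .env.template defaults programmatically.
# Assumes cognee picks these settings up from the environment at import time.
import os

os.environ.setdefault("LLM_API_KEY", "your_key_here")          # required
os.environ.setdefault("GRAPH_DATABASE_PROVIDER", "networkx")   # no URL/credentials needed
os.environ.setdefault("VECTOR_DB_PROVIDER", "lancedb")         # no URL/key needed

import cognee  # imported after the environment is populated
```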
diff --git a/.github/workflows/test_cognee_llama_index_notebook.yml b/.github/workflows/test_cognee_llama_index_notebook.yml
index c46d0de0d..860eec92b 100644
--- a/.github/workflows/test_cognee_llama_index_notebook.yml
+++ b/.github/workflows/test_cognee_llama_index_notebook.yml
@@ -46,7 +46,7 @@ jobs:
- name: Install dependencies
run: |
- poetry install --no-interaction --all-extras --no-root
+ poetry install --no-interaction --all-extras
poetry add jupyter --no-interaction
- name: Execute Jupyter Notebook
diff --git a/README.md b/README.md
index 82c3730dc..28d5858a0 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@
We build for developers who need a reliable, production-ready data layer for AI applications
-## What is cognee?
+## What is cognee?
Cognee implements scalable, modular ECL (Extract, Cognify, Load) pipelines that allow you to interconnect and retrieve past conversations, documents, and audio transcriptions while reducing hallucinations, developer effort, and cost.
Try it in a Google Colab notebook or have a look at our documentation
@@ -18,9 +18,6 @@ Try it in a Google Colab Discord community
-
-
-
## 📦 Installation
### With pip
@@ -47,6 +44,7 @@ poetry add cognee
poetry add cognee -E postgres
```
+
## 💻 Basic Usage
### Setup
@@ -83,7 +81,7 @@ docker-compose up
```
Then navigate to localhost:3000
-If you want to use the UI with PostgreSQL through docker-compose make sure to set the following values in the .env file:
+If you want to use Cognee with PostgreSQL, make sure to set the following values in the .env file:
```
DB_PROVIDER=postgres
@@ -97,9 +95,7 @@ DB_PASSWORD=cognee
### Simple example
-First, copy `.env.template` to `.env` and add your OpenAI API key to the LLM_API_KEY field.
-
-Optionally, set `VECTOR_DB_PROVIDER="lancedb"` in `.env` to simplify setup.
+First, copy `.env.template` to `.env` and add your OpenAI API key to the LLM_API_KEY field.
This script will run the default pipeline:
@@ -140,7 +136,7 @@ async def main():
asyncio.run(main())
```
-A version of this example is here: `examples/pyton/simple_example.py`
+A version of this example is here: `examples/python/simple_example.py`
### Create your own memory store
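For context on the corrected path, the script the README refers to follows roughly this shape; treat it as a condensed sketch rather than a copy of `examples/python/simple_example.py`, since exact call signatures may differ between cognee versions:

```python
# Condensed sketch of the default pipeline (signatures may differ by version).
import asyncio

import cognee
from cognee.api.v1.search import SearchType


async def main():
    # Reset data and system state for a clean slate, as the example describes
    await cognee.prune.prune_data()
    await cognee.prune.prune_system(metadata=True)

    await cognee.add("Natural language processing (NLP) is a subfield of computer science.")
    await cognee.cognify()  # build the knowledge graph from the added text

    results = await cognee.search(SearchType.INSIGHTS, query_text="Tell me about NLP")
    for result in results:
        print(result)


asyncio.run(main())
```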
@@ -251,7 +247,6 @@ Cognee supports a variety of tools and services for different operations:
Check out our demo notebook [here](https://github.com/topoteretes/cognee/blob/main/notebooks/cognee_demo.ipynb)
-
[
](https://www.youtube.com/watch?v=BDFt4xVPmro "Learn about cognee: 55")
@@ -274,11 +269,6 @@ Please see the cognee [Development Guide](https://topoteretes.github.io/cognee/q
pip install cognee
```
-## Star History
-
-[](https://star-history.com/#topoteretes/cognee&Date)
-
-
## 💫 Contributors
@@ -286,3 +276,25 @@ pip install cognee
+## Star History
+
+[](https://star-history.com/#topoteretes/cognee&Date)
+
+
+## Vector & Graph Databases Implementation State
+
+
+
+| Name | Type | Current state | Known Issues |
+|------------------|--------------------|-------------------|---------------------------------------|
+| Qdrant | Vector | Stable ✅ | |
+| Weaviate | Vector | Stable ✅ | |
+| LanceDB | Vector | Stable ✅ | |
+| Neo4j | Graph | Stable ✅ | |
+| NetworkX | Graph | Stable ✅ | |
+| FalkorDB | Vector/Graph | Unstable ❌ | |
+| PGVector | Vector | Unstable ❌ | Postgres returns a timeout error |
diff --git a/cognee-frontend/src/ui/Partials/SettingsModal/Settings.tsx b/cognee-frontend/src/ui/Partials/SettingsModal/Settings.tsx
index 9d0744323..08574bd6b 100644
--- a/cognee-frontend/src/ui/Partials/SettingsModal/Settings.tsx
+++ b/cognee-frontend/src/ui/Partials/SettingsModal/Settings.tsx
@@ -30,8 +30,8 @@ const defaultProvider = {
};
const defaultModel = {
- label: 'gpt-4o',
- value: 'gpt-4o',
+ label: 'gpt-4o-mini',
+ value: 'gpt-4o-mini',
};
export default function Settings({ onDone = () => {}, submitButtonText = 'Save' }) {
diff --git a/cognee/infrastructure/llm/config.py b/cognee/infrastructure/llm/config.py
index a30fa75c7..37541adf2 100644
--- a/cognee/infrastructure/llm/config.py
+++ b/cognee/infrastructure/llm/config.py
@@ -4,7 +4,7 @@ from pydantic_settings import BaseSettings, SettingsConfigDict
class LLMConfig(BaseSettings):
llm_provider: str = "openai"
- llm_model: str = "gpt-4o"
+ llm_model: str = "gpt-4o-mini"
llm_endpoint: str = ""
llm_api_key: Optional[str] = None
llm_temperature: float = 0.0
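Since `LLMConfig` is a pydantic `BaseSettings`, the new `gpt-4o-mini` default should remain overridable without code changes. A sketch, assuming the default field-to-environment mapping (`llm_model` → `LLM_MODEL`) applies and no custom prefix is set in `SettingsConfigDict`:

```python
# Sketch: override the new default model via the environment.
import os

os.environ["LLM_MODEL"] = "gpt-4o"  # opt back into the previous default

from cognee.infrastructure.llm.config import LLMConfig

config = LLMConfig()
print(config.llm_model)  # expected: "gpt-4o"
```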
diff --git a/cognee/modules/settings/get_settings.py b/cognee/modules/settings/get_settings.py
index 95f2f5924..b67b9d6ab 100644
--- a/cognee/modules/settings/get_settings.py
+++ b/cognee/modules/settings/get_settings.py
@@ -73,6 +73,9 @@ def get_settings() -> SettingsDict:
"providers": llm_providers,
"models": {
"openai": [{
+ "value": "gpt-4o-mini",
+ "label": "gpt-4o-mini",
+ }, {
"value": "gpt-4o",
"label": "gpt-4o",
}, {
diff --git a/examples/python/simple_example.py b/examples/python/simple_example.py
index 4e0e61834..c7d12e5ac 100644
--- a/examples/python/simple_example.py
+++ b/examples/python/simple_example.py
@@ -6,7 +6,6 @@ from cognee.api.v1.search import SearchType
# 1. Copy `.env.template` and rename it to `.env`.
# 2. Add your OpenAI API key to the `.env` file in the `LLM_API_KEY` field:
# LLM_API_KEY = "your_key_here"
-# 3. (Optional) To minimize setup effort, set `VECTOR_DB_PROVIDER="lancedb"` in `.env".
async def main():
# Create a clean slate for cognee -- reset data and system state