chore: fix typo in .env.template (#1122)

## Description
Fixed a minor typo in `.env.template`. Not the biggest change, but
that's a start ¯\_(ツ)_/¯

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin.
Daulet Amirkhanov 2025-07-21 20:00:53 +01:00 committed by vasilije
parent ed284c820d
commit 3f76fc4e1b


@@ -1,112 +1,189 @@
###############################################################################
# NOTE: With default settings Cognee only needs an OpenAI LLM_API_KEY to be set.
# The rest of the settings don't have to be set.
# Default relational database: SQLite
# Default vector database : LanceDB
# Default graph database : Kuzu
#
# These default databases are all file-based, so no extra setup is needed
# for local use.
###############################################################################
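# Example (illustrative key value): with the defaults above, a minimal .env can
# consist of this single line:
# LLM_API_KEY="sk-your-openai-key"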
################################################################################
# 🧠 LLM Settings
################################################################################
###
### DEV
###
TOKENIZERS_PARALLELISM="false"
###
### LLM
###
###
### simple, "expensive": uses an OpenAI API key
###
LLM_API_KEY="your_api_key"
LLM_MODEL="openai/gpt-4o-mini"
LLM_PROVIDER="openai"
LLM_ENDPOINT=""
LLM_API_VERSION=""
###
### DEV LLM, cheap with content filters
###
LLM_MODEL="azure/gpt-4o-mini"
LLM_ENDPOINT="https://DNS.azure.com/openai/deployments/gpt-4o-mini"
LLM_API_KEY="<<TALK TO YOUR AZURE GUY"
LLM_API_VERSION="2024-12-01-preview"
# LLM_API_VERSION might not be relevant
LLM_MAX_TOKENS="16384"
EMBEDDING_MODEL="azure/text-embedding-3-large"
EMBEDDING_ENDPOINT="https://DNS.openai.azure.com/openai/deployments/text-embedding-3-large"
EMBEDDING_API_KEY="<<TALK TO YOUR AZURE GUY>"
EMBEDDING_API_VERSION="2024-12-01-preview"
EMBEDDING_DIMENSIONS=3072
EMBEDDING_MAX_TOKENS=8191
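# Note: "DNS" above is a placeholder for your Azure resource name, e.g.
# https://my-resource.openai.azure.com/openai/deployments/gpt-4o-mini (illustrative).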
###
### free local LLM via Ollama (install Ollama first)
###
LLM_API_KEY="ollama"
LLM_MODEL="llama3.1:8b"
LLM_PROVIDER="ollama"
LLM_ENDPOINT="http://localhost:11434/v1"
EMBEDDING_PROVIDER="ollama"
EMBEDDING_MODEL="avr/sfr-embedding-mistral:latest"
EMBEDDING_ENDPOINT="http://localhost:11434/api/embeddings"
EMBEDDING_DIMENSIONS=4096
HUGGINGFACE_TOKENIZER="Salesforce/SFR-Embedding-Mistral"
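# For the local setup above, the models are assumed to be pulled into Ollama first, e.g.:
#   ollama pull llama3.1:8b
#   ollama pull avr/sfr-embedding-mistral:latest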
###
### OpenRouter, also free
###
LLM_API_KEY="<<go-get-one-yourself"
LLM_PROVIDER="custom"
LLM_MODEL="openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
LLM_ENDPOINT="https://openrouter.ai/api/v1"
###
### deepinfra
###
LLM_API_KEY="<<>>"
LLM_PROVIDER="custom"
LLM_MODEL="deepinfra/meta-llama/Meta-Llama-3-8B-Instruct"
LLM_ENDPOINT="https://api.deepinfra.com/v1/openai"
EMBEDDING_PROVIDER="openai"
EMBEDDING_MODEL="openai/text-embedding-3-large"
EMBEDDING_API_KEY="<<>>"
EMBEDDING_MODEL="deepinfra/BAAI/bge-base-en-v1.5"
EMBEDDING_ENDPOINT=""
EMBEDDING_API_VERSION=""
EMBEDDING_DIMENSIONS=3072
EMBEDDING_MAX_TOKENS=8191
# If no embedding API key is provided, the key set for LLM_API_KEY will be used
#EMBEDDING_API_KEY="your_api_key"
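# Example (illustrative): only set this when embeddings use a different provider/key than the LLM
# EMBEDDING_API_KEY="your_embedding_provider_key"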
################################################################################
# 🗄️ Relational database settings
################################################################################
###
### DB
###
###
### db minimal/default
###
GRAPH_DATABASE_PROVIDER="networkx"
VECTOR_DB_PROVIDER="lancedb"
DB_PROVIDER=sqlite
DB_NAME=cognee_db
###
### Relational options
###
DB_PROVIDER="sqlite"
DB_NAME=cognee_db
# -- To switch to Postgres / PGVector, uncomment and fill these: -------------
#DB_PROVIDER=postgres
#DB_NAME=cognee_db
# To use Postgres with the Cognee backend in Docker Compose, use DB_HOST=host.docker.internal instead
#DB_HOST=127.0.0.1
#DB_PORT=5432
#DB_USERNAME=cognee
#DB_PASSWORD=cognee
DB_PROVIDER=postgres
DB_NAME=cognee_db
DB_HOST=127.0.0.1
DB_PORT=5432
DB_USERNAME=cognee
DB_PASSWORD=cognee
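# For reference, the Postgres values above correspond to a connection of the form
# postgresql://cognee:cognee@127.0.0.1:5432/cognee_db (illustrative).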
################################################################################
# 🕸️ Graph Database settings
################################################################################
###
### Graph options
###
# Default (local file-based)
GRAPH_DATABASE_PROVIDER="kuzu"
# -- To switch to Remote Kuzu uncomment and fill these: -------------------------------------------------------------
#GRAPH_DATABASE_PROVIDER="kuzu"
#GRAPH_DATABASE_PROVIDER="kuzu-remote"
#GRAPH_DATABASE_URL="http://localhost:8000"
#GRAPH_DATABASE_USERNAME=XXX
#GRAPH_DATABASE_PASSWORD=YYY
# -- To switch to Neo4j uncomment and fill these: -------------------------------------------------------------------
#GRAPH_DATABASE_PROVIDER="neo4j"
#GRAPH_DATABASE_URL=bolt://localhost:7687
#GRAPH_DATABASE_USERNAME=neo4j
#GRAPH_DATABASE_PASSWORD=localneo4j
GRAPH_DATABASE_PROVIDER="kuzu"
GRAPH_DATABASE_PROVIDER="kuzu-remote"
GRAPH_DATABASE_URL="http://localhost:8000"
GRAPH_DATABASE_USERNAME=XXX
GRAPH_DATABASE_PASSWORD=YYY
# or if using neo4j
GRAPH_DATABASE_PROVIDER="neo4j"
GRAPH_DATABASE_URL=bolt://localhost:7687
GRAPH_DATABASE_USERNAME=neo4j
GRAPH_DATABASE_PASSWORD=localneo4j
################################################################################
# 📐 Vector Database settings
################################################################################
###
### Vector options
###
# Supported providers: pgvector | qdrant | weaviate | milvus | lancedb | chromadb
VECTOR_DB_PROVIDER="lancedb"
# Only needed when using a cloud-hosted vector database
VECTOR_DB_URL=
VECTOR_DB_KEY=
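# Example (illustrative values) for a hosted vector database such as Qdrant:
# VECTOR_DB_PROVIDER="qdrant"
# VECTOR_DB_URL="https://your-cluster.example.com:6333"
# VECTOR_DB_KEY="<your-qdrant-api-key>"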
################################################################################
# 📂 ROOT DIRECTORY IF USING COGNEE LIB INSIDE A DOCKER CONTAINER
################################################################################
# Set up the Cognee system directory. Cognee will store system files and databases here.
DATA_ROOT_DIRECTORY='/cognee_data/data'
SYSTEM_ROOT_DIRECTORY='/cognee_data/system'
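# Example (illustrative): when running inside Docker, mount a host volume at these paths, e.g.
#   docker run -v $(pwd)/cognee_data:/cognee_data ...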
VECTOR_DB_PROVIDER="pgvector"
###
### for release test
###
LLM_API_KEY="..."
OPENAI_API_KEY="..."
MIGRATION_DB_PATH="~/Downloads/"
MIGRATION_DB_NAME="Chinook_Sqlite.sqlite"
MIGRATION_DB_PROVIDER="sqlite"
GRAPH_DATABASE_URL="bolt://54.246.89.112:7687"
GRAPH_DATABASE_USERNAME="neo4j"
GRAPH_DATABASE_PASSWORD="pleaseletmein"
################################################################################
# 🔄 MIGRATION (RELATIONAL → GRAPH) SETTINGS
################################################################################
MIGRATION_DB_PATH="/path/to/migration/directory"
MIGRATION_DB_NAME="migration_database.sqlite"
# -- Postgres-specific migration params: see the commented MIGRATION_DB_* block below
###
### ROOT DIRECTORY IF USING COGNEE LIB INSIDE A DOCKER CONTAINER
###
# Set up the Cognee system directory. Cognee will store system files and databases here.
DATA_ROOT_DIRECTORY='/cognee_data/data'
SYSTEM_ROOT_DIRECTORY='/cognee_data/system'
# Postgres-specific parameters (only if Postgres or PGVector is used). Not needed for Cognee's default SQLite-NetworkX-LanceDB setup.
# DB_USERNAME=cognee
# DB_PASSWORD=cognee
# To use Postgres with the Cognee backend in Docker Compose, use DB_HOST=host.docker.internal instead
# DB_HOST=127.0.0.1
# DB_PORT=5432
# Params for migrating relational database data to graph / Cognee (PostgreSQL and SQLite supported)
# MIGRATION_DB_PATH="/path/to/migration/directory"
# MIGRATION_DB_NAME="migration_database.sqlite"
# MIGRATION_DB_PROVIDER="sqlite"
# Postgres specific parameters for migration
# MIGRATION_DB_USERNAME=cognee
# MIGRATION_DB_PASSWORD=cognee
# MIGRATION_DB_HOST="127.0.0.1"
# MIGRATION_DB_PORT=5432
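# Example (illustrative) of a complete Postgres migration source, combining the params above:
# MIGRATION_DB_PROVIDER="postgres"   # provider value assumed; only "sqlite" is shown elsewhere in this file
# MIGRATION_DB_NAME="my_source_db"
# MIGRATION_DB_HOST="127.0.0.1"
# MIGRATION_DB_PORT=5432
# MIGRATION_DB_USERNAME=cognee
# MIGRATION_DB_PASSWORD=cognee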
################################################################################
# 🔒 Security Settings
################################################################################
# LiteLLM logging level. Set to quiet down logging
LITELLM_LOG="ERROR"
# When set to False, adding local system files to Cognee is not allowed. Set this to False when Cognee is used as a backend.
ACCEPT_LOCAL_FILE_PATH=True
# When set to False, Cognee is not allowed to send HTTP requests.
# This protects against Server-Side Request Forgery when proper infrastructure is not in place.
ALLOW_HTTP_REQUESTS=True
# Set this environment variable to disable sending telemetry data
# TELEMETRY_DISABLED=1
# Set this variable to True to enforce usage of backend access control for Cognee
# Note: This is currently only supported by the following databases:
@@ -117,94 +194,3 @@ ALLOW_HTTP_REQUESTS=True
# It enforces the use of LanceDB and KuzuDB and creates databases per Cognee user + dataset
ENABLE_BACKEND_ACCESS_CONTROL=False
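# Example: enable per-user / per-dataset isolation (forces LanceDB + KuzuDB as noted above)
# ENABLE_BACKEND_ACCESS_CONTROL=True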
################################################################################
# 🛠️ DEV Settings
################################################################################
ENV="local"
TOKENIZERS_PARALLELISM="false"
# LITELLM Logging Level. Set to quiet down logging
LITELLM_LOG="ERROR"
# Set this environment variable to disable sending telemetry data
# TELEMETRY_DISABLED=1
# Default User Configuration
# DEFAULT_USER_EMAIL=""
# DEFAULT_USER_PASSWORD=""
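# Example (illustrative placeholders only):
# DEFAULT_USER_EMAIL="admin@example.com"
# DEFAULT_USER_PASSWORD="change-me"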
------------------------------- END OF POSSIBLE SETTINGS -------------------------------
###############################################################################
# 🧪 EXAMPLE OVERRIDES (commented out)
###############################################################################
# The blocks below show how to configure alternative providers.
# Uncomment + fill values to switch.
########## Azure OpenAI #######################################################
#LLM_MODEL="azure/gpt-4o-mini"
#LLM_ENDPOINT="https://DNS.azure.com/openai/deployments/gpt-4o-mini"
#LLM_API_KEY="<<TALK TO YOUR AZURE GUY"
#LLM_API_VERSION="2024-12-01-preview"
## LLM_API_VERSION might not be relevant
#LLM_MAX_TOKENS="16384"
#EMBEDDING_MODEL="azure/text-embedding-3-large"
#EMBEDDING_ENDPOINT="https://DNS.openai.azure.com/openai/deployments/text-embedding-3-large"
#EMBEDDING_API_KEY="<<TALK TO YOUR AZURE GUY>"
#EMBEDDING_API_VERSION="2024-12-01-preview"
#EMBEDDING_DIMENSIONS=3072
#EMBEDDING_MAX_TOKENS=8191
########## Local LLM via Ollama ###############################################
#LLM_API_KEY="ollama"
#LLM_MODEL="llama3.1:8b"
#LLM_PROVIDER="ollama"
#LLM_ENDPOINT="http://localhost:11434/v1"
#EMBEDDING_PROVIDER="ollama"
#EMBEDDING_MODEL="avr/sfr-embedding-mistral:latest"
#EMBEDDING_ENDPOINT="http://localhost:11434/api/embeddings"
#EMBEDDING_DIMENSIONS=4096
#HUGGINGFACE_TOKENIZER="Salesforce/SFR-Embedding-Mistral"
########## OpenRouter (also free) #########################################################
#LLM_API_KEY="<<go-get-one-yourself"
#LLM_PROVIDER="custom"
#LLM_MODEL="openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
#LLM_ENDPOINT="https://openrouter.ai/api/v1"
########## DeepInfra ##########################################################
#LLM_API_KEY="<<>>"
#LLM_PROVIDER="custom"
#LLM_MODEL="deepinfra/meta-llama/Meta-Llama-3-8B-Instruct"
#LLM_ENDPOINT="https://api.deepinfra.com/v1/openai"
#EMBEDDING_PROVIDER="openai"
#EMBEDDING_API_KEY="<<>>"
#EMBEDDING_MODEL="deepinfra/BAAI/bge-base-en-v1.5"
#EMBEDDING_ENDPOINT=""
#EMBEDDING_API_VERSION=""
#EMBEDDING_DIMENSIONS=3072
#EMBEDDING_MAX_TOKENS=8191
########## Release Test ###############################################
#LLM_API_KEY="..."
#OPENAI_API_KEY="..."
#MIGRATION_DB_PATH="~/Downloads/"
#MIGRATION_DB_NAME="Chinook_Sqlite.sqlite"
#MIGRATION_DB_PROVIDER="sqlite"
#GRAPH_DATABASE_URL="bolt://54.246.89.112:7687"
#GRAPH_DATABASE_USERNAME="neo4j"
#GRAPH_DATABASE_PASSWORD="pleaseletmein"