<!-- .github/pull_request_template.md --> ## Description <!-- Provide a clear description of the changes in this PR --> ## DCO Affirmation I affirm that all code in every commit of this pull request conforms to the terms of the Topoteretes Developer Certificate of Origin.
196 lines
4.2 KiB
Text
196 lines
4.2 KiB
Text
|
|
###
|
|
### DEV
|
|
###
|
|
|
|
|
|
TOKENIZERS_PARALLELISM="false"
|
|
|
|
###
|
|
### LLM
|
|
###
|
|
|
|
###
|
|
### simple, "expensive": an OpenAI API key
|
|
###
|
|
|
|
LLM_API_KEY="your_api_key"
|
|
|
|
###
|
|
### DEV LLM, cheap with content filters
|
|
###
|
|
|
|
LLM_MODEL="azure/gpt-4o-mini"
|
|
LLM_ENDPOINT="https://DNS.openai.azure.com/openai/deployments/gpt-4o-mini"
|
|
LLM_API_KEY="<<TALK TO YOUR AZURE GUY>>"
|
|
LLM_API_VERSION="2024-12-01-preview"
|
|
# LLM API version might not be relevant
|
|
LLM_MAX_TOKENS="16384"
|
|
|
|
EMBEDDING_MODEL="azure/text-embedding-3-large"
|
|
EMBEDDING_ENDPOINT="https://DNS.openai.azure.com/openai/deployments/text-embedding-3-large"
|
|
EMBEDDING_API_KEY="<<TALK TO YOUR AZURE GUY>>"
|
|
EMBEDDING_API_VERSION="2024-12-01-preview"
|
|
EMBEDDING_DIMENSIONS=3072
|
|
EMBEDDING_MAX_TOKENS=8191
|
|
|
|
###
|
|
### free local LLM, install it
|
|
###
|
|
|
|
LLM_API_KEY = "ollama"
|
|
LLM_MODEL = "llama3.1:8b"
|
|
LLM_PROVIDER = "ollama"
|
|
LLM_ENDPOINT = "http://localhost:11434/v1"
|
|
EMBEDDING_PROVIDER = "ollama"
|
|
EMBEDDING_MODEL = "avr/sfr-embedding-mistral:latest"
|
|
EMBEDDING_ENDPOINT = "http://localhost:11434/api/embeddings"
|
|
EMBEDDING_DIMENSIONS = 4096
|
|
HUGGINGFACE_TOKENIZER = "Salesforce/SFR-Embedding-Mistral"
|
|
|
|
###
|
|
### openrouter, also free
|
|
###
|
|
|
|
LLM_API_KEY="<<go-get-one-yourself>>"
|
|
LLM_PROVIDER="custom"
|
|
LLM_MODEL="openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
|
|
LLM_ENDPOINT="https://openrouter.ai/api/v1"
|
|
|
|
###
|
|
### deepinfra
|
|
###
|
|
|
|
LLM_API_KEY="<<>>"
|
|
LLM_PROVIDER="custom"
|
|
LLM_MODEL="deepinfra/meta-llama/Meta-Llama-3-8B-Instruct"
|
|
LLM_ENDPOINT="https://api.deepinfra.com/v1/openai"
|
|
|
|
EMBEDDING_PROVIDER="openai"
|
|
EMBEDDING_API_KEY="<<>>"
|
|
EMBEDDING_MODEL="deepinfra/BAAI/bge-base-en-v1.5"
|
|
EMBEDDING_ENDPOINT=""
|
|
EMBEDDING_API_VERSION=""
|
|
# NOTE(review): BAAI/bge-base-en-v1.5 outputs 768-dimensional embeddings, not 3072 — confirm against the model card
EMBEDDING_DIMENSIONS=768
|
|
# NOTE(review): bge-base-en-v1.5 has a 512-token max sequence length, not 8191 — confirm against the model card
EMBEDDING_MAX_TOKENS=512
|
|
|
|
###
|
|
### DB
|
|
###
|
|
|
|
###
|
|
### db minimal/default
|
|
###
|
|
|
|
GRAPH_DATABASE_PROVIDER="networkx"
|
|
VECTOR_DB_PROVIDER="lancedb"
|
|
DB_PROVIDER=sqlite
|
|
DB_NAME=cognee_db
|
|
|
|
###
|
|
### Relational options
|
|
###
|
|
|
|
DB_PROVIDER="sqlite"
|
|
DB_NAME=cognee_db
|
|
|
|
DB_PROVIDER=postgres
|
|
DB_NAME=cognee_db
|
|
DB_HOST=127.0.0.1
|
|
DB_PORT=5432
|
|
DB_USERNAME=cognee
|
|
DB_PASSWORD=cognee
|
|
|
|
###
|
|
### Graph options
|
|
###
|
|
|
|
|
|
# Default
|
|
|
|
GRAPH_DATABASE_PROVIDER="kuzu"
|
|
|
|
# or if using remote
|
|
|
|
GRAPH_DATABASE_PROVIDER="kuzu-remote"
|
|
GRAPH_DATABASE_URL="http://localhost:8000"
|
|
GRAPH_DATABASE_USERNAME=XXX
|
|
GRAPH_DATABASE_PASSWORD=YYY
|
|
|
|
# or if using neo4j
|
|
|
|
GRAPH_DATABASE_PROVIDER="neo4j"
|
|
GRAPH_DATABASE_URL=bolt://localhost:7687
|
|
GRAPH_DATABASE_USERNAME=neo4j
|
|
GRAPH_DATABASE_PASSWORD=localneo4j
|
|
|
|
###
|
|
### Vector options
|
|
###
|
|
|
|
VECTOR_DB_PROVIDER="lancedb"
|
|
|
|
VECTOR_DB_PROVIDER="pgvector"
|
|
|
|
###
|
|
### for release test
|
|
###
|
|
|
|
LLM_API_KEY="..."
|
|
|
|
OPENAI_API_KEY="..."
|
|
|
|
MIGRATION_DB_PATH="~/Downloads/"
|
|
MIGRATION_DB_NAME="Chinook_Sqlite.sqlite"
|
|
MIGRATION_DB_PROVIDER="sqlite"
|
|
|
|
# NOTE(review): hardcoded public IP and plaintext credentials below are for the release test only — do not reuse or commit real secrets
GRAPH_DATABASE_URL="bolt://54.246.89.112:7687"
|
|
GRAPH_DATABASE_USERNAME="neo4j"
|
|
GRAPH_DATABASE_PASSWORD="pleaseletmein"
|
|
|
|
###
|
|
### ROOT DIRECTORY IF USING COGNEE LIB INSIDE A DOCKER
|
|
###
|
|
# Set up the Cognee system directory. Cognee will store system files and databases here.
|
|
|
|
|
|
DATA_ROOT_DIRECTORY='/cognee_data/data'
|
|
SYSTEM_ROOT_DIRECTORY='/cognee_data/system'
|
|
|
|
|
|
|
|
# Postgres specific parameters (Only if Postgres or PGVector is used). Do not use for cognee default simplest setup of SQLite-NetworkX-LanceDB
|
|
# DB_USERNAME=cognee
|
|
# DB_PASSWORD=cognee
|
|
# To use Postgres with the Cognee backend in Docker compose use the following instead: DB_HOST=host.docker.internal
|
|
# DB_HOST=127.0.0.1
|
|
# DB_PORT=5432
|
|
|
|
|
|
|
|
# Params for migrating relational database data to graph / Cognee ( PostgreSQL and SQLite supported )
|
|
# MIGRATION_DB_PATH="/path/to/migration/directory"
|
|
# MIGRATION_DB_NAME="migration_database.sqlite"
|
|
# MIGRATION_DB_PROVIDER="sqlite"
|
|
# Postgres specific parameters for migration
|
|
# MIGRATION_DB_USERNAME=cognee
|
|
# MIGRATION_DB_PASSWORD=cognee
|
|
# MIGRATION_DB_HOST="127.0.0.1"
|
|
# MIGRATION_DB_PORT=5432
|
|
|
|
# LiteLLM logging level. Set to quiet down logging
|
|
LITELLM_LOG="ERROR"
|
|
|
|
# Set this environment variable to disable sending telemetry data
|
|
# TELEMETRY_DISABLED=1
|
|
|
|
# Set this variable to True to enforce usage of backend access control for Cognee
|
|
# Note: This is only currently supported by the following databases:
|
|
# Relational: SQLite, Postgres
|
|
# Vector: LanceDB
|
|
# Graph: KuzuDB
|
|
#
|
|
# It enforces LanceDB and KuzuDB use and uses them to create databases per Cognee user + dataset
|
|
ENABLE_BACKEND_ACCESS_CONTROL=False
|
|
|