Compare commits

...
Sign in to create a new pull request.

3 commits

Author SHA1 Message Date
Lucas Oliveira
fa4e61fdec Added llm and embedding provider config onto .env.example 2025-12-16 10:53:31 -03:00
Lucas Oliveira
14db65f8cd Grouped AWS Access Key env variables 2025-12-16 10:48:03 -03:00
Eric Hare
8f8129c3d2
fix: Add other model provider keys as examples 2025-12-15 12:12:53 -08:00

View file

@@ -40,13 +40,28 @@ GOOGLE_OAUTH_CLIENT_SECRET=
MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=
# AWS Access Key ID and Secret Access Key with access to your S3 instance
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# OPTIONAL: DNS routable from Google (etc.) to handle continuous ingest (something like ngrok works). This enables continuous ingestion
WEBHOOK_BASE_URL=
# Model Provider API Keys
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
OLLAMA_ENDPOINT=
WATSONX_API_KEY=
WATSONX_ENDPOINT=
WATSONX_PROJECT_ID=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# LLM Provider configuration. Providers can be "anthropic", "watsonx", "ibm" or "ollama".
LLM_PROVIDER=
LLM_MODEL=
# Embedding provider configuration. Providers can be "watsonx", "ibm" or "ollama".
EMBEDDING_PROVIDER=
EMBEDDING_MODEL=
# OPTIONAL: URL used by OpenRAG to link to Langflow in the UI
LANGFLOW_PUBLIC_URL=