From 73b458a5be787ab2fd64a80d42f3fdefb02962fc Mon Sep 17 00:00:00 2001
From: Eric Hare
Date: Tue, 16 Dec 2025 05:53:49 -0800
Subject: [PATCH] fix: Add other model provider keys as examples, grouped AWS with other OAuth credentials (#670)

* fix: Add other model provider keys as examples

* Grouped AWS Access Key env variables

* Added llm and embedding provider config onto .env.example

---------

Co-authored-by: Lucas Oliveira
---
 .env.example | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/.env.example b/.env.example
index 081c9026..5d231931 100644
--- a/.env.example
+++ b/.env.example
@@ -40,13 +40,28 @@ GOOGLE_OAUTH_CLIENT_SECRET=
 MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
 MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=
 
+# AWS Access Key ID and Secret Access Key with access to your S3 instance
+AWS_ACCESS_KEY_ID=
+AWS_SECRET_ACCESS_KEY=
+
 # OPTIONAL: dns routable from google (etc.) to handle continous ingest (something like ngrok works). This enables continous ingestion
 WEBHOOK_BASE_URL=
 
+# Model Provider API Keys
 OPENAI_API_KEY=
+ANTHROPIC_API_KEY=
+OLLAMA_ENDPOINT=
+WATSONX_API_KEY=
+WATSONX_ENDPOINT=
+WATSONX_PROJECT_ID=
 
-AWS_ACCESS_KEY_ID=
-AWS_SECRET_ACCESS_KEY=
+# LLM Provider configuration. Providers can be "anthropic", "watsonx", "ibm" or "ollama".
+LLM_PROVIDER=
+LLM_MODEL=
+
+# Embedding provider configuration. Providers can be "watsonx", "ibm" or "ollama".
+EMBEDDING_PROVIDER=
+EMBEDDING_MODEL=
 
 # OPTIONAL url for openrag link to langflow in the UI
 LANGFLOW_PUBLIC_URL=