# Ingestion Configuration
# Set to true to disable Langflow ingestion and use the traditional OpenRAG processor
# If unset or false, the Langflow pipeline will be used (default: upload -> ingest -> delete)
DISABLE_INGEST_WITH_LANGFLOW=false

# Langflow HTTP timeout configuration (in seconds)
# For large documents (300+ pages), ingestion can take 30+ minutes
# Increase these values if you experience timeouts with very large PDFs
# Default: 2400 seconds (40 minutes) total timeout, 30 seconds connection timeout
# LANGFLOW_TIMEOUT=2400
# LANGFLOW_CONNECT_TIMEOUT=30

# Generate a secret key as described at https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key
LANGFLOW_SECRET_KEY=
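# One generic way to generate a value (illustrative; not the only supported method):
#   python3 -c "from secrets import token_urlsafe; print(token_urlsafe(32))"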

# Flow IDs for the chat and ingestion flows
LANGFLOW_CHAT_FLOW_ID=1098eea1-6649-4e1d-aed1-b77249fb8dd0
LANGFLOW_INGEST_FLOW_ID=5488df7c-b93f-4f87-a446-b67028bc0813
LANGFLOW_URL_INGEST_FLOW_ID=72c3d17c-2dac-4a73-b48a-6518473d7830
# Ingest flow using docling
# LANGFLOW_INGEST_FLOW_ID=1402618b-e6d1-4ff2-9a11-d6ce71186915
NUDGES_FLOW_ID=ebc01d31-1976-46ce-a385-b0240327226c

# Set a strong admin password for OpenSearch; a bcrypt hash is generated at
# container startup from this value. Do not commit real secrets.
# Must match the hashed password in secureconfig; you must change this for a secure deployment!
# NOTE: if you set this by hand, it must be a complex password:
# The password must contain at least 8 characters, and must contain at least one uppercase letter, one lowercase letter, one digit, and one special character.
OPENSEARCH_PASSWORD=
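# Purely illustrative value that satisfies the complexity rules above (do NOT use it as-is):
# OPENSEARCH_PASSWORD=Sample#Passw0rd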

# Path to persist OpenSearch data (indices, documents, cluster state)
# Default: ./opensearch-data
OPENSEARCH_DATA_PATH=./opensearch-data

# Create OAuth credentials at https://console.cloud.google.com/apis/credentials
GOOGLE_OAUTH_CLIENT_ID=
GOOGLE_OAUTH_CLIENT_SECRET=

# Azure app registration credentials for SharePoint/OneDrive
MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=

# AWS Access Key ID and Secret Access Key with access to your S3 instance
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=

# OPTIONAL: a DNS name routable from Google (etc.) to handle continuous ingest (something like ngrok works). Setting this enables continuous ingestion.
WEBHOOK_BASE_URL=
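# Illustrative example only (the hostname is a placeholder, not a project default):
# WEBHOOK_BASE_URL=https://your-subdomain.ngrok-free.app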

# Model Provider API Keys
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
OLLAMA_ENDPOINT=
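# OLLAMA_ENDPOINT example: a local Ollama install typically listens on http://localhost:11434 (illustrative; verify for your setup)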
WATSONX_API_KEY=
WATSONX_ENDPOINT=
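# WATSONX_ENDPOINT is typically a regional URL such as https://us-south.ml.cloud.ibm.com (illustrative; use your region's endpoint)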
WATSONX_PROJECT_ID=

# LLM Provider configuration. Providers can be "anthropic", "watsonx", "ibm" or "ollama".
LLM_PROVIDER=
LLM_MODEL=
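# Illustrative example only (the model name is an assumption; use one available to your provider):
# LLM_PROVIDER=ollama
# LLM_MODEL=llama3.1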

# Embedding provider configuration. Providers can be "watsonx", "ibm" or "ollama".
EMBEDDING_PROVIDER=
EMBEDDING_MODEL=
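# Illustrative example only (the model name is an assumption; use one available to your provider):
# EMBEDDING_PROVIDER=ollama
# EMBEDDING_MODEL=nomic-embed-text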

# OPTIONAL: URL used by the OpenRAG UI to link to Langflow
LANGFLOW_PUBLIC_URL=
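# e.g. http://localhost:7860 if Langflow is exposed locally on its default port (illustrative)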

# OPTIONAL: Override host for docling service (for special networking setups)
# HOST_DOCKER_INTERNAL=host.containers.internal

# Langflow auth
LANGFLOW_AUTO_LOGIN=False
LANGFLOW_SUPERUSER=
LANGFLOW_SUPERUSER_PASSWORD=
LANGFLOW_NEW_USER_IS_ACTIVE=False
LANGFLOW_ENABLE_SUPERUSER_CLI=False