Merge branch 'main' of github.com:langflow-ai/openrag into docling-req

Mike Fortman 2025-10-06 09:49:04 -05:00
commit da66d2e613
13 changed files with 5774 additions and 592 deletions

View file

@@ -1,60 +1,43 @@
# Ingestion Configuration
# Set to true to disable Langflow ingestion and use the traditional OpenRAG processor.
# If unset or false, the Langflow pipeline is used (default: upload -> ingest -> delete).
# Set to true to disable Langflow ingestion and use traditional OpenRAG processor
# If unset or false, Langflow pipeline will be used (default: upload -> ingest -> delete)
DISABLE_INGEST_WITH_LANGFLOW=false
# Create a Langflow secret key:
# https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key
# make one like so https://docs.langflow.org/api-keys-and-authentication#langflow-secret-key
LANGFLOW_SECRET_KEY=
# Flow IDs for chat and ingestion
# flow ids for chat and ingestion flows
LANGFLOW_CHAT_FLOW_ID=1098eea1-6649-4e1d-aed1-b77249fb8dd0
# LANGFLOW_INGEST_FLOW_ID=5488df7c-b93f-4f87-a446-b67028bc0813
LANGFLOW_INGEST_FLOW_ID=5488df7c-b93f-4f87-a446-b67028bc0813
LANGFLOW_URL_INGEST_FLOW_ID=72c3d17c-2dac-4a73-b48a-6518473d7830
# Ingest flow using docling
LANGFLOW_INGEST_FLOW_ID=1402618b-e6d1-4ff2-9a11-d6ce71186915
# LANGFLOW_INGEST_FLOW_ID=1402618b-e6d1-4ff2-9a11-d6ce71186915
NUDGES_FLOW_ID=ebc01d31-1976-46ce-a385-b0240327226c
# OpenSearch Auth
# Set a strong admin password for OpenSearch.
# A bcrypt hash is generated at container startup from this value.
# Do not commit real secrets.
# Must be changed for secure deployments.
# Set a strong admin password for OpenSearch; a bcrypt hash is generated at
# container startup from this value. Do not commit real secrets.
# must match the hashed password in secureconfig, must change for secure deployment!!!
OPENSEARCH_PASSWORD=
# Google OAuth
# Create credentials here:
# https://console.cloud.google.com/apis/credentials
# make here https://console.cloud.google.com/apis/credentials
GOOGLE_OAUTH_CLIENT_ID=
GOOGLE_OAUTH_CLIENT_SECRET=
# Microsoft (SharePoint/OneDrive) OAuth
# Azure app registration credentials.
# Azure app registration credentials for SharePoint/OneDrive
MICROSOFT_GRAPH_OAUTH_CLIENT_ID=
MICROSOFT_GRAPH_OAUTH_CLIENT_SECRET=
# Webhooks (optional)
# Public, DNS-resolvable base URL (e.g., via ngrok) for continuous ingestion.
# OPTIONAL: dns routable from google (etc.) to handle continous ingest (something like ngrok works). This enables continous ingestion
WEBHOOK_BASE_URL=
# API Keys
OPENAI_API_KEY=
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
# Langflow UI URL (optional)
# Public URL to link OpenRAG to Langflow in the UI.
# OPTIONAL url for openrag link to langflow in the UI
LANGFLOW_PUBLIC_URL=
# Langflow Auth
# Langflow auth
LANGFLOW_AUTO_LOGIN=False
LANGFLOW_SUPERUSER=
LANGFLOW_SUPERUSER_PASSWORD=
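For reference, a minimal sketch of producing values for the two secrets called out in the comments above (LANGFLOW_SECRET_KEY and OPENSEARCH_PASSWORD). This assumes a local Python 3 and that any sufficiently random string is acceptable; check your OpenSearch password policy before relying on it.

    # Sketch: generate candidate values for the .env entries above (Python 3 stdlib only).
    import secrets

    print(f"LANGFLOW_SECRET_KEY={secrets.token_urlsafe(32)}")   # random URL-safe key
    print(f"OPENSEARCH_PASSWORD={secrets.token_urlsafe(24)}")   # plaintext; the container hashes it at startup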

.gitignore (vendored): 1 change
View file

@@ -17,6 +17,7 @@ wheels/
1001*.pdf
*.json
!flows/*.json
.DS_Store
config/

View file

@@ -40,10 +40,10 @@ services:
openrag-backend:
image: phact/openrag-backend:${OPENRAG_VERSION:-latest}
#build:
#context: .
#dockerfile: Dockerfile.backend
container_name: openrag-backend
# build:
# context: .
# dockerfile: Dockerfile.backend
# container_name: openrag-backend
depends_on:
- langflow
environment:
@@ -55,6 +55,7 @@ services:
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
- LANGFLOW_INGEST_FLOW_ID=${LANGFLOW_INGEST_FLOW_ID}
- LANGFLOW_URL_INGEST_FLOW_ID=${LANGFLOW_URL_INGEST_FLOW_ID}
- DISABLE_INGEST_WITH_LANGFLOW=${DISABLE_INGEST_WITH_LANGFLOW:-false}
- NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
- OPENSEARCH_PORT=9200
@@ -77,9 +78,9 @@ services:
openrag-frontend:
image: phact/openrag-frontend:${OPENRAG_VERSION:-latest}
#build:
#context: .
#dockerfile: Dockerfile.frontend
# build:
# context: .
# dockerfile: Dockerfile.frontend
container_name: openrag-frontend
depends_on:
- openrag-backend
@@ -92,6 +93,9 @@ services:
volumes:
- ./flows:/app/flows:Z
image: phact/openrag-langflow:${LANGFLOW_VERSION:-latest}
# build:
# context: .
# dockerfile: Dockerfile.langflow
container_name: langflow
ports:
- "7860:7860"
@@ -99,15 +103,23 @@ services:
- OPENAI_API_KEY=${OPENAI_API_KEY}
- LANGFLOW_LOAD_FLOWS_PATH=/app/flows
- LANGFLOW_SECRET_KEY=${LANGFLOW_SECRET_KEY}
- JWT="dummy"
- JWT=None
- OWNER=None
- OWNER_NAME=None
- OWNER_EMAIL=None
- CONNECTOR_TYPE=system
- OPENRAG-QUERY-FILTER="{}"
- OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD
- FILENAME=None
- MIMETYPE=None
- FILESIZE=0
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD,OWNER,OWNER_NAME,OWNER_EMAIL,CONNECTOR_TYPE,FILENAME,MIMETYPE,FILESIZE
- LANGFLOW_LOG_LEVEL=DEBUG
- LANGFLOW_AUTO_LOGIN=${LANGFLOW_AUTO_LOGIN}
- LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_NEW_USER_IS_ACTIVE=${LANGFLOW_NEW_USER_IS_ACTIVE}
- LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
- DEFAULT_FOLDER_NAME=OpenRAG
# - DEFAULT_FOLDER_NAME=OpenRAG
- HIDE_GETTING_STARTED_PROGRESS=true
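The FILENAME, MIMETYPE, and FILESIZE placeholders added above only take effect because they are also listed in LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT, which controls which environment variables Langflow exposes as global variables; the backend then overrides them per request via X-Langflow-Global-Var-* headers, as the service hunk later in this diff shows. A small, purely illustrative sketch for checking that the two lists stay in sync:

    # Sketch: verify every variable the backend overrides per request is also
    # exposed to Langflow at startup via LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT.
    import os

    overridden = {"JWT", "OWNER", "OWNER_NAME", "OWNER_EMAIL", "CONNECTOR_TYPE",
                  "FILENAME", "MIMETYPE", "FILESIZE"}
    exposed = set(os.environ.get("LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT", "").split(","))
    missing = overridden - exposed
    if missing:
        raise SystemExit(f"not exposed to Langflow: {sorted(missing)}")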

View file

@@ -43,7 +43,7 @@ services:
# build:
# context: .
# dockerfile: Dockerfile.backend
container_name: openrag-backend
# container_name: openrag-backend
depends_on:
- langflow
environment:
@@ -54,6 +54,7 @@ services:
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
- LANGFLOW_INGEST_FLOW_ID=${LANGFLOW_INGEST_FLOW_ID}
- LANGFLOW_URL_INGEST_FLOW_ID=${LANGFLOW_URL_INGEST_FLOW_ID}
- DISABLE_INGEST_WITH_LANGFLOW=${DISABLE_INGEST_WITH_LANGFLOW:-false}
- NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
- OPENSEARCH_PORT=9200
@@ -80,7 +81,7 @@ services:
# build:
# context: .
# dockerfile: Dockerfile.frontend
# #dockerfile: Dockerfile.frontend
#dockerfile: Dockerfile.frontend
container_name: openrag-frontend
depends_on:
- openrag-backend
@@ -109,13 +110,16 @@ services:
- OWNER_EMAIL=None
- CONNECTOR_TYPE=system
- OPENRAG-QUERY-FILTER="{}"
- FILENAME=None
- MIMETYPE=None
- FILESIZE=0
- OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD,OWNER,OWNER_NAME,OWNER_EMAIL,CONNECTOR_TYPE
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD,OWNER,OWNER_NAME,OWNER_EMAIL,CONNECTOR_TYPE,FILENAME,MIMETYPE,FILESIZE
- LANGFLOW_LOG_LEVEL=DEBUG
- LANGFLOW_AUTO_LOGIN=${LANGFLOW_AUTO_LOGIN}
- LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_NEW_USER_IS_ACTIVE=${LANGFLOW_NEW_USER_IS_ACTIVE}
- LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
- DEFAULT_FOLDER_NAME=OpenRAG
# - DEFAULT_FOLDER_NAME=OpenRAG
- HIDE_GETTING_STARTED_PROGRESS=true

File diff suppressed because one or more lines are too long

View file

@@ -144,6 +144,8 @@
"targetHandle": "{œfieldNameœ:œagent_llmœ,œidœ:œAgent-crjWfœ,œinputTypesœ:[œLanguageModelœ],œtypeœ:œstrœ}"
},
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "TextInput",
@@ -163,6 +165,7 @@
}
},
"id": "xy-edge__TextInput-aHsQb{œdataTypeœ:œTextInputœ,œidœ:œTextInput-aHsQbœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-OpenSearch-iYfjf{œfieldNameœ:œfilter_expressionœ,œidœ:œOpenSearch-iYfjfœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"selected": false,
"source": "TextInput-aHsQb",
"sourceHandle": "{œdataTypeœ:œTextInputœ,œidœ:œTextInput-aHsQbœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}",
"target": "OpenSearch-iYfjf",
@@ -727,7 +730,7 @@
],
"frozen": false,
"icon": "OpenSearch",
"last_updated": "2025-10-02T20:05:34.814Z",
"last_updated": "2025-10-04T05:41:33.344Z",
"legacy": false,
"lf_version": "1.6.0",
"metadata": {
@@ -1381,7 +1384,7 @@
],
"frozen": false,
"icon": "binary",
"last_updated": "2025-10-02T20:05:34.815Z",
"last_updated": "2025-10-04T05:41:33.345Z",
"legacy": false,
"lf_version": "1.6.0",
"metadata": {
@@ -1660,7 +1663,7 @@
},
"position": {
"x": 727.4791597769406,
"y": 518.0820551650631
"y": 416.82609966052854
},
"selected": false,
"type": "genericNode"
@@ -1706,7 +1709,7 @@
],
"frozen": false,
"icon": "bot",
"last_updated": "2025-10-02T20:05:34.872Z",
"last_updated": "2025-10-04T05:41:33.399Z",
"legacy": false,
"lf_version": "1.6.0",
"metadata": {
@@ -2245,7 +2248,7 @@
],
"frozen": false,
"icon": "brain-circuit",
"last_updated": "2025-10-02T20:05:34.815Z",
"last_updated": "2025-10-04T05:41:33.347Z",
"legacy": false,
"lf_version": "1.6.0",
"metadata": {
@@ -2551,7 +2554,7 @@
}
],
"viewport": {
"x": -237.0727605845459,
"x": -149.48015964664273,
"y": 154.6885920024542,
"zoom": 0.602433700773958
}
@@ -2560,7 +2563,7 @@
"endpoint_name": null,
"id": "1098eea1-6649-4e1d-aed1-b77249fb8dd0",
"is_component": false,
"last_tested_version": "1.6.0",
"last_tested_version": "1.6.3.dev0",
"name": "OpenRAG OpenSearch Agent",
"tags": [
"assistants",

flows/openrag_url_mcp.json (new file): 3616 lines

File diff suppressed because one or more lines are too long

View file

@@ -34,6 +34,7 @@ _legacy_flow_id = os.getenv("FLOW_ID")
LANGFLOW_CHAT_FLOW_ID = os.getenv("LANGFLOW_CHAT_FLOW_ID") or _legacy_flow_id
LANGFLOW_INGEST_FLOW_ID = os.getenv("LANGFLOW_INGEST_FLOW_ID")
LANGFLOW_URL_INGEST_FLOW_ID = os.getenv("LANGFLOW_URL_INGEST_FLOW_ID")
NUDGES_FLOW_ID = os.getenv("NUDGES_FLOW_ID")
if _legacy_flow_id and not os.getenv("LANGFLOW_CHAT_FLOW_ID"):

View file

@@ -1,5 +1,6 @@
from config.settings import (
DISABLE_INGEST_WITH_LANGFLOW,
LANGFLOW_URL_INGEST_FLOW_ID,
NUDGES_FLOW_ID,
LANGFLOW_URL,
LANGFLOW_CHAT_FLOW_ID,
@@ -116,9 +117,11 @@ class FlowsService:
flow_id = LANGFLOW_CHAT_FLOW_ID
elif flow_type == "ingest":
flow_id = LANGFLOW_INGEST_FLOW_ID
elif flow_type == "url_ingest":
flow_id = LANGFLOW_URL_INGEST_FLOW_ID
else:
raise ValueError(
"flow_type must be either 'nudges', 'retrieval', or 'ingest'"
"flow_type must be either 'nudges', 'retrieval', 'ingest', or 'url_ingest'"
)
if not flow_id:
@@ -291,6 +294,13 @@ class FlowsService:
"llm_name": None, # Ingestion flow might not have LLM
"llm_text_name": None,
},
{
"name": "url_ingest",
"flow_id": LANGFLOW_URL_INGEST_FLOW_ID,
"embedding_name": OPENAI_EMBEDDING_COMPONENT_DISPLAY_NAME,
"llm_name": None,
"llm_text_name": None,
},
]
results = []
@@ -716,6 +726,10 @@ class FlowsService:
"name": "ingest",
"flow_id": LANGFLOW_INGEST_FLOW_ID,
},
{
"name": "url_ingest",
"flow_id": LANGFLOW_URL_INGEST_FLOW_ID,
},
]
# Determine target component IDs based on provider
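The branch added above extends the flow_type resolution to four values. As an illustration only (not the project's code), the same lookup written as a table, assuming "retrieval" maps to the chat flow as the visible branch and error message suggest:

    # Sketch: flow_type -> flow ID resolution equivalent to the elif chain in this hunk.
    from config.settings import (
        LANGFLOW_CHAT_FLOW_ID,
        LANGFLOW_INGEST_FLOW_ID,
        LANGFLOW_URL_INGEST_FLOW_ID,
        NUDGES_FLOW_ID,
    )

    _FLOW_IDS = {
        "nudges": NUDGES_FLOW_ID,
        "retrieval": LANGFLOW_CHAT_FLOW_ID,
        "ingest": LANGFLOW_INGEST_FLOW_ID,
        "url_ingest": LANGFLOW_URL_INGEST_FLOW_ID,
    }

    def resolve_flow_id(flow_type: str) -> str:
        if flow_type not in _FLOW_IDS:
            raise ValueError(
                "flow_type must be either 'nudges', 'retrieval', 'ingest', or 'url_ingest'"
            )
        flow_id = _FLOW_IDS[flow_type]
        if not flow_id:
            raise ValueError(f"no flow ID configured for {flow_type!r}")
        return flow_id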

View file

@@ -67,6 +67,7 @@ class LangflowFileService:
owner_name: Optional[str] = None,
owner_email: Optional[str] = None,
connector_type: Optional[str] = None,
file_tuples: Optional[list[tuple[str, str, str]]] = None,
) -> Dict[str, Any]:
"""
Trigger the ingestion flow with provided file paths.
@@ -86,7 +87,9 @@ class LangflowFileService:
# Pass files via tweaks to File component (File-PSU37 from the flow)
if file_paths:
tweaks["DoclingRemote-78KoX"] = {"path": file_paths}
tweaks["DoclingRemote-Dp3PX"] = {"path": file_paths}
# Pass JWT token via tweaks using the x-langflow-global-var- pattern
if jwt_token:
@@ -129,7 +132,8 @@ class LangflowFileService:
list(tweaks.keys()) if isinstance(tweaks, dict) else None,
bool(jwt_token),
)
# To compute the file size in bytes, use len() on the file content (which should be bytes)
file_size_bytes = len(file_tuples[0][1]) if file_tuples and len(file_tuples[0]) > 1 else 0
# Avoid logging full payload to prevent leaking sensitive data (e.g., JWT)
headers={
"X-Langflow-Global-Var-JWT": str(jwt_token),
@@ -137,6 +141,9 @@ class LangflowFileService:
"X-Langflow-Global-Var-OWNER_NAME": str(owner_name),
"X-Langflow-Global-Var-OWNER_EMAIL": str(owner_email),
"X-Langflow-Global-Var-CONNECTOR_TYPE": str(connector_type),
"X-Langflow-Global-Var-FILENAME": str(file_tuples[0][0]),
"X-Langflow-Global-Var-MIMETYPE": str(file_tuples[0][2]),
"X-Langflow-Global-Var-FILESIZE": str(file_size_bytes),
}
logger.info(f"[LF] Headers {headers}")
logger.info(f"[LF] Payload {payload}")
@@ -271,6 +278,7 @@ class LangflowFileService:
owner_name=owner_name,
owner_email=owner_email,
connector_type=connector_type,
file_tuples=[file_tuple],
)
logger.debug("[LF] Ingestion completed successfully")
except Exception as e:
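Putting the pieces of this hunk together: the file name, MIME type, and byte size from file_tuples are forwarded to Langflow as X-Langflow-Global-Var-* headers alongside the DoclingRemote path tweak. A rough sketch of that call, using httpx and Langflow's /api/v1/run endpoint purely for illustration; the real client code lives in LangflowFileService, and the component ID is the one shown in this diff:

    # Illustrative only: forward per-file metadata to Langflow as global-variable
    # overrides while passing the uploaded file path as a component tweak.
    import httpx

    def run_ingest_flow(langflow_url: str, flow_id: str, file_path: str,
                        file_tuple: tuple[str, bytes, str], jwt_token: str) -> dict:
        filename, content, mimetype = file_tuple        # (name, raw bytes, MIME type)
        headers = {
            "X-Langflow-Global-Var-JWT": str(jwt_token),
            "X-Langflow-Global-Var-FILENAME": filename,
            "X-Langflow-Global-Var-MIMETYPE": mimetype,
            "X-Langflow-Global-Var-FILESIZE": str(len(content)),  # size in bytes via len()
        }
        payload = {
            "input_value": "",
            "tweaks": {"DoclingRemote-Dp3PX": {"path": [file_path]}},  # component ID from the flow
        }
        resp = httpx.post(f"{langflow_url}/api/v1/run/{flow_id}",
                          json=payload, headers=headers, timeout=300.0)
        resp.raise_for_status()
        return resp.json()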

View file

@@ -55,6 +55,7 @@ services:
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
- LANGFLOW_INGEST_FLOW_ID=${LANGFLOW_INGEST_FLOW_ID}
- LANGFLOW_URL_INGEST_FLOW_ID=${LANGFLOW_URL_INGEST_FLOW_ID}
- DISABLE_INGEST_WITH_LANGFLOW=${DISABLE_INGEST_WITH_LANGFLOW:-false}
- NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
- OPENSEARCH_PORT=9200
@@ -99,15 +100,22 @@ services:
- OPENAI_API_KEY=${OPENAI_API_KEY}
- LANGFLOW_LOAD_FLOWS_PATH=/app/flows
- LANGFLOW_SECRET_KEY=${LANGFLOW_SECRET_KEY}
- JWT="dummy"
- JWT=None
- OWNER=None
- OWNER_NAME=None
- OWNER_EMAIL=None
- CONNECTOR_TYPE=system
- OPENRAG-QUERY-FILTER="{}"
- OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD
- FILENAME=None
- MIMETYPE=None
- FILESIZE=0
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD,OWNER,OWNER_NAME,OWNER_EMAIL,CONNECTOR_TYPE,FILENAME,MIMETYPE,FILESIZE
- LANGFLOW_LOG_LEVEL=DEBUG
- LANGFLOW_AUTO_LOGIN=${LANGFLOW_AUTO_LOGIN}
- LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_NEW_USER_IS_ACTIVE=${LANGFLOW_NEW_USER_IS_ACTIVE}
- LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
- DEFAULT_FOLDER_NAME=OpenRAG
# - DEFAULT_FOLDER_NAME=OpenRAG
- HIDE_GETTING_STARTED_PROGRESS=true

View file

@@ -54,6 +54,7 @@ services:
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_CHAT_FLOW_ID=${LANGFLOW_CHAT_FLOW_ID}
- LANGFLOW_INGEST_FLOW_ID=${LANGFLOW_INGEST_FLOW_ID}
- LANGFLOW_URL_INGEST_FLOW_ID=${LANGFLOW_URL_INGEST_FLOW_ID}
- DISABLE_INGEST_WITH_LANGFLOW=${DISABLE_INGEST_WITH_LANGFLOW:-false}
- NUDGES_FLOW_ID=${NUDGES_FLOW_ID}
- OPENSEARCH_PORT=9200
@@ -99,15 +100,22 @@ services:
- OPENAI_API_KEY=${OPENAI_API_KEY}
- LANGFLOW_LOAD_FLOWS_PATH=/app/flows
- LANGFLOW_SECRET_KEY=${LANGFLOW_SECRET_KEY}
- JWT="dummy"
- JWT=None
- OWNER=None
- OWNER_NAME=None
- OWNER_EMAIL=None
- CONNECTOR_TYPE=system
- OPENRAG-QUERY-FILTER="{}"
- OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD
- FILENAME=None
- MIMETYPE=None
- FILESIZE=0
- LANGFLOW_VARIABLES_TO_GET_FROM_ENVIRONMENT=JWT,OPENRAG-QUERY-FILTER,OPENSEARCH_PASSWORD,OWNER,OWNER_NAME,OWNER_EMAIL,CONNECTOR_TYPE,FILENAME,MIMETYPE,FILESIZE
- LANGFLOW_LOG_LEVEL=DEBUG
- LANGFLOW_AUTO_LOGIN=${LANGFLOW_AUTO_LOGIN}
- LANGFLOW_SUPERUSER=${LANGFLOW_SUPERUSER}
- LANGFLOW_SUPERUSER_PASSWORD=${LANGFLOW_SUPERUSER_PASSWORD}
- LANGFLOW_NEW_USER_IS_ACTIVE=${LANGFLOW_NEW_USER_IS_ACTIVE}
- LANGFLOW_ENABLE_SUPERUSER_CLI=${LANGFLOW_ENABLE_SUPERUSER_CLI}
- DEFAULT_FOLDER_NAME=OpenRAG
# - DEFAULT_FOLDER_NAME="OpenRAG"
- HIDE_GETTING_STARTED_PROGRESS=true

View file

@@ -32,7 +32,8 @@ class EnvConfig:
langflow_superuser: str = "admin"
langflow_superuser_password: str = ""
langflow_chat_flow_id: str = "1098eea1-6649-4e1d-aed1-b77249fb8dd0"
langflow_ingest_flow_id: str = "1402618b-e6d1-4ff2-9a11-d6ce71186915"
langflow_ingest_flow_id: str = "5488df7c-b93f-4f87-a446-b67028bc0813"
langflow_url_ingest_flow_id: str = "72c3d17c-2dac-4a73-b48a-6518473d7830"
# OAuth settings
google_oauth_client_id: str = ""
@@ -114,6 +115,7 @@ class EnvManager:
"LANGFLOW_SUPERUSER_PASSWORD": "langflow_superuser_password",
"LANGFLOW_CHAT_FLOW_ID": "langflow_chat_flow_id",
"LANGFLOW_INGEST_FLOW_ID": "langflow_ingest_flow_id",
"LANGFLOW_URL_INGEST_FLOW_ID": "langflow_url_ingest_flow_id",
"NUDGES_FLOW_ID": "nudges_flow_id",
"GOOGLE_OAUTH_CLIENT_ID": "google_oauth_client_id",
"GOOGLE_OAUTH_CLIENT_SECRET": "google_oauth_client_secret",
@@ -255,6 +257,7 @@ class EnvManager:
f.write(
f"LANGFLOW_INGEST_FLOW_ID={self._quote_env_value(self.config.langflow_ingest_flow_id)}\n"
)
f.write(f"LANGFLOW_URL_INGEST_FLOW_ID={self._quote_env_value(self.config.langflow_url_ingest_flow_id)}\n")
f.write(f"NUDGES_FLOW_ID={self._quote_env_value(self.config.nudges_flow_id)}\n")
f.write(f"OPENSEARCH_PASSWORD={self._quote_env_value(self.config.opensearch_password)}\n")
f.write(f"OPENAI_API_KEY={self._quote_env_value(self.config.openai_api_key)}\n")
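_quote_env_value itself is not part of this diff; as a rough, hypothetical illustration of the kind of quoting such a helper typically performs when writing values like the flow IDs above:

    # Hypothetical sketch only; the real _quote_env_value is not shown in this diff.
    # Wrap values containing whitespace or shell-special characters in double quotes,
    # escaping any embedded backslashes and quotes.
    def quote_env_value(value: str) -> str:
        if value and not any(ch in value for ch in " \t\"'#$"):
            return value                                  # simple values stay bare
        escaped = value.replace("\\", "\\\\").replace('"', '\\"')
        return f'"{escaped}"'

    # e.g. quote_env_value("p@ss word") -> '"p@ss word"'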