Update env.example

This commit is contained in:
yangdx 2025-08-06 18:50:58 +08:00
parent a6ef29cef6
commit 7780776af6

View file

@ -143,8 +143,8 @@ OLLAMA_LLM_NUM_CTX=32768
# OLLAMA_LLM_STOP='["</s>", "Assistant:", "\n\n"]'
### If OLLAMA_LLM_TEMPERATURE is not specified, the system will default to the value defined by TEMPERATURE
# OLLAMA_LLM_TEMPERATURE=0.85
### See also env.ollama-binding-options.example for fine-tuning Ollama
### Use the following command to see all supported options for the Ollama LLM binding:
### lightrag-server --llm-binding ollama --help
####################################################################################
### Embedding Configuration (Should not be changed after the first file processed)
@ -179,13 +179,11 @@ EMBEDDING_BINDING_HOST=http://localhost:11434
# EMBEDDING_DIM=2048
# EMBEDDING_BINDING_API_KEY=your_api_key
### Use the following command to see all supported options for Ollama:
### lightrag-server --llm-binding ollama --help
### Optional for Ollama embedding
# OLLAMA_EMBEDDING_NUM_CTX=8192
### Use the following command to see all supported options for Ollama embedding:
### lightrag-server --embedding-binding ollama --help
### Use the following command to see all supported options for openai and azure_openai:
### lightrag-server --llm-binding openai --help
####################################################################
### WORKSPACE setting workspace name for all storage types
### in the purpose of isolating data from LightRAG instances.