diff --git a/env.example b/env.example index 9de38d30..5720affc 100644 --- a/env.example +++ b/env.example @@ -143,8 +143,8 @@ OLLAMA_LLM_NUM_CTX=32768 # OLLAMA_LLM_STOP='["", "Assistant:", "\n\n"]' ### If OLLAMA_LLM_TEMPERATURE is not specified, the system will default to the value defined by TEMPERATURE # OLLAMA_LLM_TEMPERATURE=0.85 -### see also env.ollama-binding-options.example for fine tuning ollama - +### use the following command to see all supported options for Ollama LLM +### lightrag-server --llm-binding ollama --help #################################################################################### ### Embedding Configuration (Should not be changed after the first file processed) @@ -179,13 +179,11 @@ EMBEDDING_BINDING_HOST=http://localhost:11434 # EMBEDDING_DIM=2048 # EMBEDDING_BINDING_API_KEY=your_api_key -### use the following command to see all support options for Ollama -### lightrag-server --llm-binding ollama --help +### Optional for Ollama embedding +# OLLAMA_EMBEDDING_NUM_CTX=8192 +### use the following command to see all supported options for Ollama embedding ### lightrag-server --embedding-binding ollama --help -### use the following command to see all support options for openai and azure_openai -### lightrag-server --llm-binding openai --help #################################################################### ### WORKSPACE setting workspace name for all storage types ### in the purpose of isolating data from LightRAG instances.