diff --git a/cognee/config.py b/cognee/config.py
index b18fb1637..5fedb1658 100644
--- a/cognee/config.py
+++ b/cognee/config.py
@@ -50,15 +50,14 @@ class Config:
     graph_filename = os.getenv("GRAPH_NAME", "cognee_graph.pkl")
 
     # Model parameters
-    llm_provider: str = "openai" #openai, or custom or ollama
-    custom_model: str = "mistralai/Mixtral-8x7B-Instruct-v0.1"
-    custom_endpoint: str = "https://api.endpoints.anyscale.com/v1" # pass claude endpoint
+    llm_provider: str = os.getenv("LLM_PROVIDER", "openai")  # openai, custom, or ollama
+    custom_model: str = os.getenv("CUSTOM_LLM_MODEL", "mistralai/Mixtral-8x7B-Instruct-v0.1")
+    custom_endpoint: str = os.getenv("CUSTOM_ENDPOINT", "https://api.endpoints.anyscale.com/v1")  # pass claude endpoint
     custom_key: Optional[str] = os.getenv("ANYSCALE_API_KEY")
-    ollama_endpoint: str = "http://localhost:11434/v1"
+    ollama_endpoint: str = os.getenv("CUSTOM_OLLAMA_ENDPOINT", "http://localhost:11434/v1")
     ollama_key: Optional[str] = "ollama"
-    ollama_model: str = "mistral:instruct"
-    model: str = "gpt-4-1106-preview"
-    # model: str = "gpt-3.5-turbo"
+    ollama_model: str = os.getenv("CUSTOM_OLLAMA_MODEL", "mistral:instruct")
+    model: str = os.getenv("OPENAI_MODEL", "gpt-4-1106-preview")
     model_endpoint: str = "openai"
     openai_key: Optional[str] = os.getenv("OPENAI_API_KEY")
     openai_temperature: float = float(os.getenv("OPENAI_TEMPERATURE", 0.0))
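
For reference, a minimal usage sketch of the new environment-variable overrides. The variable names (LLM_PROVIDER, CUSTOM_OLLAMA_ENDPOINT, CUSTOM_OLLAMA_MODEL, OPENAI_MODEL, etc.) come from the diff above; the chosen values and the direct Config() instantiation are illustrative assumptions, not part of the patch. Because the defaults are read with os.getenv() at class-definition time, the variables must be set (exported in the shell, or via a .env loader) before cognee.config is imported.

    # Illustrative sketch only: values and Config() usage are assumptions.
    import os

    # Set overrides before importing the module, since the class attributes
    # capture os.getenv() results when the class body is executed.
    os.environ["LLM_PROVIDER"] = "ollama"
    os.environ["CUSTOM_OLLAMA_ENDPOINT"] = "http://localhost:11434/v1"
    os.environ["CUSTOM_OLLAMA_MODEL"] = "mistral:instruct"

    from cognee.config import Config

    config = Config()
    print(config.llm_provider)   # "ollama"
    print(config.ollama_model)   # "mistral:instruct"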