docs: Update .env.template to include llm and embedding options

This commit is contained in:
Igor Ilic 2025-01-23 18:05:45 +01:00
parent b25a82e206
commit 1319944dcd
3 changed files with 17 additions and 4 deletions

View file

@@ -1,12 +1,27 @@
ENV="local" ENV="local"
TOKENIZERS_PARALLELISM="false" TOKENIZERS_PARALLELISM="false"
LLM_API_KEY=
# LLM settings
LLM_API_KEY=""
LLM_MODEL="openai/gpt-4o-mini"
LLM_PROVIDER="openai"
LLM_ENDPOINT=""
LLM_API_VERSION=""
GRAPHISTRY_USERNAME= GRAPHISTRY_USERNAME=
GRAPHISTRY_PASSWORD= GRAPHISTRY_PASSWORD=
SENTRY_REPORTING_URL= SENTRY_REPORTING_URL=
# Embedding settings
EMBEDDING_PROVIDER="openai"
EMBEDDING_API_KEY=""
EMBEDDING_MODEL="openai/text-embedding-3-large"
EMBEDDING_ENDPOINT=""
EMBEDDING_API_VERSION=""
EMBEDDING_DIMENSIONS=3072
EMBEDDING_MAX_TOKENS=8191
# "neo4j" or "networkx" # "neo4j" or "networkx"
GRAPH_DATABASE_PROVIDER="networkx" GRAPH_DATABASE_PROVIDER="networkx"
# Not needed if using networkx # Not needed if using networkx

View file

@@ -9,8 +9,6 @@ from cognee.infrastructure.databases.exceptions.EmbeddingException import Embedd
from cognee.infrastructure.llm.tokenizer.Gemini import GeminiTokenizer from cognee.infrastructure.llm.tokenizer.Gemini import GeminiTokenizer
from cognee.infrastructure.llm.tokenizer.HuggingFace import HuggingFaceTokenizer from cognee.infrastructure.llm.tokenizer.HuggingFace import HuggingFaceTokenizer
from cognee.infrastructure.llm.tokenizer.TikToken import TikTokenTokenizer from cognee.infrastructure.llm.tokenizer.TikToken import TikTokenTokenizer
from transformers import AutoTokenizer
import tiktoken # Assuming this is how you import TikToken
litellm.set_verbose = False litellm.set_verbose = False
logger = logging.getLogger("LiteLLMEmbeddingEngine") logger = logging.getLogger("LiteLLMEmbeddingEngine")

View file

@@ -10,7 +10,7 @@ class EmbeddingConfig(BaseSettings):
embedding_endpoint: Optional[str] = None embedding_endpoint: Optional[str] = None
embedding_api_key: Optional[str] = None embedding_api_key: Optional[str] = None
embedding_api_version: Optional[str] = None embedding_api_version: Optional[str] = None
embedding_max_tokens: Optional[int] = float("inf") embedding_max_tokens: Optional[int] = 8191
model_config = SettingsConfigDict(env_file=".env", extra="allow") model_config = SettingsConfigDict(env_file=".env", extra="allow")