From 626bc76f5ccdb830e741cf74464e1e3a967dec75 Mon Sep 17 00:00:00 2001
From: Rita Aleksziev
Date: Thu, 9 Jan 2025 12:53:26 +0100
Subject: [PATCH] Set max_tokens in config

---
 cognee/api/v1/cognify/code_graph_pipeline.py | 2 +-
 cognee/modules/cognify/config.py             | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/cognee/api/v1/cognify/code_graph_pipeline.py b/cognee/api/v1/cognify/code_graph_pipeline.py
index 53c41d43b..2d077f39b 100644
--- a/cognee/api/v1/cognify/code_graph_pipeline.py
+++ b/cognee/api/v1/cognify/code_graph_pipeline.py
@@ -71,7 +71,7 @@ async def run_code_graph_pipeline(repo_path, include_docs=True):
             Task(ingest_data_with_metadata, dataset_name="repo_docs", user=user),
             Task(get_data_list_for_user, dataset_name="repo_docs", user=user),
             Task(classify_documents),
-            Task(extract_chunks_from_documents, max_tokens=8192),
+            Task(extract_chunks_from_documents, max_tokens=cognee_config.max_tokens),
             Task(
                 extract_graph_from_data, graph_model=KnowledgeGraph, task_config={"batch_size": 50}
             ),
diff --git a/cognee/modules/cognify/config.py b/cognee/modules/cognify/config.py
index d40410bfc..dd94d8b41 100644
--- a/cognee/modules/cognify/config.py
+++ b/cognee/modules/cognify/config.py
@@ -1,12 +1,14 @@
 from functools import lru_cache
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from cognee.shared.data_models import DefaultContentPrediction, SummarizedContent
+from typing import Optional
+import os
 
 
 class CognifyConfig(BaseSettings):
     classification_model: object = DefaultContentPrediction
     summarization_model: object = SummarizedContent
-
+    max_tokens: Optional[int] = os.getenv("MAX_TOKENS")
     model_config = SettingsConfigDict(env_file=".env", extra="allow")
 
     def to_dict(self) -> dict:
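
Note (not part of the patch): with this change the chunk size is no longer hard-coded to 8192; CognifyConfig exposes max_tokens, defaulting to the MAX_TOKENS environment variable and to None when it is unset. Below is a minimal sketch of how the new setting would be consumed. It assumes pydantic-settings' default case-insensitive env-variable matching; get_cognify_config() is a hypothetical accessor, and the class body is trimmed to the field added by this patch.

    # Sketch only -- names outside the diff above are illustrative assumptions.
    import os
    from functools import lru_cache
    from typing import Optional

    from pydantic_settings import BaseSettings, SettingsConfigDict


    class CognifyConfig(BaseSettings):
        # Defaults to the MAX_TOKENS env var read at import time; None when unset.
        # pydantic-settings also matches the field name to MAX_TOKENS
        # case-insensitively at instantiation time and coerces the value to int.
        max_tokens: Optional[int] = os.getenv("MAX_TOKENS")
        model_config = SettingsConfigDict(env_file=".env", extra="allow")


    @lru_cache
    def get_cognify_config() -> CognifyConfig:  # hypothetical accessor
        return CognifyConfig()


    if __name__ == "__main__":
        os.environ["MAX_TOKENS"] = "8192"  # normally set in the shell or .env
        cognee_config = get_cognify_config()
        print(cognee_config.max_tokens)  # -> 8192; None if MAX_TOKENS is unset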