From 3b78eb88bd4bc778089f1061408cb413b5e7ff20 Mon Sep 17 00:00:00 2001
From: Andrej Milicevic
Date: Mon, 24 Nov 2025 16:38:23 +0100
Subject: [PATCH] fix: use s3 config

---
 cognee/api/v1/search/search.py                 |  2 +-
 .../infrastructure/files/storage/s3_config.py  |  3 ++
 cognee/infrastructure/llm/config.py            | 14 -------
 .../litellm_instructor/llm/bedrock/adapter.py  | 41 +++++++------------
 .../litellm_instructor/llm/get_llm_client.py   |  8 +---
 5 files changed, 19 insertions(+), 49 deletions(-)

diff --git a/cognee/api/v1/search/search.py b/cognee/api/v1/search/search.py
index e64bcb848..49f7aee51 100644
--- a/cognee/api/v1/search/search.py
+++ b/cognee/api/v1/search/search.py
@@ -161,7 +161,7 @@ async def search(
     - LLM_API_KEY: API key for your LLM provider
 
     Optional:
-    - LLM_PROVIDER, LLM_MODEL: Configure LLM for search responses (supports: openai, anthropic, gemini, ollama, bedrock)
+    - LLM_PROVIDER, LLM_MODEL: Configure LLM for search responses
     - VECTOR_DB_PROVIDER: Must match what was used during cognify
     - GRAPH_DATABASE_PROVIDER: Must match what was used during cognify
 
diff --git a/cognee/infrastructure/files/storage/s3_config.py b/cognee/infrastructure/files/storage/s3_config.py
index 0b9372b7e..4cc6b1d63 100644
--- a/cognee/infrastructure/files/storage/s3_config.py
+++ b/cognee/infrastructure/files/storage/s3_config.py
@@ -8,6 +8,9 @@
 class S3Config(BaseSettings):
     aws_endpoint_url: Optional[str] = None
     aws_access_key_id: Optional[str] = None
     aws_secret_access_key: Optional[str] = None
+    aws_session_token: Optional[str] = None
+    aws_profile_name: Optional[str] = None
+    aws_bedrock_runtime_endpoint: Optional[str] = None
 
     model_config = SettingsConfigDict(env_file=".env", extra="allow")
 
diff --git a/cognee/infrastructure/llm/config.py b/cognee/infrastructure/llm/config.py
index 091f8e6ea..7aa8f33f7 100644
--- a/cognee/infrastructure/llm/config.py
+++ b/cognee/infrastructure/llm/config.py
@@ -27,12 +27,6 @@ class LLMConfig(BaseSettings):
     - embedding_rate_limit_enabled
     - embedding_rate_limit_requests
     - embedding_rate_limit_interval
-    - aws_access_key_id (Bedrock)
-    - aws_secret_access_key (Bedrock)
-    - aws_session_token (Bedrock)
-    - aws_region_name (Bedrock)
-    - aws_profile_name (Bedrock)
-    - aws_bedrock_runtime_endpoint (Bedrock)
 
     Public methods include:
     - ensure_env_vars_for_ollama
@@ -71,14 +65,6 @@ class LLMConfig(BaseSettings):
     fallback_endpoint: str = ""
     fallback_model: str = ""
 
-    # AWS Bedrock configuration
-    aws_access_key_id: Optional[str] = None
-    aws_secret_access_key: Optional[str] = None
-    aws_session_token: Optional[str] = None
-    aws_region_name: str = "us-east-1"
-    aws_profile_name: Optional[str] = None
-    aws_bedrock_runtime_endpoint: Optional[str] = None
-
     baml_registry: ClassVar[ClientRegistry] = ClientRegistry()
 
     model_config = SettingsConfigDict(env_file=".env", extra="allow")
diff --git a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/bedrock/adapter.py b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/bedrock/adapter.py
index 868fe51b8..66f484164 100644
--- a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/bedrock/adapter.py
+++ b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/bedrock/adapter.py
@@ -11,6 +11,7 @@ from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.ll
     LLMInterface,
 )
 from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError
+from cognee.infrastructure.files.storage.s3_config import get_s3_config
 from cognee.infrastructure.files.utils.open_data_file import open_data_file
 from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.rate_limiter import (
     rate_limit_async,
@@ -34,10 +35,6 @@ class BedrockAdapter(LLMInterface):
     name = "Bedrock"
     model: str
     api_key: str
-    aws_access_key_id: str
-    aws_secret_access_key: str
-    aws_region_name: str
-    aws_profile_name: str
 
     MAX_RETRIES = 5
 
@@ -45,12 +42,6 @@ class BedrockAdapter(LLMInterface):
         self,
         model: str,
         api_key: str = None,
-        aws_access_key_id: str = None,
-        aws_secret_access_key: str = None,
-        aws_session_token: str = None,
-        aws_region_name: str = "us-east-1",
-        aws_profile_name: str = None,
-        aws_bedrock_runtime_endpoint: str = None,
         max_tokens: int = 16384,
         streaming: bool = False,
     ):
@@ -58,12 +49,6 @@ class BedrockAdapter(LLMInterface):
         self.client = instructor.from_litellm(litellm.completion)
         self.model = model
         self.api_key = api_key
-        self.aws_access_key_id = aws_access_key_id
-        self.aws_secret_access_key = aws_secret_access_key
-        self.aws_session_token = aws_session_token
-        self.aws_region_name = aws_region_name
-        self.aws_profile_name = aws_profile_name
-        self.aws_bedrock_runtime_endpoint = aws_bedrock_runtime_endpoint
         self.max_tokens = max_tokens
         self.streaming = streaming
 
@@ -89,22 +74,24 @@ IMPORTANT: You must respond with valid JSON only. Do not include any text before
             "stream": self.streaming,
         }
 
+        s3_config = get_s3_config()
+
         # Add authentication parameters
         if self.api_key:
             request_params["api_key"] = self.api_key
-        elif self.aws_access_key_id and self.aws_secret_access_key:
-            request_params["aws_access_key_id"] = self.aws_access_key_id
-            request_params["aws_secret_access_key"] = self.aws_secret_access_key
-            if self.aws_session_token:
-                request_params["aws_session_token"] = self.aws_session_token
-        elif self.aws_profile_name:
-            request_params["aws_profile_name"] = self.aws_profile_name
+        elif s3_config.aws_access_key_id and s3_config.aws_secret_access_key:
+            request_params["aws_access_key_id"] = s3_config.aws_access_key_id
+            request_params["aws_secret_access_key"] = s3_config.aws_secret_access_key
+            if s3_config.aws_session_token:
+                request_params["aws_session_token"] = s3_config.aws_session_token
+        elif s3_config.aws_profile_name:
+            request_params["aws_profile_name"] = s3_config.aws_profile_name
 
         # Add optional parameters
-        if self.aws_region_name:
-            request_params["aws_region_name"] = self.aws_region_name
-        if self.aws_bedrock_runtime_endpoint:
-            request_params["aws_bedrock_runtime_endpoint"] = self.aws_bedrock_runtime_endpoint
+        if s3_config.aws_region_name:
+            request_params["aws_region_name"] = s3_config.aws_region_name
+        if s3_config.aws_bedrock_runtime_endpoint:
+            request_params["aws_bedrock_runtime_endpoint"] = s3_config.aws_bedrock_runtime_endpoint
 
         return request_params
 
diff --git a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
index 946698a95..489f7ae8e 100644
--- a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
+++ b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/get_llm_client.py
@@ -156,13 +156,7 @@ def get_llm_client():
         return BedrockAdapter(
             model=llm_config.llm_model,
             api_key=llm_config.llm_api_key,
-            aws_access_key_id=llm_config.aws_access_key_id,
-            aws_secret_access_key=llm_config.aws_secret_access_key,
-            aws_session_token=llm_config.aws_session_token,
-            aws_region_name=llm_config.aws_region_name,
-            aws_profile_name=llm_config.aws_profile_name,
-            aws_bedrock_runtime_endpoint=llm_config.aws_bedrock_runtime_endpoint,
-            max_tokens=max_tokens,
+            max_tokens=max_completion_tokens,
             streaming=llm_config.llm_streaming,
         )
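
Note (not part of the patch above): a minimal sketch of how the relocated Bedrock credentials would be supplied after this change, assuming S3Config is populated from .env via pydantic-settings' default field-name to env-var mapping, and assuming aws_region_name is also available on S3Config, since the adapter reads it. The env var names and values below are illustrative only.

    # .env (hypothetical values; names mirror the S3Config fields shown above)
    # AWS_ACCESS_KEY_ID=AKIA...
    # AWS_SECRET_ACCESS_KEY=...
    # AWS_SESSION_TOKEN=...                # optional, only used with static keys
    # AWS_PROFILE_NAME=my-bedrock-profile  # optional alternative to static keys
    # AWS_BEDROCK_RUNTIME_ENDPOINT=https://bedrock-runtime.us-east-1.amazonaws.com

    from cognee.infrastructure.files.storage.s3_config import get_s3_config

    # BedrockAdapter now calls get_s3_config() itself when building request
    # parameters, so no AWS arguments are passed through get_llm_client();
    # this snippet only verifies that the settings were picked up.
    s3_config = get_s3_config()
    assert s3_config.aws_access_key_id or s3_config.aws_profile_name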