fix: use s3 config

parent c91d1ff0ae
commit 3b78eb88bd

5 changed files with 19 additions and 49 deletions
@@ -161,7 +161,7 @@ async def search(
     - LLM_API_KEY: API key for your LLM provider

     Optional:
-    - LLM_PROVIDER, LLM_MODEL: Configure LLM for search responses (supports: openai, anthropic, gemini, ollama, bedrock)
+    - LLM_PROVIDER, LLM_MODEL: Configure LLM for search responses
     - VECTOR_DB_PROVIDER: Must match what was used during cognify
     - GRAPH_DATABASE_PROVIDER: Must match what was used during cognify
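
A minimal sketch of supplying the settings listed in this docstring, assuming they are read from the process environment before the search runs; all values below are placeholders, not taken from this diff:

    import os

    # Placeholder values; the variable names follow the docstring above.
    os.environ["LLM_API_KEY"] = "your-provider-api-key"       # required
    os.environ["LLM_PROVIDER"] = "openai"                      # optional, example provider
    os.environ["LLM_MODEL"] = "gpt-4o-mini"                    # optional, example model
    os.environ["VECTOR_DB_PROVIDER"] = "lancedb"               # must match the cognify run
    os.environ["GRAPH_DATABASE_PROVIDER"] = "kuzu"             # must match the cognify run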
@@ -8,6 +8,9 @@ class S3Config(BaseSettings):
     aws_endpoint_url: Optional[str] = None
     aws_access_key_id: Optional[str] = None
     aws_secret_access_key: Optional[str] = None
+    aws_session_token: Optional[str] = None
+    aws_profile_name: Optional[str] = None
+    aws_bedrock_runtime_endpoint: Optional[str] = None

     model_config = SettingsConfigDict(env_file=".env", extra="allow")
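
A minimal sketch of consuming the extended S3Config, assuming get_s3_config() (imported later in this diff) returns an instance of this settings class populated from .env or the environment; the commented values are placeholders:

    from cognee.infrastructure.files.storage.s3_config import get_s3_config

    s3_config = get_s3_config()

    # The three new fields sit next to the existing S3 credentials, so Bedrock can
    # reuse whatever AWS identity is already configured for file storage.
    print(s3_config.aws_session_token)             # placeholder: None or a temporary STS token
    print(s3_config.aws_profile_name)              # placeholder: None or e.g. "default"
    print(s3_config.aws_bedrock_runtime_endpoint)  # placeholder: None or a custom endpoint URL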
@@ -27,12 +27,6 @@ class LLMConfig(BaseSettings):
     - embedding_rate_limit_enabled
     - embedding_rate_limit_requests
     - embedding_rate_limit_interval
-    - aws_access_key_id (Bedrock)
-    - aws_secret_access_key (Bedrock)
-    - aws_session_token (Bedrock)
-    - aws_region_name (Bedrock)
-    - aws_profile_name (Bedrock)
-    - aws_bedrock_runtime_endpoint (Bedrock)

     Public methods include:
     - ensure_env_vars_for_ollama
@@ -71,14 +65,6 @@ class LLMConfig(BaseSettings):
     fallback_endpoint: str = ""
     fallback_model: str = ""

-    # AWS Bedrock configuration
-    aws_access_key_id: Optional[str] = None
-    aws_secret_access_key: Optional[str] = None
-    aws_session_token: Optional[str] = None
-    aws_region_name: str = "us-east-1"
-    aws_profile_name: Optional[str] = None
-    aws_bedrock_runtime_endpoint: Optional[str] = None
-
     baml_registry: ClassVar[ClientRegistry] = ClientRegistry()

     model_config = SettingsConfigDict(env_file=".env", extra="allow")
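
With these fields removed, Bedrock credentials are no longer read from LLMConfig; a rough sketch of pulling the same values from the shared S3/AWS settings instead, using the field names added earlier in this diff:

    from cognee.infrastructure.files.storage.s3_config import get_s3_config

    s3_config = get_s3_config()

    # Previously these lived on llm_config (aws_access_key_id, aws_profile_name, ...);
    # after this commit the single source of truth is the S3/AWS settings object.
    bedrock_credentials = {
        "aws_access_key_id": s3_config.aws_access_key_id,
        "aws_secret_access_key": s3_config.aws_secret_access_key,
        "aws_session_token": s3_config.aws_session_token,
        "aws_region_name": s3_config.aws_region_name,
        "aws_profile_name": s3_config.aws_profile_name,
        "aws_bedrock_runtime_endpoint": s3_config.aws_bedrock_runtime_endpoint,
    }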
@@ -11,6 +11,7 @@ from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.ll
     LLMInterface,
 )
 from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError
+from cognee.infrastructure.files.storage.s3_config import get_s3_config
 from cognee.infrastructure.files.utils.open_data_file import open_data_file
 from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.rate_limiter import (
     rate_limit_async,
@@ -34,10 +35,6 @@ class BedrockAdapter(LLMInterface):
     name = "Bedrock"
     model: str
     api_key: str
-    aws_access_key_id: str
-    aws_secret_access_key: str
-    aws_region_name: str
-    aws_profile_name: str

     MAX_RETRIES = 5
@@ -45,12 +42,6 @@ class BedrockAdapter(LLMInterface):
         self,
         model: str,
         api_key: str = None,
-        aws_access_key_id: str = None,
-        aws_secret_access_key: str = None,
-        aws_session_token: str = None,
-        aws_region_name: str = "us-east-1",
-        aws_profile_name: str = None,
-        aws_bedrock_runtime_endpoint: str = None,
         max_tokens: int = 16384,
         streaming: bool = False,
     ):
@@ -58,12 +49,6 @@ class BedrockAdapter(LLMInterface):
         self.client = instructor.from_litellm(litellm.completion)
         self.model = model
         self.api_key = api_key
-        self.aws_access_key_id = aws_access_key_id
-        self.aws_secret_access_key = aws_secret_access_key
-        self.aws_session_token = aws_session_token
-        self.aws_region_name = aws_region_name
-        self.aws_profile_name = aws_profile_name
-        self.aws_bedrock_runtime_endpoint = aws_bedrock_runtime_endpoint
         self.max_tokens = max_tokens
         self.streaming = streaming
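
A hedged usage sketch of the slimmed-down constructor; the import path and model id are assumptions for illustration, not taken from this diff:

    # Assumption: the adapter module path; the class itself is the one edited in this diff.
    from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.bedrock.adapter import (
        BedrockAdapter,
    )

    adapter = BedrockAdapter(
        model="bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0",  # example litellm-style Bedrock id
        api_key=None,        # optional; AWS credentials are resolved from S3Config at request time
        max_tokens=16384,
        streaming=False,
    )
    # The instance no longer carries aws_* attributes; only model, api_key, max_tokens, streaming remain.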
@@ -89,22 +74,24 @@ IMPORTANT: You must respond with valid JSON only. Do not include any text before
             "stream": self.streaming,
         }

+        s3_config = get_s3_config()
+
         # Add authentication parameters
         if self.api_key:
             request_params["api_key"] = self.api_key
-        elif self.aws_access_key_id and self.aws_secret_access_key:
-            request_params["aws_access_key_id"] = self.aws_access_key_id
-            request_params["aws_secret_access_key"] = self.aws_secret_access_key
-            if self.aws_session_token:
-                request_params["aws_session_token"] = self.aws_session_token
-        elif self.aws_profile_name:
-            request_params["aws_profile_name"] = self.aws_profile_name
+        elif s3_config.aws_access_key_id and s3_config.aws_secret_access_key:
+            request_params["aws_access_key_id"] = s3_config.aws_access_key_id
+            request_params["aws_secret_access_key"] = s3_config.aws_secret_access_key
+            if s3_config.aws_session_token:
+                request_params["aws_session_token"] = s3_config.aws_session_token
+        elif s3_config.aws_profile_name:
+            request_params["aws_profile_name"] = s3_config.aws_profile_name

         # Add optional parameters
-        if self.aws_region_name:
-            request_params["aws_region_name"] = self.aws_region_name
-        if self.aws_bedrock_runtime_endpoint:
-            request_params["aws_bedrock_runtime_endpoint"] = self.aws_bedrock_runtime_endpoint
+        if s3_config.aws_region_name:
+            request_params["aws_region_name"] = s3_config.aws_region_name
+        if s3_config.aws_bedrock_runtime_endpoint:
+            request_params["aws_bedrock_runtime_endpoint"] = s3_config.aws_bedrock_runtime_endpoint

         return request_params
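
A self-contained sketch of the credential-resolution order this hunk implements: an explicit api_key wins, then static keys (plus an optional session token) from the S3/AWS settings, then a named profile, with region and runtime endpoint appended when set. The _AwsSettings stand-in and the resolve_auth name are illustrative only:

    from dataclasses import dataclass
    from typing import Optional

    # Stand-in for cognee's S3Config, just to keep the sketch self-contained.
    @dataclass
    class _AwsSettings:
        aws_access_key_id: Optional[str] = None
        aws_secret_access_key: Optional[str] = None
        aws_session_token: Optional[str] = None
        aws_region_name: Optional[str] = "us-east-1"
        aws_profile_name: Optional[str] = None
        aws_bedrock_runtime_endpoint: Optional[str] = None

    def resolve_auth(api_key: Optional[str], cfg: _AwsSettings) -> dict:
        """Mirror the precedence above: api_key, then static keys, then a named profile."""
        params: dict = {}
        if api_key:
            params["api_key"] = api_key
        elif cfg.aws_access_key_id and cfg.aws_secret_access_key:
            params["aws_access_key_id"] = cfg.aws_access_key_id
            params["aws_secret_access_key"] = cfg.aws_secret_access_key
            if cfg.aws_session_token:
                params["aws_session_token"] = cfg.aws_session_token
        elif cfg.aws_profile_name:
            params["aws_profile_name"] = cfg.aws_profile_name
        if cfg.aws_region_name:
            params["aws_region_name"] = cfg.aws_region_name
        if cfg.aws_bedrock_runtime_endpoint:
            params["aws_bedrock_runtime_endpoint"] = cfg.aws_bedrock_runtime_endpoint
        return params

    # Static keys win over a named profile when both are configured.
    print(resolve_auth(None, _AwsSettings(aws_access_key_id="AKIA-PLACEHOLDER",
                                          aws_secret_access_key="placeholder",
                                          aws_profile_name="dev")))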
@@ -156,13 +156,7 @@ def get_llm_client():
         return BedrockAdapter(
             model=llm_config.llm_model,
             api_key=llm_config.llm_api_key,
-            aws_access_key_id=llm_config.aws_access_key_id,
-            aws_secret_access_key=llm_config.aws_secret_access_key,
-            aws_session_token=llm_config.aws_session_token,
-            aws_region_name=llm_config.aws_region_name,
-            aws_profile_name=llm_config.aws_profile_name,
-            aws_bedrock_runtime_endpoint=llm_config.aws_bedrock_runtime_endpoint,
-            max_tokens=max_tokens,
+            max_tokens=max_completion_tokens,
             streaming=llm_config.llm_streaming,
         )
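
A hedged end-to-end sketch: the LLM provider and model still come from the LLM settings, while the AWS identity is picked up through S3Config. The get_llm_client import path and the environment-variable mapping onto S3Config's aws_* fields are assumptions; all values are placeholders and should be in place before cognee loads its settings:

    import os

    # Assumption: get_llm_client lives in cognee's LLM infrastructure package.
    from cognee.infrastructure.llm.get_llm_client import get_llm_client

    # LLM selection (still driven by the LLM settings).
    os.environ["LLM_PROVIDER"] = "bedrock"
    os.environ["LLM_MODEL"] = "bedrock/anthropic.claude-3-5-sonnet-20240620-v1:0"  # example id

    # AWS identity (assumed to map onto S3Config's aws_* fields via pydantic-settings).
    os.environ["AWS_ACCESS_KEY_ID"] = "AKIA-PLACEHOLDER"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "placeholder-secret"
    os.environ["AWS_REGION_NAME"] = "us-east-1"

    client = get_llm_client()  # expected to return a BedrockAdapter when the provider is "bedrock"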