fix: Bedrock adapter fixes and test workflow consolidation

Andrej Milicevic 2025-11-25 12:58:07 +01:00
parent d97acba78e
commit e0d48c043a
9 changed files with 117 additions and 144 deletions

View file

@@ -1,28 +0,0 @@
name: test | bedrock | api key
on:
workflow_call:
jobs:
test-bedrock-api-key:
name: Run Bedrock API Key Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
- name: Run Bedrock API Key Test
env:
LLM_PROVIDER: "bedrock"
LLM_API_KEY: ${{ secrets.BEDROCK_API_KEY }}
LLM_MODEL: "us.anthropic.claude-3-5-sonnet-20241022-v2:0"
AWS_REGION_NAME: "us-east-1"
EMBEDDING_PROVIDER: "bedrock"
EMBEDDING_MODEL: "amazon.titan-embed-text-v1"
EMBEDDING_DIMENSIONS: "1536"
run: poetry run python ./examples/python/simple_example.py

View file

@@ -1,29 +0,0 @@
name: test | bedrock | aws credentials
on:
workflow_call:
jobs:
test-bedrock-aws-credentials:
name: Run Bedrock AWS Credentials Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
- name: Run Bedrock AWS Credentials Test
env:
LLM_PROVIDER: "bedrock"
LLM_MODEL: "us.anthropic.claude-3-5-sonnet-20240620-v1:0"
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION_NAME: "us-east-1"
EMBEDDING_PROVIDER: "cohere"
EMBEDDING_MODEL: "cohere.embed-english-v3"
EMBEDDING_DIMENSIONS: "1024"
run: poetry run python ./examples/python/simple_example.py

View file

@@ -1,37 +0,0 @@
name: test | bedrock | aws profile
on:
workflow_call:
jobs:
test-bedrock-aws-profile:
name: Run Bedrock AWS Profile Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
- name: Configure AWS Profile
run: |
mkdir -p ~/.aws
cat > ~/.aws/credentials << EOF
[bedrock-test]
aws_access_key_id = ${{ secrets.AWS_ACCESS_KEY_ID }}
aws_secret_access_key = ${{ secrets.AWS_SECRET_ACCESS_KEY }}
EOF
- name: Run Bedrock AWS Profile Test
env:
LLM_PROVIDER: "bedrock"
LLM_MODEL: "us.anthropic.claude-3-5-haiku-20241022-v1:0"
AWS_PROFILE_NAME: "bedrock-test"
AWS_REGION_NAME: "us-east-1"
EMBEDDING_PROVIDER: "bedrock"
EMBEDDING_MODEL: "amazon.titan-embed-text-v2:0"
EMBEDDING_DIMENSIONS: "1024"
run: poetry run python ./examples/python/simple_example.py

View file

@@ -84,3 +84,91 @@ jobs:
EMBEDDING_DIMENSIONS: "3072"
EMBEDDING_MAX_TOKENS: "8191"
run: uv run python ./examples/python/simple_example.py
test-bedrock-api-key:
name: Run Bedrock API Key Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
extra-dependencies: "aws"
- name: Run Bedrock API Key Test
env:
LLM_PROVIDER: "bedrock"
LLM_API_KEY: ${{ secrets.BEDROCK_API_KEY }}
LLM_MODEL: "eu.amazon.nova-lite-v1:0"
LLM_MAX_TOKENS: "16384"
AWS_REGION_NAME: "eu-west-1"
EMBEDDING_PROVIDER: "bedrock"
EMBEDDING_API_KEY: ${{ secrets.BEDROCK_API_KEY }}
EMBEDDING_MODEL: "amazon.titan-embed-text-v2:0"
EMBEDDING_DIMENSIONS: "1024"
EMBEDDING_MAX_TOKENS: "8191"
run: poetry run python ./examples/python/simple_example.py
test-bedrock-aws-credentials:
name: Run Bedrock AWS Credentials Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
extra-dependencies: "aws"
- name: Run Bedrock AWS Credentials Test
env:
LLM_PROVIDER: "bedrock"
LLM_MODEL: "eu.amazon.nova-lite-v1:0"
LLM_MAX_TOKENS: "16384"
AWS_REGION_NAME: "eu-west-1"
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
EMBEDDING_PROVIDER: "bedrock"
EMBEDDING_API_KEY: ${{ secrets.BEDROCK_API_KEY }}
EMBEDDING_MODEL: "amazon.titan-embed-text-v2:0"
EMBEDDING_DIMENSIONS: "1024"
EMBEDDING_MAX_TOKENS: "8191"
run: poetry run python ./examples/python/simple_example.py
test-bedrock-aws-profile:
name: Run Bedrock AWS Profile Test
runs-on: ubuntu-22.04
steps:
- name: Check out repository
uses: actions/checkout@v4
- name: Cognee Setup
uses: ./.github/actions/cognee_setup
with:
python-version: '3.11.x'
- name: Configure AWS Profile
run: |
mkdir -p ~/.aws
cat > ~/.aws/credentials << EOF
[bedrock-test]
aws_access_key_id = ${{ secrets.AWS_ACCESS_KEY_ID }}
aws_secret_access_key = ${{ secrets.AWS_SECRET_ACCESS_KEY }}
EOF
- name: Run Bedrock AWS Profile Test
env:
LLM_PROVIDER: "bedrock"
LLM_MODEL: "eu.amazon.nova-lite-v1:0"
AWS_PROFILE_NAME: "bedrock-test"
AWS_REGION_NAME: "eu-west-1"
EMBEDDING_PROVIDER: "bedrock"
EMBEDDING_MODEL: "amazon.titan-embed-text-v2:0"
EMBEDDING_DIMENSIONS: "1024"
EMBEDDING_MAX_TOKENS: "8191"
run: poetry run python ./examples/python/simple_example.py

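The three jobs above cover Bedrock's auth paths: a bearer API key, explicit AWS credentials, and a named AWS profile written to ~/.aws/credentials. A minimal, hypothetical sanity check (not part of the commit) for that profile step, assuming boto3 is available in the job environment (e.g. through the "aws" extra):

import boto3

# Resolve the [bedrock-test] profile that the "Configure AWS Profile" step writes;
# the region matches the AWS_REGION_NAME used by the job.
session = boto3.Session(profile_name="bedrock-test", region_name="eu-west-1")
credentials = session.get_credentials()
print("profile resolves:", credentials is not None and bool(credentials.access_key))
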
View file

@@ -139,24 +139,6 @@ jobs:
uses: ./.github/workflows/test_llms.yml
secrets: inherit
bedrock-tests:
name: Bedrock Tests
needs: [basic-tests, e2e-tests]
uses: ./.github/workflows/test_bedrock_api_key.yml
secrets: inherit
bedrock-aws-credentials-tests:
name: Bedrock AWS Credentials Tests
needs: [basic-tests, e2e-tests]
uses: ./.github/workflows/test_bedrock_aws_credentials.yml
secrets: inherit
bedrock-aws-profile-tests:
name: Bedrock AWS Profile Tests
needs: [basic-tests, e2e-tests]
uses: ./.github/workflows/test_bedrock_aws_profile.yml
secrets: inherit
# Ollama tests moved to the end
ollama-tests:
name: Ollama Tests
@@ -193,9 +175,6 @@ jobs:
db-examples-tests,
mcp-test,
llm-tests,
bedrock-tests,
bedrock-aws-credentials-tests,
bedrock-aws-profile-tests,
ollama-tests,
relational-db-migration-tests,
docker-compose-test,
@@ -218,9 +197,6 @@ jobs:
"${{ needs.db-examples-tests.result }}" == "success" &&
"${{ needs.relational-db-migration-tests.result }}" == "success" &&
"${{ needs.llm-tests.result }}" == "success" &&
"${{ needs.bedrock-tests.result }}" == "success" &&
"${{ needs.bedrock-aws-credentials-tests.result }}" == "success" &&
"${{ needs.bedrock-aws-profile-tests.result }}" == "success" &&
"${{ needs.docker-compose-test.result }}" == "success" &&
"${{ needs.docker-ci-test.result }}" == "success" &&
"${{ needs.ollama-tests.result }}" == "success" ]]; then

View file

@@ -3,4 +3,3 @@
from .adapter import BedrockAdapter
__all__ = ["BedrockAdapter"]

View file

@@ -1,18 +1,19 @@
import litellm
import instructor
from typing import Type, Optional
from typing import Type
from pydantic import BaseModel
from litellm.exceptions import ContentPolicyViolationError
from instructor.exceptions import InstructorRetryException
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.LLMGateway import LLMGateway
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.llm_interface import (
LLMInterface,
)
from cognee.infrastructure.llm.exceptions import ContentPolicyFilterError
from cognee.infrastructure.llm.exceptions import (
ContentPolicyFilterError,
MissingSystemPromptPathError,
)
from cognee.infrastructure.files.storage.s3_config import get_s3_config
from cognee.infrastructure.files.utils.open_data_file import open_data_file
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.rate_limiter import (
rate_limit_async,
rate_limit_sync,
@@ -35,6 +36,7 @@ class BedrockAdapter(LLMInterface):
name = "Bedrock"
model: str
api_key: str
default_instructor_mode = "json_schema_mode"
MAX_RETRIES = 5
@@ -42,23 +44,23 @@ class BedrockAdapter(LLMInterface):
self,
model: str,
api_key: str = None,
max_tokens: int = 16384,
max_completion_tokens: int = 16384,
streaming: bool = False,
instructor_mode: str = None,
):
self.aclient = instructor.from_litellm(litellm.acompletion)
self.instructor_mode = instructor_mode if instructor_mode else self.default_instructor_mode
self.aclient = instructor.from_litellm(litellm.acompletion, mode=instructor.Mode(self.instructor_mode))
self.client = instructor.from_litellm(litellm.completion)
self.model = model
self.api_key = api_key
self.max_tokens = max_tokens
self.max_completion_tokens = max_completion_tokens
self.streaming = streaming
def _create_bedrock_request(
self, text_input: str, system_prompt: str, response_model: Type[BaseModel]
) -> dict:
"""Create Bedrock request with authentication and enhanced JSON formatting."""
enhanced_system_prompt = f"""{system_prompt}
IMPORTANT: You must respond with valid JSON only. Do not include any text before or after the JSON. The response must be a valid JSON object that can be parsed directly."""
"""Create Bedrock request with authentication."""
request_params = {
"model": self.model,
@@ -66,11 +68,11 @@ IMPORTANT: You must respond with valid JSON only. Do not include any text before
"drop_params": True,
"messages": [
{"role": "user", "content": text_input},
{"role": "system", "content": enhanced_system_prompt},
{"role": "system", "content": system_prompt},
],
"response_model": response_model,
"max_retries": self.MAX_RETRIES,
"max_tokens": self.max_tokens,
"max_completion_tokens": self.max_completion_tokens,
"stream": self.streaming,
}
@@ -87,9 +89,10 @@ IMPORTANT: You must respond with valid JSON only. Do not include any text before
elif s3_config.aws_profile_name:
request_params["aws_profile_name"] = s3_config.aws_profile_name
if s3_config.aws_region:
request_params["aws_region_name"] = s3_config.aws_region
# Add optional parameters
if s3_config.aws_region_name:
request_params["aws_region_name"] = s3_config.aws_region_name
if s3_config.aws_bedrock_runtime_endpoint:
request_params["aws_bedrock_runtime_endpoint"] = s3_config.aws_bedrock_runtime_endpoint
@@ -137,7 +140,7 @@ IMPORTANT: You must respond with valid JSON only. Do not include any text before
if not text_input:
text_input = "No user input provided."
if not system_prompt:
raise InvalidValueError(message="No system prompt path provided.")
raise MissingSystemPromptPathError()
system_prompt = LLMGateway.read_query_prompt(system_prompt)
formatted_prompt = (

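For reference, a minimal sketch (not part of the commit) of how the reworked constructor is wired up, mirroring the get_llm_client call in the next file; the model ID and token limit are the ones used by the workflow jobs above:

from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.bedrock.adapter import (
    BedrockAdapter,
)

# api_key may now be None: litellm can authenticate via AWS credentials or a profile instead.
adapter = BedrockAdapter(
    model="eu.amazon.nova-lite-v1:0",
    api_key=None,
    max_completion_tokens=16384,          # renamed from max_tokens
    streaming=False,
    instructor_mode="json_schema_mode",   # None falls back to default_instructor_mode
)
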
View file

@@ -172,8 +172,8 @@ def get_llm_client(raise_api_key_error: bool = True):
)
elif provider == LLMProvider.BEDROCK:
if llm_config.llm_api_key is None and raise_api_key_error:
raise LLMAPIKeyNotSetError()
# if llm_config.llm_api_key is None and raise_api_key_error:
# raise LLMAPIKeyNotSetError()
from cognee.infrastructure.llm.structured_output_framework.litellm_instructor.llm.bedrock.adapter import (
BedrockAdapter,
@@ -182,8 +182,9 @@
return BedrockAdapter(
model=llm_config.llm_model,
api_key=llm_config.llm_api_key,
max_tokens=max_completion_tokens,
max_completion_tokens=max_completion_tokens,
streaming=llm_config.llm_streaming,
instructor_mode=llm_config.llm_instructor_mode.lower(),
)
else:

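Note that the lowercased llm_instructor_mode string is passed straight into instructor.Mode(...) by the adapter, so it must be a valid instructor mode value. A quick hedged check for the default used here, assuming the installed instructor release exposes "json_schema_mode" as a mode value:

import instructor

# Assumed mapping: "json_schema_mode" -> instructor.Mode.JSON_SCHEMA.
# An unrecognized string would raise ValueError inside BedrockAdapter.__init__.
mode = instructor.Mode("json_schema_mode")
print(mode)
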
View file

@@ -164,16 +164,16 @@ def get_settings() -> SettingsDict:
],
"bedrock": [
{
"value": "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
"label": "Claude 3.5 Sonnet",
"value": "eu.anthropic.claude-sonnet-4-5-20250929-v1:0",
"label": "Claude 4.5 Sonnet",
},
{
"value": "us.anthropic.claude-3-5-haiku-20241022-v1:0",
"label": "Claude 3.5 Haiku",
"value": "eu.anthropic.claude-haiku-4-5-20251001-v1:0",
"label": "Claude 4.5 Haiku",
},
{
"value": "us.anthropic.claude-3-5-sonnet-20240620-v1:0",
"label": "Claude 3.5 Sonnet (June)",
"value": "eu.amazon.nova-lite-v1:0",
"label": "Amazon Nova Lite",
},
],
},