fix: test llm connection with gemini (#557)

<!-- .github/pull_request_template.md -->

## Description
Temporary fix for the Gemini LLM until the Gemini API allows empty
dictionaries in the model schema definition.

## DCO Affirmation
I affirm that all code in every commit of this pull request conforms to
the terms of the Topoteretes Developer Certificate of Origin


<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->

## Summary by CodeRabbit

- **New Features**
- AI responses now adjust their format dynamically based on the type of
output, providing a streamlined text display when appropriate.
- An extended request timeout improves the handling of longer operations
for more reliable interactions.

- **Bug Fixes**
- Enhanced error management during connectivity tests ensures a more
robust and stable user experience.

<!-- end of auto-generated comment: release notes by coderabbit.ai -->

---------

Co-authored-by: Boris <boris@topoteretes.com>
This commit is contained in:
Igor Ilic 2025-02-20 11:41:29 +01:00 committed by GitHub
parent 45f7c63322
commit f2e0f47565
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 70 additions and 59 deletions

View file

@ -41,6 +41,10 @@ class GeminiAdapter(LLMInterface):
self, text_input: str, system_prompt: str, response_model: Type[BaseModel] self, text_input: str, system_prompt: str, response_model: Type[BaseModel]
) -> BaseModel: ) -> BaseModel:
try: try:
if response_model is str:
simplified_prompt = system_prompt
response_schema = {"type": "string"}
else:
response_schema = { response_schema = {
"type": "object", "type": "object",
"properties": { "properties": {
@ -69,7 +73,11 @@ class GeminiAdapter(LLMInterface):
"target_node_id": {"type": "string"}, "target_node_id": {"type": "string"},
"relationship_name": {"type": "string"}, "relationship_name": {"type": "string"},
}, },
"required": ["source_node_id", "target_node_id", "relationship_name"], "required": [
"source_node_id",
"target_node_id",
"relationship_name",
],
}, },
}, },
}, },
@ -77,16 +85,16 @@ class GeminiAdapter(LLMInterface):
} }
simplified_prompt = f""" simplified_prompt = f"""
{system_prompt} {system_prompt}
IMPORTANT: Your response must be a valid JSON object with these required fields: IMPORTANT: Your response must be a valid JSON object with these required fields:
1. summary: A brief summary 1. summary: A brief summary
2. description: A detailed description 2. description: A detailed description
3. nodes: Array of nodes with name, type, description, id, and label 3. nodes: Array of nodes with name, type, description, id, and label
4. edges: Array of edges with source_node_id, target_node_id, and relationship_name 4. edges: Array of edges with source_node_id, target_node_id, and relationship_name
Example structure: Example structure:
{{ {{
"summary": "Brief summary", "summary": "Brief summary",
"description": "Detailed description", "description": "Detailed description",
"nodes": [ "nodes": [
@ -105,7 +113,7 @@ Example structure:
"relationship_name": "relates_to" "relationship_name": "relates_to"
}} }}
] ]
}}""" }}"""
messages = [ messages = [
{"role": "system", "content": simplified_prompt}, {"role": "system", "content": simplified_prompt},
@ -120,12 +128,14 @@ Example structure:
max_tokens=self.max_tokens, max_tokens=self.max_tokens,
temperature=0.1, temperature=0.1,
response_format={"type": "json_object", "schema": response_schema}, response_format={"type": "json_object", "schema": response_schema},
timeout=10, timeout=100,
num_retries=self.MAX_RETRIES, num_retries=self.MAX_RETRIES,
) )
if response.choices and response.choices[0].message.content: if response.choices and response.choices[0].message.content:
content = response.choices[0].message.content content = response.choices[0].message.content
if response_model is str:
return content
return response_model.model_validate_json(content) return response_model.model_validate_json(content)
except litellm.exceptions.BadRequestError as e: except litellm.exceptions.BadRequestError as e:

View file

@ -46,6 +46,7 @@ async def test_llm_connection():
system_prompt='Respond to me with the following string: "test"', system_prompt='Respond to me with the following string: "test"',
response_model=str, response_model=str,
) )
except Exception as e: except Exception as e:
logger.error(e) logger.error(e)
logger.error("Connection to LLM could not be established.") logger.error("Connection to LLM could not be established.")