From f88289c425314d54637968170961f3584024fb35 Mon Sep 17 00:00:00 2001
From: Igor Ilic
Date: Tue, 30 Sep 2025 14:05:12 +0200
Subject: [PATCH] fix: Resolve issue with processing for gpt4 series models

---
 .../litellm_instructor/llm/openai/adapter.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py
index 7ea9dedaa..7273de976 100644
--- a/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py
+++ b/cognee/infrastructure/llm/structured_output_framework/litellm_instructor/llm/openai/adapter.py
@@ -73,10 +73,19 @@ class OpenAIAdapter(LLMInterface):
         fallback_api_key: str = None,
         fallback_endpoint: str = None,
     ):
-        self.aclient = instructor.from_litellm(
-            litellm.acompletion, mode=instructor.Mode.JSON_SCHEMA
-        )
-        self.client = instructor.from_litellm(litellm.completion, mode=instructor.Mode.JSON_SCHEMA)
+        # TODO: With gpt-5 series models OpenAI expects JSON_SCHEMA as a mode for structured outputs.
+        # Make sure all new gpt models will work with this mode as well.
+        if "gpt-5" in model:
+            self.aclient = instructor.from_litellm(
+                litellm.acompletion, mode=instructor.Mode.JSON_SCHEMA
+            )
+            self.client = instructor.from_litellm(
+                litellm.completion, mode=instructor.Mode.JSON_SCHEMA
+            )
+        else:
+            self.aclient = instructor.from_litellm(litellm.acompletion)
+            self.client = instructor.from_litellm(litellm.completion)
+
         self.transcription_model = transcription_model
         self.model = model
         self.api_key = api_key
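
Note (illustrative, not part of the patch): a minimal sketch of the mode-selection logic introduced above, assuming the constructor arguments shown in the hunk; the helper function and model names below are hypothetical and only mirror the patch's behavior.

    # Hypothetical helper mirroring the patch: gpt-5 series models get
    # instructor's JSON_SCHEMA mode, all other models use instructor's default mode.
    import instructor
    import litellm

    def make_clients(model: str):
        if "gpt-5" in model:
            aclient = instructor.from_litellm(
                litellm.acompletion, mode=instructor.Mode.JSON_SCHEMA
            )
            client = instructor.from_litellm(
                litellm.completion, mode=instructor.Mode.JSON_SCHEMA
            )
        else:
            aclient = instructor.from_litellm(litellm.acompletion)
            client = instructor.from_litellm(litellm.completion)
        return aclient, client

    # Example: "gpt-5-mini" selects JSON_SCHEMA mode, while "gpt-4o"
    # falls back to instructor's default mode, which is what the fix targets.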