From bdf5600471397db2ab38c7c1bcd7f0cbae39f019 Mon Sep 17 00:00:00 2001 From: Jialong Liu <88185941+Galleons2029@users.noreply.github.com> Date: Thu, 6 Nov 2025 19:13:53 +0800 Subject: [PATCH] Remove fallback print statement in openai_client.py Removed print statement for fallback to chat completions. --- graphiti_core/llm_client/openai_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/graphiti_core/llm_client/openai_client.py b/graphiti_core/llm_client/openai_client.py index 30f5e722..08e3cbbf 100644 --- a/graphiti_core/llm_client/openai_client.py +++ b/graphiti_core/llm_client/openai_client.py @@ -116,7 +116,7 @@ class OpenAIClient(BaseOpenAIClient): # Some providers require a schema name; use model class name by default schema_name = getattr(response_model, '__name__', 'structured_response') - print(f'Falling back to chat.completions with JSON schema for model {model}...') + # Fall back to chat.completions with a JSON schema for this model completion = await self.client.chat.completions.create( model=model, messages=messages,