conductor-checkpoint-msg_01AVxUgejEA9piS6narw4omz

This commit is contained in:
Daniel Chalef 2025-10-30 17:30:36 -07:00
parent 8e44bec395
commit ef8507a9df
2 changed files with 10 additions and 17 deletions

View file

@@ -214,22 +214,15 @@ class AnthropicClient(LLMClient):
         try:
             # Create the appropriate tool based on whether response_model is provided
             tools, tool_choice = self._create_tool(response_model)
-
-            # Build the message creation parameters
-            create_params: dict[str, typing.Any] = {
-                'system': system_message.content,
-                'max_tokens': max_creation_tokens,
-                'messages': user_messages_cast,
-                'model': self.model,
-                'tools': tools,
-                'tool_choice': tool_choice,
-            }
-
-            # Only include temperature if it's not None
-            if self.temperature is not None:
-                create_params['temperature'] = self.temperature
-
-            result = await self.client.messages.create(**create_params)
+            result = await self.client.messages.create(
+                system=system_message.content,
+                max_tokens=max_creation_tokens,
+                temperature=self.temperature,
+                messages=user_messages_cast,
+                model=self.model,
+                tools=tools,
+                tool_choice=tool_choice,
+            )

             # Extract the tool output from the response
             for content_item in result.content:

View file

@@ -39,7 +39,7 @@ class LLMConfig:
         api_key: str | None = None,
         model: str | None = None,
         base_url: str | None = None,
-        temperature: float | None = None,
+        temperature: float = DEFAULT_TEMPERATURE,
         max_tokens: int = DEFAULT_MAX_TOKENS,
         small_model: str | None = None,
     ):