mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00
fix(agent/llm): Include id in tool_calls in prompt
OpenAI requires the `id` property on tool calls in assistant messages. We weren't storing it in the `AssistantChatMessage` created from the LLM's response, which caused an error when adding those messages back to the prompt. Fix: * Add `id` to the `AssistantToolCall` and `AssistantToolCallDict` types in model_providers/schema.py * Amend `_tool_calls_compat_extract_calls` to generate an ID for extracted tool calls --- Co-authored-by: kcze <kpczerwinski@gmail.com>
This commit is contained in:
@@ -758,6 +758,7 @@ def _functions_compat_fix_kwargs(
|
||||
def _tool_calls_compat_extract_calls(response: str) -> Iterator[AssistantToolCall]:
|
||||
import json
|
||||
import re
|
||||
import uuid
|
||||
|
||||
logging.debug(f"Trying to extract tool calls from response:\n{response}")
|
||||
|
||||
@@ -770,6 +771,7 @@ def _tool_calls_compat_extract_calls(response: str) -> Iterator[AssistantToolCal
|
||||
tool_calls: list[AssistantToolCallDict] = json.loads(block.group(1))
|
||||
|
||||
for t in tool_calls:
|
||||
t["id"] = str(uuid.uuid4())
|
||||
t["function"]["arguments"] = str(t["function"]["arguments"]) # HACK
|
||||
|
||||
yield AssistantToolCall.parse_obj(t)
|
||||
|
||||
@@ -77,13 +77,13 @@ class AssistantFunctionCallDict(TypedDict):
|
||||
|
||||
|
||||
class AssistantToolCall(BaseModel):
    """A single tool call emitted by the assistant in an LLM response.

    Mirrors the OpenAI assistant-message `tool_calls` entry shape.
    """

    # Required: OpenAI rejects assistant messages whose tool calls lack an
    # `id` when they are fed back into the prompt.
    id: str
    # Only "function"-type tool calls are supported here.
    type: Literal["function"]
    # The function name and arguments of the call; see AssistantFunctionCall.
    function: AssistantFunctionCall
||||
class AssistantToolCallDict(TypedDict):
    """Plain-dict counterpart of `AssistantToolCall`.

    Used for tool calls parsed from raw JSON before validation into the
    pydantic model (e.g. by `_tool_calls_compat_extract_calls`).
    """

    # Required: OpenAI rejects assistant messages whose tool calls lack an
    # `id` when they are fed back into the prompt.
    id: str
    # Only "function"-type tool calls are supported here.
    type: Literal["function"]
    # The function name and arguments of the call; see AssistantFunctionCallDict.
    function: AssistantFunctionCallDict
||||
Reference in New Issue
Block a user