fix(agent/llm): Include id in tool_calls in prompt

OpenAI requires the `id` property on tool calls in assistant messages.
We weren't storing it in the `AssistantChatMessage` that is created from the LLM's response,
causing an error when adding those messages back to the prompt.

Fix:
* Add `id` to `AssistantToolCall` and `AssistantToolCallDict` types in model_providers/schema.py
* Amend `_tool_calls_compat_extract_calls` to generate an ID for extracted tool calls

---
Co-authored-by: kcze <kpczerwinski@gmail.com>
Author: Reinier van der Leer
Date: 2024-02-20 13:25:37 +01:00
Parent commit: 7689a51f53
This commit: 8e464c53a8
2 changed files with 4 additions and 2 deletions

View File

@@ -758,6 +758,7 @@ def _functions_compat_fix_kwargs(
def _tool_calls_compat_extract_calls(response: str) -> Iterator[AssistantToolCall]:
import json
import re
import uuid
logging.debug(f"Trying to extract tool calls from response:\n{response}")
@@ -770,6 +771,7 @@ def _tool_calls_compat_extract_calls(response: str) -> Iterator[AssistantToolCal
tool_calls: list[AssistantToolCallDict] = json.loads(block.group(1))
for t in tool_calls:
t["id"] = str(uuid.uuid4())
t["function"]["arguments"] = str(t["function"]["arguments"]) # HACK
yield AssistantToolCall.parse_obj(t)

View File

@@ -77,13 +77,13 @@ class AssistantFunctionCallDict(TypedDict):
class AssistantToolCall(BaseModel):
    """A tool call included in an LLM assistant message.

    `id` is required: OpenAI rejects assistant messages whose tool calls
    lack an `id` when those messages are fed back into the prompt.
    """

    id: str
    type: Literal["function"]
    function: AssistantFunctionCall
class AssistantToolCallDict(TypedDict):
    """Plain-dict counterpart of `AssistantToolCall`.

    Used for tool-call data parsed straight from JSON before validation;
    `id` is required so the dict round-trips into an OpenAI-valid message.
    """

    id: str
    type: Literal["function"]
    function: AssistantFunctionCallDict