Native tool call support for Mistral AI API and topic notebook. (#2135)

* Support for Mistral AI API and topic notebook.

* formatting

* formatting
Eric Zhu
2024-03-25 07:35:29 -07:00
committed by GitHub
parent 2f109f5f94
commit c8aca8dbf7
4 changed files with 1042 additions and 13 deletions


@@ -696,8 +696,8 @@ class ConversableAgent(LLMAgent):
                 id_key = "name"
             else:
                 id_key = "tool_call_id"
-
-            func_print = f"***** Response from calling {message['role']} \"{message[id_key]}\" *****"
+            id = message.get(id_key, "No id found")
+            func_print = f"***** Response from calling {message['role']} ({id}) *****"
             print(colored(func_print, "green"), flush=True)
             print(message["content"], flush=True)
             print(colored("*" * len(func_print), "green"), flush=True)
@@ -714,7 +714,7 @@ class ConversableAgent(LLMAgent):
             if "function_call" in message and message["function_call"]:
                 function_call = dict(message["function_call"])
                 func_print = (
-                    f"***** Suggested function Call: {function_call.get('name', '(No function name found)')} *****"
+                    f"***** Suggested function call: {function_call.get('name', '(No function name found)')} *****"
                 )
                 print(colored(func_print, "green"), flush=True)
                 print(
@@ -726,9 +726,9 @@ class ConversableAgent(LLMAgent):
                 print(colored("*" * len(func_print), "green"), flush=True)
             if "tool_calls" in message and message["tool_calls"]:
                 for tool_call in message["tool_calls"]:
-                    id = tool_call.get("id", "(No id found)")
+                    id = tool_call.get("id", "No tool call id found")
                     function_call = dict(tool_call.get("function", {}))
-                    func_print = f"***** Suggested tool Call ({id}): {function_call.get('name', '(No function name found)')} *****"
+                    func_print = f"***** Suggested tool call ({id}): {function_call.get('name', '(No function name found)')} *****"
                     print(colored(func_print, "green"), flush=True)
                     print(
                         "Arguments: \n",
@@ -1309,6 +1309,12 @@ class ConversableAgent(LLMAgent):
             )
         for tool_call in extracted_response.get("tool_calls") or []:
             tool_call["function"]["name"] = self._normalize_name(tool_call["function"]["name"])
+            # Remove id and type if they are not present.
+            # This is to make the tool call object compatible with Mistral API.
+            if tool_call.get("id") is None:
+                tool_call.pop("id")
+            if tool_call.get("type") is None:
+                tool_call.pop("type")
         return extracted_response

     async def a_generate_oai_reply(
@@ -1525,7 +1531,6 @@ class ConversableAgent(LLMAgent):
         message = messages[-1]
         tool_returns = []
         for tool_call in message.get("tool_calls", []):
-            id = tool_call["id"]
             function_call = tool_call.get("function", {})
             func = self._function_map.get(function_call.get("name", None), None)
             if inspect.iscoroutinefunction(func):
@@ -1543,13 +1548,24 @@ class ConversableAgent(LLMAgent):
                     loop.close()
             else:
                 _, func_return = self.execute_function(function_call)
-            tool_returns.append(
-                {
-                    "tool_call_id": id,
+            content = func_return.get("content", "")
+            if content is None:
+                content = ""
+            tool_call_id = tool_call.get("id", None)
+            if tool_call_id is not None:
+                tool_call_response = {
+                    "tool_call_id": tool_call_id,
                     "role": "tool",
-                    "content": func_return.get("content", ""),
+                    "content": content,
                 }
-            )
+            else:
+                # Do not include tool_call_id if it is not present.
+                # This is to make the tool call object compatible with Mistral API.
+                tool_call_response = {
+                    "role": "tool",
+                    "content": content,
+                }
+            tool_returns.append(tool_call_response)
         if tool_returns:
             return True, {
                 "role": "tool",

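Taken together, the conversable_agent.py changes above normalize tool-call payloads so they validate against the Mistral AI API: `id`/`type` keys whose value is None are dropped from suggested tool calls, and a tool response only carries `tool_call_id` when the originating call actually had an id. A minimal standalone sketch of that behavior (the helper names below are illustrative only, not part of the AutoGen API):

```python
# Standalone sketch of the Mistral-compatibility normalization introduced above.
# The helper names are illustrative only; they are not part of the AutoGen API.
from typing import Optional


def normalize_tool_calls(tool_calls: list) -> list:
    """Drop "id"/"type" keys whose value is None so the payload validates against stricter APIs."""
    for tool_call in tool_calls:
        if tool_call.get("id") is None:
            tool_call.pop("id", None)
        if tool_call.get("type") is None:
            tool_call.pop("type", None)
    return tool_calls


def build_tool_response(tool_call: dict, content: Optional[str]) -> dict:
    """Build a tool-role message, omitting tool_call_id when the originating call had no id."""
    response = {"role": "tool", "content": content if content is not None else ""}
    tool_call_id = tool_call.get("id")
    if tool_call_id is not None:
        response["tool_call_id"] = tool_call_id
    return response


# A Mistral-style tool call without "id"/"type" round-trips without null fields.
calls = normalize_tool_calls([{"function": {"name": "get_weather", "arguments": "{}"}, "id": None, "type": None}])
print(calls)                                # [{'function': {'name': 'get_weather', 'arguments': '{}'}}]
print(build_tool_response(calls[0], None))  # {'role': 'tool', 'content': ''}
```
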
website/.gitignore

@@ -19,6 +19,8 @@ docs/topics/code-execution/*.mdx
 docs/topics/task_decomposition.mdx
 docs/topics/prompting-and-reasoning/*.mdx
 docs/topics/non-openai-models/*.mdx
+docs/topics/non-openai-models/**/*.py
+docs/topics/non-openai-models/**/*.svg

 # Misc
 .DS_Store


@@ -21,7 +21,8 @@ These proxy servers can be cloud-based or running locally within your environment.
 By using cloud-based proxy servers, you are able to use models without requiring the hardware
 and software to run them.

-These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/),
+These providers can host open source/weight models, like [Hugging Face](https://huggingface.co/)
+and [Mistral AI](https://mistral.ai/),
 or their own closed models.

 When cloud-based proxy servers provide an OpenAI-compatible API, using them in AutoGen
@@ -32,7 +33,8 @@ authentication which is usually handled through an API key.
 Examples of using cloud-based proxy servers providers that have an OpenAI-compatible API
 are provided below:

-- [together.ai example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Together AI example](/docs/topics/non-openai-models/cloud-togetherai)
+- [Mistral AI example](/docs/topics/non-openai-models/cloud-mistralai)

 ### Locally run proxy servers

File diff suppressed because one or more lines are too long
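
The documentation diff above adds a Mistral AI entry alongside Together AI as cloud providers with OpenAI-compatible endpoints. As a rough sketch of what such a configuration can look like in AutoGen (the model name, base URL, and environment variable below are assumptions for illustration, not taken from this commit or the suppressed notebook):

```python
# Rough sketch: pointing AutoGen at a cloud provider's OpenAI-compatible endpoint.
# Model name, base_url, and environment variable are assumptions for illustration;
# consult the provider's documentation for the current values.
import os

from autogen import ConversableAgent

config_list = [
    {
        "model": "mistral-large-latest",          # assumed example model name
        "base_url": "https://api.mistral.ai/v1",  # assumed OpenAI-compatible endpoint
        "api_key": os.environ["MISTRAL_API_KEY"],
    }
]

assistant = ConversableAgent(
    name="assistant",
    llm_config={"config_list": config_list},
)
```

Because the endpoint speaks the OpenAI wire format, no provider-specific client code should be needed; the conversable_agent.py changes above handle the stricter tool-call schema that such providers may enforce.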