mirror of
https://github.com/microsoft/autogen.git
synced 2026-04-20 03:02:16 -04:00
migrate models (#3848)
* migrate models * Update python/packages/autogen-agentchat/src/autogen_agentchat/agents/_tool_use_assistant_agent.py Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com> * refactor missing imports * ignore type check errors * Update python/packages/autogen-ext/src/autogen_ext/models/_openai/_model_info.py Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com> * update packages index page --------- Co-authored-by: Leonardo Pinheiro <lpinheiro@microsoft.com> Co-authored-by: Eric Zhu <ekzhu@users.noreply.github.com>
This commit is contained in:
committed by
GitHub
parent
b7509b3659
commit
38f62e1609
@@ -65,9 +65,10 @@ pip install autogen-ext==0.4.0dev1
|
||||
|
||||
Extras:
|
||||
|
||||
- `langchain-tools` needed for {py:class}`~autogen_ext.tools.LangChainToolAdapter`
|
||||
- `azure-code-executor` needed for {py:class}`~autogen_ext.code_executors.ACADynamicSessionsCodeExecutor`
|
||||
- `docker-code-executor` needed for {py:class}`~autogen_ext.code_executors.DockerCommandLineCodeExecutor`
|
||||
- `langchain` needed for {py:class}`~autogen_ext.tools.LangChainToolAdapter`
|
||||
- `azure` needed for {py:class}`~autogen_ext.code_executors.ACADynamicSessionsCodeExecutor`
|
||||
- `docker` needed for {py:class}`~autogen_ext.code_executors.DockerCommandLineCodeExecutor`
|
||||
- `openai` needed for {py:class}`~autogen_ext.models.OpenAIChatCompletionClient`
|
||||
|
||||
[{fas}`circle-info;pst-color-primary` User Guide](/user-guide/extensions-user-guide/index.md) | [{fas}`file-code;pst-color-primary` API Reference](/reference/python/autogen_ext/autogen_ext.rst) | [{fab}`python;pst-color-primary` PyPI](https://pypi.org/project/autogen-ext/0.4.0.dev1/) | [{fab}`github;pst-color-primary` Source](https://github.com/microsoft/autogen/tree/main/python/packages/autogen-ext)
|
||||
:::
|
||||
|
||||
@@ -24,8 +24,8 @@
|
||||
"source": [
|
||||
"from autogen_agentchat.agents import CodingAssistantAgent, ToolUseAssistantAgent\n",
|
||||
"from autogen_agentchat.teams import RoundRobinGroupChat, StopMessageTermination\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient\n",
|
||||
"from autogen_core.components.tools import FunctionTool"
|
||||
"from autogen_core.components.tools import FunctionTool\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -24,8 +24,8 @@
|
||||
"source": [
|
||||
"from autogen_agentchat.agents import CodingAssistantAgent, ToolUseAssistantAgent\n",
|
||||
"from autogen_agentchat.teams import RoundRobinGroupChat, StopMessageTermination\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient\n",
|
||||
"from autogen_core.components.tools import FunctionTool"
|
||||
"from autogen_core.components.tools import FunctionTool\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"source": [
|
||||
"from autogen_agentchat.agents import CodingAssistantAgent\n",
|
||||
"from autogen_agentchat.teams import RoundRobinGroupChat, StopMessageTermination\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient"
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -50,8 +50,8 @@
|
||||
")\n",
|
||||
"from autogen_agentchat.teams import SelectorGroupChat, StopMessageTermination\n",
|
||||
"from autogen_core.base import CancellationToken\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient\n",
|
||||
"from autogen_core.components.tools import FunctionTool"
|
||||
"from autogen_core.components.tools import FunctionTool\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -15,7 +15,7 @@ pip install azure-identity
|
||||
## Using the Model Client
|
||||
|
||||
```python
|
||||
from autogen_core.components.models import AzureOpenAIChatCompletionClient
|
||||
from autogen_ext.models import AzureOpenAIChatCompletionClient
|
||||
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
|
||||
|
||||
# Create the token provider
|
||||
|
||||
@@ -65,7 +65,8 @@
|
||||
"import os\n",
|
||||
"from typing import Optional\n",
|
||||
"\n",
|
||||
"from autogen_core.components.models import AzureOpenAIChatCompletionClient, UserMessage\n",
|
||||
"from autogen_core.components.models import UserMessage\n",
|
||||
"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# Function to get environment variable and ensure it is not None\n",
|
||||
|
||||
@@ -26,13 +26,13 @@
|
||||
"from autogen_core.components.models import (\n",
|
||||
" ChatCompletionClient,\n",
|
||||
" LLMMessage,\n",
|
||||
" OpenAIChatCompletionClient,\n",
|
||||
" SystemMessage,\n",
|
||||
" UserMessage,\n",
|
||||
")\n",
|
||||
"from autogen_core.components.tool_agent import ToolAgent, ToolException, tool_agent_caller_loop\n",
|
||||
"from autogen_core.components.tools import PythonCodeExecutionTool, ToolSchema\n",
|
||||
"from autogen_ext.code_executors import DockerCommandLineCodeExecutor"
|
||||
"from autogen_ext.code_executors import DockerCommandLineCodeExecutor\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -86,11 +86,11 @@
|
||||
" AssistantMessage,\n",
|
||||
" ChatCompletionClient,\n",
|
||||
" LLMMessage,\n",
|
||||
" OpenAIChatCompletionClient,\n",
|
||||
" SystemMessage,\n",
|
||||
" UserMessage,\n",
|
||||
")\n",
|
||||
"from autogen_core.components.tools import FunctionTool\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient\n",
|
||||
"from IPython.display import display # type: ignore\n",
|
||||
"from pydantic import BaseModel\n",
|
||||
"from rich.console import Console\n",
|
||||
|
||||
@@ -65,11 +65,11 @@
|
||||
" FunctionExecutionResult,\n",
|
||||
" FunctionExecutionResultMessage,\n",
|
||||
" LLMMessage,\n",
|
||||
" OpenAIChatCompletionClient,\n",
|
||||
" SystemMessage,\n",
|
||||
" UserMessage,\n",
|
||||
")\n",
|
||||
"from autogen_core.components.tools import FunctionTool, Tool\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient\n",
|
||||
"from pydantic import BaseModel"
|
||||
]
|
||||
},
|
||||
@@ -459,7 +459,7 @@
|
||||
"We have defined the AI agents, the Human Agent, the User Agent, the tools, and the topic types.\n",
|
||||
"Now we can create the team of agents.\n",
|
||||
"\n",
|
||||
"For the AI agents, we use the {py:class}`~autogen_core.components.models.OpenAIChatCompletionClient`\n",
|
||||
"For the AI agents, we use the {py:class}`~autogen_ext.models.OpenAIChatCompletionClient`\n",
|
||||
"and `gpt-4o-mini` model.\n",
|
||||
"\n",
|
||||
"After creating the agent runtime, we register each of the agent by providing\n",
|
||||
|
||||
@@ -444,7 +444,7 @@
|
||||
"source": [
|
||||
"from autogen_core.application import SingleThreadedAgentRuntime\n",
|
||||
"from autogen_core.components import DefaultTopicId\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient\n",
|
||||
"\n",
|
||||
"runtime = SingleThreadedAgentRuntime()\n",
|
||||
"await ReviewerAgent.register(\n",
|
||||
|
||||
@@ -18,11 +18,11 @@
|
||||
"## Built-in Model Clients\n",
|
||||
"\n",
|
||||
"Currently there are two built-in model clients:\n",
|
||||
"{py:class}`~autogen_core.components.models.OpenAIChatCompletionClient` and\n",
|
||||
"{py:class}`~autogen_core.components.models.AzureOpenAIChatCompletionClient`.\n",
|
||||
"{py:class}`~autogen_ext.models.OpenAIChatCompletionClient` and\n",
|
||||
"{py:class}`~autogen_ext.models.AzureOpenAIChatCompletionClient`.\n",
|
||||
"Both clients are asynchronous.\n",
|
||||
"\n",
|
||||
"To use the {py:class}`~autogen_core.components.models.OpenAIChatCompletionClient`, you need to provide the API key\n",
|
||||
"To use the {py:class}`~autogen_ext.models.OpenAIChatCompletionClient`, you need to provide the API key\n",
|
||||
"either through the environment variable `OPENAI_API_KEY` or through the `api_key` argument."
|
||||
]
|
||||
},
|
||||
@@ -32,7 +32,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient, UserMessage\n",
|
||||
"from autogen_core.components.models import UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
|
||||
"\n",
|
||||
"# Create an OpenAI model client.\n",
|
||||
"model_client = OpenAIChatCompletionClient(\n",
|
||||
@@ -45,7 +45,7 @@
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"You can call the {py:meth}`~autogen_core.components.models.OpenAIChatCompletionClient.create` method to create a\n",
|
||||
"You can call the {py:meth}`~autogen_ext.models.OpenAIChatCompletionClient.create` method to create a\n",
|
||||
"chat completion request, and await for an {py:class}`~autogen_core.components.models.CreateResult` object in return."
|
||||
]
|
||||
},
|
||||
@@ -79,7 +79,7 @@
|
||||
"source": [
|
||||
"### Streaming Response\n",
|
||||
"\n",
|
||||
"You can use the {py:meth}`~autogen_core.components.models.OpenAIChatCompletionClient.create_streaming` method to create a\n",
|
||||
"You can use the {py:meth}`~autogen_ext.models.OpenAIChatCompletionClient.create_streaming` method to create a\n",
|
||||
"chat completion request with streaming response."
|
||||
]
|
||||
},
|
||||
@@ -151,7 +151,7 @@
|
||||
"source": [
|
||||
"### Azure OpenAI\n",
|
||||
"\n",
|
||||
"To use the {py:class}`~autogen_core.components.models.AzureOpenAIChatCompletionClient`, you need to provide\n",
|
||||
"To use the {py:class}`~autogen_ext.models.AzureOpenAIChatCompletionClient`, you need to provide\n",
|
||||
"the deployment id, Azure Cognitive Services endpoint, api version, and model capabilities.\n",
|
||||
"For authentication, you can either provide an API key or an Azure Active Directory (AAD) token credential.\n",
|
||||
"To use AAD authentication, you need to first install the `azure-identity` package."
|
||||
@@ -184,7 +184,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from autogen_core.components.models import AzureOpenAIChatCompletionClient\n",
|
||||
"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
|
||||
"from azure.identity import DefaultAzureCredential, get_bearer_token_provider\n",
|
||||
"\n",
|
||||
"# Create the token provider\n",
|
||||
|
||||
@@ -312,8 +312,8 @@
|
||||
"import tempfile\n",
|
||||
"\n",
|
||||
"from autogen_core.application import SingleThreadedAgentRuntime\n",
|
||||
"from autogen_core.components.models import OpenAIChatCompletionClient\n",
|
||||
"from autogen_ext.code_executors import DockerCommandLineCodeExecutor\n",
|
||||
"from autogen_ext.models import OpenAIChatCompletionClient\n",
|
||||
"\n",
|
||||
"work_dir = tempfile.mkdtemp()\n",
|
||||
"\n",
|
||||
|
||||
Reference in New Issue
Block a user