Migrate model context and models modules out of components (#4613)

* Move model context out of components

* move models out of components

* rename docs file
This commit is contained in:
Jack Gerrits
2024-12-09 13:00:08 -05:00
committed by GitHub
parent 3817b8ddf6
commit 87011ae01b
79 changed files with 1527 additions and 1359 deletions

View File

@@ -27,8 +27,8 @@ python/autogen_agentchat.state
python/autogen_core
python/autogen_core.code_executor
python/autogen_core.components.models
python/autogen_core.components.model_context
python/autogen_core.models
python/autogen_core.model_context
python/autogen_core.components.tools
python/autogen_core.components.tool_agent
python/autogen_core.exceptions

View File

@@ -1,8 +1,8 @@
autogen\_core.components.model\_context
autogen\_core.model\_context
=======================================
.. automodule:: autogen_core.components.model_context
.. automodule:: autogen_core.model_context
:members:
:undoc-members:
:show-inheritance:

View File

@@ -1,8 +1,8 @@
autogen\_core.components.models
autogen\_core.models
===============================
.. automodule:: autogen_core.components.models
.. automodule:: autogen_core.models
:members:
:undoc-members:
:show-inheritance:

View File

@@ -73,7 +73,7 @@
}
],
"source": [
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"\n",
"result = await openai_model_client.create([UserMessage(content=\"What is the capital of France?\", source=\"user\")])\n",
"print(result)"

View File

@@ -49,7 +49,7 @@
" message_handler,\n",
")\n",
"from autogen_core.components.model_context import BufferedChatCompletionContext\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" SystemMessage,\n",

View File

@@ -65,7 +65,7 @@
"import os\n",
"from typing import Optional\n",
"\n",
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
"\n",
"\n",

View File

@@ -29,13 +29,13 @@
" message_handler,\n",
")\n",
"from autogen_core.base.intervention import DefaultInterventionHandler, DropMessage\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import PythonCodeExecutionTool, ToolSchema\n",
"from autogen_core.models import (\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import PythonCodeExecutionTool, ToolSchema\n",
"from autogen_core.tool_agent import ToolAgent, ToolException, tool_agent_caller_loop\n",
"from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor\n",
"from autogen_ext.models import OpenAIChatCompletionClient"

View File

@@ -54,7 +54,7 @@
")\n",
"from autogen_core._default_subscription import DefaultSubscription\n",
"from autogen_core._default_topic import DefaultTopicId\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" SystemMessage,\n",
")"
]

View File

@@ -65,7 +65,8 @@
" TypeSubscription,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import FunctionTool, Tool\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" FunctionExecutionResult,\n",
@@ -74,7 +75,6 @@
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import FunctionTool, Tool\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"from pydantic import BaseModel"
]
@@ -120,7 +120,7 @@
"\n",
"We start with the `AIAgent` class, which is the class for all AI agents \n",
"(i.e., Triage, Sales, and Issue and Repair Agents) in the multi-agent chatbot.\n",
"An `AIAgent` uses a {py:class}`~autogen_core.components.models.ChatCompletionClient`\n",
"An `AIAgent` uses a {py:class}`~autogen_core.models.ChatCompletionClient`\n",
"to generate responses.\n",
"It can use regular tools directly or delegate tasks to other agents using `delegate_tools`.\n",
"It subscribes to topic type `agent_topic_type` to receive messages from the customer,\n",

View File

@@ -39,7 +39,7 @@
"from typing import List\n",
"\n",
"from autogen_core import AgentId, MessageContext, RoutedAgent, SingleThreadedAgentRuntime, message_handler\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient"
]
},

View File

@@ -52,7 +52,7 @@
" default_subscription,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",

View File

@@ -101,7 +101,7 @@
"from typing import Dict, List, Union\n",
"\n",
"from autogen_core import MessageContext, RoutedAgent, TopicId, default_subscription, message_handler\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
@@ -258,7 +258,7 @@
"- It stores message histories for different `CodeWritingTask` in a dictionary,\n",
"so each task has its own history.\n",
"- When making an LLM inference request using its model client, it transforms\n",
"the message history into a list of {py:class}`autogen_core.components.models.LLMMessage` objects\n",
"the message history into a list of {py:class}`autogen_core.models.LLMMessage` objects\n",
"to pass to the model client.\n",
"\n",
"The reviewer agent subscribes to the `CodeReviewTask` message and publishes the `CodeReviewResult` message."

View File

@@ -57,7 +57,7 @@
" message_handler,\n",
" type_subscription,\n",
")\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient"
]
},

View File

@@ -6,9 +6,9 @@
"source": [
"# Model Clients\n",
"\n",
"AutoGen provides the {py:mod}`autogen_core.components.models` module with a suite of built-in\n",
"AutoGen provides the {py:mod}`autogen_core.models` module with a suite of built-in\n",
"model clients for using ChatCompletion API.\n",
"All model clients implement the {py:class}`~autogen_core.components.models.ChatCompletionClient` protocol class."
"All model clients implement the {py:class}`~autogen_core.models.ChatCompletionClient` protocol class."
]
},
{
@@ -32,7 +32,7 @@
"metadata": {},
"outputs": [],
"source": [
"from autogen_core.components.models import UserMessage\n",
"from autogen_core.models import UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
"# Create an OpenAI model client.\n",
@@ -47,7 +47,7 @@
"metadata": {},
"source": [
"You can call the {py:meth}`~autogen_ext.models.OpenAIChatCompletionClient.create` method to create a\n",
"chat completion request, and await for an {py:class}`~autogen_core.components.models.CreateResult` object in return."
"chat completion request, and await for an {py:class}`~autogen_core.models.CreateResult` object in return."
]
},
{
@@ -168,7 +168,7 @@
"source": [
"```{note}\n",
"The last response in the streaming response is always the final response\n",
"of the type {py:class}`~autogen_core.components.models.CreateResult`.\n",
"of the type {py:class}`~autogen_core.models.CreateResult`.\n",
"```\n",
"\n",
"**Note: by default, the usage field of the response contains zero values.**"
@@ -333,7 +333,7 @@
"from dataclasses import dataclass\n",
"\n",
"from autogen_core import MessageContext, RoutedAgent, SingleThreadedAgentRuntime, message_handler\n",
"from autogen_core.components.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
"\n",
@@ -474,7 +474,7 @@
"outputs": [],
"source": [
"from autogen_core.components.model_context import BufferedChatCompletionContext\n",
"from autogen_core.components.models import AssistantMessage\n",
"from autogen_core.models import AssistantMessage\n",
"\n",
"\n",
"class SimpleAgentWithContext(RoutedAgent):\n",

View File

@@ -163,13 +163,13 @@
" SingleThreadedAgentRuntime,\n",
" message_handler,\n",
")\n",
"from autogen_core.components.models import (\n",
"from autogen_core.components.tools import FunctionTool, Tool, ToolSchema\n",
"from autogen_core.models import (\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",
" SystemMessage,\n",
" UserMessage,\n",
")\n",
"from autogen_core.components.tools import FunctionTool, Tool, ToolSchema\n",
"from autogen_core.tool_agent import ToolAgent, tool_agent_caller_loop\n",
"from autogen_ext.models import OpenAIChatCompletionClient\n",
"\n",
@@ -267,7 +267,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"This example uses the {py:class}`autogen_core.components.models.OpenAIChatCompletionClient`,\n",
"This example uses the {py:class}`autogen_core.models.OpenAIChatCompletionClient`,\n",
"for Azure OpenAI and other clients, see [Model Clients](./model-clients.ipynb).\n",
"Let's test the agent with a question about stock price."
]

View File

@@ -37,7 +37,7 @@
"\n",
"from autogen_core import DefaultTopicId, MessageContext, RoutedAgent, default_subscription, message_handler\n",
"from autogen_core.code_executor import CodeBlock, CodeExecutor\n",
"from autogen_core.components.models import (\n",
"from autogen_core.models import (\n",
" AssistantMessage,\n",
" ChatCompletionClient,\n",
" LLMMessage,\n",