Mirror of https://github.com/microsoft/autogen.git
Add models.openai and tools.langchain namespaces (#4601)
* add models.openai namespace
* refactor tools namespace
* update lock file
* revert pyproject changes
* update docs and add cast
* update ext models doc ref
* increase underline
* add reply models namespace
* update imports
* fix test
* linting
* fix missing conflicts
* revert pydantic changes
* rename to replay
* replay
* fix reply
* Fix test
* formatting
* example

---------

Co-authored-by: Leonardo Pinheiro <lpinheiro@microsoft.com>
Co-authored-by: Jack Gerrits <jack@jackgerrits.com>
Co-authored-by: Jack Gerrits <jackgerrits@users.noreply.github.com>
Committed by: GitHub
Parent: 3e5e12bff0
Commit: 253fe216fd
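For downstream code, the practical effect of this commit is a set of import-path moves; the client class names themselves are unchanged. A minimal migration sketch follows (the ReplayChatCompletionClient and LangChainToolAdapter names are assumptions based on the new namespace names and are not shown in this diff):

```python
# Before this commit (old flat namespaces):
# from autogen_ext.models import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
# from autogen_ext.tools import LangChainToolAdapter

# After this commit (dedicated submodules):
from autogen_ext.models.openai import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
from autogen_ext.models.replay import ReplayChatCompletionClient  # assumed class name in the new replay namespace
from autogen_ext.tools.langchain import LangChainToolAdapter  # assumed class name in the new langchain namespace
```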
@@ -45,8 +45,9 @@ python/autogen_ext.agents.web_surfer
 python/autogen_ext.agents.file_surfer
 python/autogen_ext.agents.video_surfer
 python/autogen_ext.agents.video_surfer.tools
-python/autogen_ext.models
-python/autogen_ext.tools
+python/autogen_ext.models.openai
+python/autogen_ext.models.replay
+python/autogen_ext.tools.langchain
 python/autogen_ext.code_executors.local
 python/autogen_ext.code_executors.docker
 python/autogen_ext.code_executors.azure
@@ -0,0 +1,8 @@
+autogen\_ext.models.openai
+==========================
+
+
+.. automodule:: autogen_ext.models.openai
+    :members:
+    :undoc-members:
+    :show-inheritance:
@@ -0,0 +1,8 @@
+autogen\_ext.models.replay
+==========================
+
+
+.. automodule:: autogen_ext.models.replay
+    :members:
+    :undoc-members:
+    :show-inheritance:
@@ -1,8 +0,0 @@
-autogen\_ext.models
-===================
-
-
-.. automodule:: autogen_ext.models
-    :members:
-    :undoc-members:
-    :show-inheritance:
@@ -27,7 +27,7 @@
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
 "from autogen_core.tools import FunctionTool\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -27,7 +27,7 @@
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
 "from autogen_core.tools import FunctionTool\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -21,7 +21,7 @@
 "from autogen_agentchat.conditions import TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -76,7 +76,7 @@
 "from autogen_agentchat.conditions import TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "\n",
 "# Define a tool\n",
@@ -33,7 +33,7 @@
 "from autogen_agentchat.agents import AssistantAgent\n",
 "from autogen_agentchat.messages import TextMessage\n",
 "from autogen_core import CancellationToken\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "\n",
 "# Define a tool that searches the web for information.\n",
@@ -44,7 +44,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "opneai_model_client = OpenAIChatCompletionClient(\n",
 " model=\"gpt-4o-2024-08-06\",\n",
@@ -128,7 +128,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import AzureOpenAIChatCompletionClient\n",
 "from azure.identity import DefaultAzureCredential, get_bearer_token_provider\n",
 "\n",
 "# Create the token provider\n",
@@ -67,7 +67,7 @@
 "from autogen_agentchat.messages import AgentMessage\n",
 "from autogen_agentchat.teams import SelectorGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -39,7 +39,7 @@
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
 "from autogen_core import CancellationToken\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "assistant_agent = AssistantAgent(\n",
 " name=\"assistant_agent\",\n",
@@ -100,7 +100,7 @@
 "from autogen_agentchat.messages import HandoffMessage\n",
 "from autogen_agentchat.teams import Swarm\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -33,7 +33,7 @@
 "from autogen_agentchat.agents import AssistantAgent\n",
 "from autogen_agentchat.conditions import TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "# Create an OpenAI model client.\n",
 "model_client = OpenAIChatCompletionClient(\n",
@@ -260,7 +260,7 @@
 "from autogen_agentchat.conditions import MaxMessageTermination, TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "# Create an OpenAI model client.\n",
 "model_client = OpenAIChatCompletionClient(\n",
@@ -633,7 +633,7 @@
 "from autogen_agentchat.base import Handoff\n",
 "from autogen_agentchat.conditions import HandoffTermination, TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "# Create an OpenAI model client.\n",
 "model_client = OpenAIChatCompletionClient(\n",
@@ -58,7 +58,7 @@
 "from autogen_agentchat.conditions import MaxMessageTermination, TextMentionTermination\n",
 "from autogen_agentchat.teams import RoundRobinGroupChat\n",
 "from autogen_agentchat.ui import Console\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "model_client = OpenAIChatCompletionClient(\n",
 " model=\"gpt-4o\",\n",
@@ -15,7 +15,7 @@ pip install azure-identity
 ## Using the Model Client
 
 ```python
-from autogen_ext.models import AzureOpenAIChatCompletionClient
+from autogen_ext.models.openai import AzureOpenAIChatCompletionClient
 from azure.identity import DefaultAzureCredential, get_bearer_token_provider
 
 # Create the token provider
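The hunk above only shows the import line changing. For orientation, here is a minimal sketch of how the surrounding documentation snippet continues, assuming the Azure client's documented keyword arguments (azure_deployment, azure_endpoint, api_version, azure_ad_token_provider); the endpoint and deployment values below are placeholders, not taken from this diff:

```python
from autogen_ext.models.openai import AzureOpenAIChatCompletionClient
from azure.identity import DefaultAzureCredential, get_bearer_token_provider

# Create the token provider for Entra ID ("keyless") authentication.
token_provider = get_bearer_token_provider(
    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)

# Placeholder deployment/endpoint values; only the import path comes from this diff.
az_model_client = AzureOpenAIChatCompletionClient(
    model="gpt-4o",
    azure_deployment="my-gpt-4o-deployment",
    api_version="2024-06-01",
    azure_endpoint="https://my-resource.openai.azure.com/",
    azure_ad_token_provider=token_provider,
)
```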
@@ -55,7 +55,7 @@
 " SystemMessage,\n",
 " UserMessage,\n",
 ")\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -66,7 +66,7 @@
 "from typing import Optional\n",
 "\n",
 "from autogen_core.models import UserMessage\n",
-"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import AzureOpenAIChatCompletionClient\n",
 "\n",
 "\n",
 "# Function to get environment variable and ensure it is not None\n",
@@ -38,7 +38,7 @@
 "from autogen_core.tool_agent import ToolAgent, ToolException, tool_agent_caller_loop\n",
 "from autogen_core.tools import PythonCodeExecutionTool, ToolSchema\n",
 "from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -91,7 +91,7 @@
 " UserMessage,\n",
 ")\n",
 "from autogen_core.tools import FunctionTool\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "from IPython.display import display # type: ignore\n",
 "from pydantic import BaseModel\n",
 "from rich.console import Console\n",
@@ -75,7 +75,7 @@
 " UserMessage,\n",
 ")\n",
 "from autogen_core.tools import FunctionTool, Tool\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "from pydantic import BaseModel"
 ]
 },
@@ -296,7 +296,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 33,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
File diff suppressed because it is too large.
@@ -59,7 +59,7 @@
 " SystemMessage,\n",
 " UserMessage,\n",
 ")\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -442,7 +442,7 @@
 ],
 "source": [
 "from autogen_core import DefaultTopicId, SingleThreadedAgentRuntime\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "runtime = SingleThreadedAgentRuntime()\n",
 "await ReviewerAgent.register(\n",
@@ -58,7 +58,7 @@
 " type_subscription,\n",
 ")\n",
 "from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
-"from autogen_ext.models import OpenAIChatCompletionClient"
+"from autogen_ext.models.openai import OpenAIChatCompletionClient"
 ]
 },
 {
@@ -46,7 +46,7 @@ Model capabilites are additional capabilities an LLM may have beyond the standar
 Model capabilities can be passed into a model, which will override the default definitions. These capabilities will not affect what the underlying model is actually capable of, but will allow or disallow behaviors associated with them. This is particularly useful when [using local LLMs](cookbook/local-llms-ollama-litellm.ipynb).
 
 ```python
-from autogen_ext.models import OpenAIChatCompletionClient
+from autogen_ext.models.openai import OpenAIChatCompletionClient
 
 client = OpenAIChatCompletionClient(
     model="gpt-4o",
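As above, the diff shows only the import change; a sketch of how the documented snippet plausibly completes, assuming the model_capabilities keyword used by the 0.4-preview clients (the specific flags below are illustrative, not taken from this diff):

```python
from autogen_ext.models.openai import OpenAIChatCompletionClient

# Override the client's default capability definitions, e.g. for a local model
# served behind an OpenAI-compatible endpoint. The keyword and flags are assumed
# from the 0.4-preview documentation, not shown in this diff.
client = OpenAIChatCompletionClient(
    model="gpt-4o",
    model_capabilities={
        "vision": True,
        "function_calling": True,
        "json_output": True,
    },
)
```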
@@ -33,7 +33,7 @@
 "outputs": [],
 "source": [
 "from autogen_core.models import UserMessage\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "# Create an OpenAI model client.\n",
 "model_client = OpenAIChatCompletionClient(\n",
@@ -290,7 +290,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from autogen_ext.models import AzureOpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import AzureOpenAIChatCompletionClient\n",
 "from azure.identity import DefaultAzureCredential, get_bearer_token_provider\n",
 "\n",
 "# Create the token provider\n",
@@ -334,7 +334,7 @@
 "\n",
 "from autogen_core import MessageContext, RoutedAgent, SingleThreadedAgentRuntime, message_handler\n",
 "from autogen_core.models import ChatCompletionClient, SystemMessage, UserMessage\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "\n",
 "@dataclass\n",
@@ -171,7 +171,7 @@
 ")\n",
 "from autogen_core.tool_agent import ToolAgent, tool_agent_caller_loop\n",
 "from autogen_core.tools import FunctionTool, Tool, ToolSchema\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "\n",
 "@dataclass\n",
@@ -324,7 +324,7 @@
 "\n",
 "from autogen_core import SingleThreadedAgentRuntime\n",
 "from autogen_ext.code_executors import DockerCommandLineCodeExecutor\n",
-"from autogen_ext.models import OpenAIChatCompletionClient\n",
+"from autogen_ext.models.openai import OpenAIChatCompletionClient\n",
 "\n",
 "work_dir = tempfile.mkdtemp()\n",
 "\n",
@@ -9,7 +9,7 @@ from autogen_core.models import (
     LLMMessage,
     UserMessage,
 )
-from autogen_ext.models import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
+from autogen_ext.models.openai import AzureOpenAIChatCompletionClient, OpenAIChatCompletionClient
 from azure.identity import DefaultAzureCredential, get_bearer_token_provider
 from typing_extensions import Literal
 
@@ -4,7 +4,7 @@ from typing import Dict
 from autogen_core.models import (
     LLMMessage,
 )
-from autogen_ext.models import AzureOpenAIClientConfiguration
+from autogen_ext.models.openai import AzureOpenAIClientConfiguration
 from pydantic import BaseModel
 
 
@@ -5,7 +5,7 @@ from typing import Any, Iterable, Type
 import yaml
 from _types import AppConfig
 from autogen_core import MessageSerializer, try_get_known_serializers_for_type
-from autogen_ext.models import AzureOpenAIClientConfiguration
+from autogen_ext.models.openai import AzureOpenAIClientConfiguration
 from azure.identity import DefaultAzureCredential, get_bearer_token_provider
 
 
@@ -8,7 +8,7 @@ from _utils import get_serializers, load_config, set_all_log_levels
 from autogen_core import (
     TypeSubscription,
 )
-from autogen_ext.models import AzureOpenAIChatCompletionClient
+from autogen_ext.models.openai import AzureOpenAIChatCompletionClient
 from autogen_ext.runtimes.grpc import GrpcWorkerAgentRuntime
 from rich.console import Console
 from rich.markdown import Markdown
@@ -8,7 +8,7 @@ from _utils import get_serializers, load_config, set_all_log_levels
 from autogen_core import (
     TypeSubscription,
 )
-from autogen_ext.models import AzureOpenAIChatCompletionClient
+from autogen_ext.models.openai import AzureOpenAIChatCompletionClient
 from autogen_ext.runtimes.grpc import GrpcWorkerAgentRuntime
 from rich.console import Console
 from rich.markdown import Markdown
@@ -8,7 +8,7 @@ from _utils import get_serializers, load_config, set_all_log_levels
 from autogen_core import (
     TypeSubscription,
 )
-from autogen_ext.models import AzureOpenAIChatCompletionClient
+from autogen_ext.models.openai import AzureOpenAIChatCompletionClient
 from autogen_ext.runtimes.grpc import GrpcWorkerAgentRuntime
 from rich.console import Console
 from rich.markdown import Markdown