From f7357499bed782c608fab8fef8cc8ea1a8d4839b Mon Sep 17 00:00:00 2001 From: afourney Date: Tue, 9 Jul 2024 10:46:55 -0700 Subject: [PATCH] Teamone utils (#192) * Added initial code for TeamOne utils. * Fixed hatch errors. * Updated examples. * Fixed more hatch errors. * Improve readme --------- Co-authored-by: gagb --- .../teams/team-one/examples/example_coder.py | 7 +- .../team-one/examples/example_file_surfer.py | 5 +- .../team-one/examples/example_userproxy.py | 17 +---- python/teams/team-one/readme.md | 38 +++++++++++ python/teams/team-one/src/team_one/utils.py | 66 +++++++++++++++++++ 5 files changed, 113 insertions(+), 20 deletions(-) create mode 100755 python/teams/team-one/src/team_one/utils.py diff --git a/python/teams/team-one/examples/example_coder.py b/python/teams/team-one/examples/example_coder.py index bda4e869f..45c19a273 100644 --- a/python/teams/team-one/examples/example_coder.py +++ b/python/teams/team-one/examples/example_coder.py @@ -1,22 +1,21 @@ import asyncio from agnext.application import SingleThreadedAgentRuntime -from agnext.components.models import OpenAIChatCompletionClient, UserMessage +from agnext.components.models import UserMessage from team_one.agents.coder import Coder, Executor from team_one.agents.orchestrator import RoundRobinOrchestrator from team_one.messages import BroadcastMessage +from team_one.utils import create_completion_client_from_env async def main() -> None: # Create the runtime. runtime = SingleThreadedAgentRuntime() - client = OpenAIChatCompletionClient(model="gpt-4o") - # Register agents. coder = runtime.register_and_get_proxy( "Coder", - lambda: Coder(model_client=client), + lambda: Coder(model_client=create_completion_client_from_env()), ) executor = runtime.register_and_get_proxy("Executor", lambda: Executor("A agent for executing code")) diff --git a/python/teams/team-one/examples/example_file_surfer.py b/python/teams/team-one/examples/example_file_surfer.py index b9f884503..4ea9789d6 100644 --- a/python/teams/team-one/examples/example_file_surfer.py +++ b/python/teams/team-one/examples/example_file_surfer.py @@ -1,9 +1,10 @@ import asyncio from agnext.application import SingleThreadedAgentRuntime -from agnext.components.models import OpenAIChatCompletionClient, UserMessage +from agnext.components.models import UserMessage from team_one.agents.file_surfer import FileSurfer from team_one.messages import BroadcastMessage, RequestReplyMessage +from team_one.utils import create_completion_client_from_env async def main() -> None: @@ -13,7 +14,7 @@ async def main() -> None: # Register agents. 
file_surfer = runtime.register_and_get( "file_surfer", - lambda: FileSurfer(model_client=OpenAIChatCompletionClient(model="gpt-4o")), + lambda: FileSurfer(model_client=create_completion_client_from_env()), ) task = input(f"Enter a task for {file_surfer.name}: ") msg = BroadcastMessage(content=UserMessage(content=task, source="human")) diff --git a/python/teams/team-one/examples/example_userproxy.py b/python/teams/team-one/examples/example_userproxy.py index b4ba9c900..21a80c7ac 100644 --- a/python/teams/team-one/examples/example_userproxy.py +++ b/python/teams/team-one/examples/example_userproxy.py @@ -4,30 +4,19 @@ import logging # from typing import Any, Dict, List, Tuple, Union from agnext.application import SingleThreadedAgentRuntime from agnext.application.logging import EVENT_LOGGER_NAME -from agnext.components.models import ( - AzureOpenAIChatCompletionClient, - ModelCapabilities, -) -from azure.identity import DefaultAzureCredential, get_bearer_token_provider from team_one.agents.coder import Coder from team_one.agents.orchestrator import RoundRobinOrchestrator from team_one.agents.user_proxy import UserProxy from team_one.messages import OrchestrationEvent, RequestReplyMessage +from team_one.utils import create_completion_client_from_env async def main() -> None: # Create the runtime. runtime = SingleThreadedAgentRuntime() - # Create the AzureOpenAI client, with AAD auth - token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default") - client = AzureOpenAIChatCompletionClient( - api_version="2024-02-15-preview", - azure_endpoint="https://aif-complex-tasks-west-us-3.openai.azure.com/", - model="gpt-4o-2024-05-13", - model_capabilities=ModelCapabilities(function_calling=True, json_output=True, vision=True), - azure_ad_token_provider=token_provider, - ) + # Get an appropriate client + client = create_completion_client_from_env() # Register agents. coder = runtime.register_and_get_proxy( diff --git a/python/teams/team-one/readme.md b/python/teams/team-one/readme.md index e69de29bb..6e8154f5c 100644 --- a/python/teams/team-one/readme.md +++ b/python/teams/team-one/readme.md @@ -0,0 +1,38 @@ +# Environment Configuration for Chat Completion Client + +This guide outlines how to configure your environment to use the `create_completion_client_from_env` function, which reads environment variables to return an appropriate `ChatCompletionClient`. 
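+
+For example, once the variables described below are set, a script can obtain a configured client without hard-coding any provider details. The following is a minimal sketch; it assumes the returned `ChatCompletionClient`'s usual async `create` method:
+
+```python
+import asyncio
+
+from agnext.components.models import UserMessage
+
+from team_one.utils import create_completion_client_from_env
+
+
+async def main() -> None:
+    # Selects and configures the client from CHAT_COMPLETION_PROVIDER
+    # and CHAT_COMPLETION_KWARGS_JSON (documented below).
+    client = create_completion_client_from_env()
+    response = await client.create([UserMessage(content="Say hello!", source="user")])
+    print(response.content)
+
+
+asyncio.run(main())
+```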
+
+## Azure with Active Directory
+
+To configure for Azure with Active Directory, set the following environment variables:
+
+- `CHAT_COMPLETION_PROVIDER='azure'`
+- `CHAT_COMPLETION_KWARGS_JSON` with the following JSON structure:
+
+```json
+{
+  "api_version": "2024-02-15-preview",
+  "azure_endpoint": "REPLACE_WITH_YOUR_ENDPOINT",
+  "model_capabilities": {
+    "function_calling": true,
+    "json_output": true,
+    "vision": true
+  },
+  "azure_ad_token_provider": "DEFAULT",
+  "model": "gpt-4o-2024-05-13"
+}
+```
+
+## With OpenAI
+
+To configure for OpenAI, set the following environment variables:
+
+- `CHAT_COMPLETION_PROVIDER='openai'`
+- `CHAT_COMPLETION_KWARGS_JSON` with the following JSON structure:
+
+```json
+{
+  "api_key": "REPLACE_WITH_YOUR_API_KEY",
+  "model": "gpt-4o-2024-05-13"
+}
+```
diff --git a/python/teams/team-one/src/team_one/utils.py b/python/teams/team-one/src/team_one/utils.py
new file mode 100755
index 000000000..2ea08e92c
--- /dev/null
+++ b/python/teams/team-one/src/team_one/utils.py
@@ -0,0 +1,66 @@
+import json
+import os
+from typing import Any, Dict
+
+from agnext.components.models import (
+    AzureOpenAIChatCompletionClient,
+    ChatCompletionClient,
+    ModelCapabilities,
+    OpenAIChatCompletionClient,
+)
+
+ENVIRON_KEY_CHAT_COMPLETION_PROVIDER = "CHAT_COMPLETION_PROVIDER"
+ENVIRON_KEY_CHAT_COMPLETION_KWARGS_JSON = "CHAT_COMPLETION_KWARGS_JSON"
+
+# The singleton _default_azure_ad_token_provider, which will be created if needed
+_default_azure_ad_token_provider = None
+
+
+# Create a model client based on information provided in environment variables.
+def create_completion_client_from_env(env: Dict[str, str] | None = None, **kwargs: Any) -> ChatCompletionClient:
+    """
+    Create a model client based on information provided in environment variables.
+        env (Optional): When provided, read from this dictionary rather than os.environ
+        **kwargs: ChatClient arguments to override (e.g., model)
+
+    NOTE: If 'azure_ad_token_provider' is included, and equals the string 'DEFAULT', then it is replaced with
+          azure.identity.get_bearer_token_provider(DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default")
+    """
+    global _default_azure_ad_token_provider
+
+    # If a dictionary was not provided, load it from the environment
+    if env is None:
+        env = dict()
+        env.update(os.environ)
+
+    # Load the kwargs, and override with provided kwargs
+    _kwargs = json.loads(env.get(ENVIRON_KEY_CHAT_COMPLETION_KWARGS_JSON, "{}"))
+    _kwargs.update(kwargs)
+
+    # If model capabilities were provided, deserialize them as well
+    if "model_capabilities" in _kwargs:
+        _kwargs["model_capabilities"] = ModelCapabilities(
+            vision=_kwargs["model_capabilities"].get("vision"),
+            function_calling=_kwargs["model_capabilities"].get("function_calling"),
+            json_output=_kwargs["model_capabilities"].get("json_output"),
+        )
+
+    # Figure out what provider we are using. Default to OpenAI
+    _provider = env.get(ENVIRON_KEY_CHAT_COMPLETION_PROVIDER, "openai").lower().strip()
+
+    # Instantiate the correct client
+    if _provider == "openai":
+        return OpenAIChatCompletionClient(**_kwargs)
+    elif _provider == "azure":
+        if _kwargs.get("azure_ad_token_provider", "").lower() == "default":
+            if _default_azure_ad_token_provider is None:
+                from azure.identity import DefaultAzureCredential, get_bearer_token_provider
+
+                _default_azure_ad_token_provider = get_bearer_token_provider(
+                    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
+                )
+            _kwargs["azure_ad_token_provider"] = _default_azure_ad_token_provider
+        return AzureOpenAIChatCompletionClient(**_kwargs)
+    else:
+        raise ValueError(f"Unknown OAI provider '{_provider}'")
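
The `env` parameter and keyword overrides can also be exercised directly from code. A minimal sketch, using placeholder values that mirror the readme:

```python
from team_one.utils import create_completion_client_from_env

# Read settings from an explicit dictionary instead of os.environ,
# and override the configured model via a keyword argument.
env = {
    "CHAT_COMPLETION_PROVIDER": "openai",
    "CHAT_COMPLETION_KWARGS_JSON": '{"api_key": "REPLACE_WITH_YOUR_API_KEY", "model": "gpt-4o-2024-05-13"}',
}
client = create_completion_client_from_env(env=env, model="gpt-4o")
```

Because a provided `env` dictionary replaces, rather than supplements, `os.environ`, everything the client needs must appear in that dictionary.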