Allow m1 cli to read a configuration from a yaml file. (#5341)

Allow m1 cli to read a configuration from a yaml file.
This commit is contained in:
afourney
2025-02-03 12:11:42 -08:00
committed by GitHub
parent 3d00457993
commit cd88757cac
3 changed files with 82 additions and 7 deletions

View File

@@ -15,6 +15,7 @@ classifiers = [
"Operating System :: OS Independent",
]
dependencies = [
"pyyaml>=5.1",
"autogen-agentchat>=0.4.4,<0.5",
"autogen-ext[openai,magentic-one,rich]>=0.4.4,<0.5",
]
@@ -23,8 +24,9 @@ dependencies = [
m1 = "magentic_one_cli._m1:main"
[dependency-groups]
dev = []
dev = [
"types-PyYAML",
]
[tool.ruff]
extend = "../../pyproject.toml"

View File

@@ -1,17 +1,30 @@
import argparse
import asyncio
import os
import sys
import warnings
from typing import Optional
from typing import Any, Dict, Optional
import yaml
from autogen_agentchat.ui import Console, UserInputManager
from autogen_core import CancellationToken
from autogen_ext.models.openai import OpenAIChatCompletionClient
from autogen_core.models import ChatCompletionClient
from autogen_ext.teams.magentic_one import MagenticOne
from autogen_ext.ui import RichConsole
# Suppress warnings about the requests.Session() not being closed
warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
# Name of the model-configuration file probed when --config is not given.
DEFAULT_CONFIG_FILE = "config.yaml"
# Built-in fallback configuration, used when DEFAULT_CONFIG_FILE does not
# exist.  Must parse to a mapping with a top-level "client" key holding a
# provider + config block, since main() passes config["client"] to
# ChatCompletionClient.load_component().  The nested keys therefore need
# YAML indentation: "provider" and "config" under "client", "model" under
# "config".
DEFAULT_CONFIG_CONTENTS = """# config.yaml
#
client:
  provider: autogen_ext.models.openai.OpenAIChatCompletionClient
  config:
    model: gpt-4o
"""
async def cancellable_input(prompt: str, cancellation_token: Optional[CancellationToken]) -> str:
task: asyncio.Task[str] = asyncio.create_task(asyncio.to_thread(input, prompt))
@@ -32,11 +45,23 @@ def main() -> None:
task (str): The task to be executed by MagenticOne.
--no-hil: Optional flag to disable human-in-the-loop mode.
--rich: Optional flag to enable rich console output.
--config: Optional flag to specify an alternate model configuration
Example usage:
python magentic_one_cli.py "example task"
python magentic_one_cli.py --no-hil "example task"
python magentic_one_cli.py --rich "example task"
python magentic_one_cli.py --config config.yaml "example task"
Use --sample-config to print a sample configuration file.
Example:
python magentic_one_cli.py --sample-config
NOTE:
If --config is not specified, the configuration is loaded from the
file DEFAULT_CONFIG_FILE. If that file does not exist, load from
DEFAULT_CONFIG_CONTENTS.
"""
parser = argparse.ArgumentParser(
description=(
@@ -44,18 +69,50 @@ def main() -> None:
"For more information, refer to the following paper: https://arxiv.org/abs/2411.04468"
)
)
parser.add_argument("task", type=str, nargs=1, help="The task to be executed by MagenticOne.")
parser.add_argument("task", type=str, nargs="?", help="The task to be executed by MagenticOne.")
parser.add_argument("--no-hil", action="store_true", help="Disable human-in-the-loop mode.")
parser.add_argument(
"--rich",
action="store_true",
help="Enable rich console output",
)
parser.add_argument(
"--config",
type=str,
nargs=1,
help="The model configuration file to use. Leave empty to print a sample configuration.",
)
parser.add_argument("--sample-config", action="store_true", help="Print a sample configuration to console.")
args = parser.parse_args()
if args.sample_config:
sys.stdout.write(DEFAULT_CONFIG_CONTENTS + "\n")
return
# We're not printing a sample, so we need a task
if args.task is None:
parser.print_usage()
return
# Load the configuration
config: Dict[str, Any] = {}
if args.config is None:
if os.path.isfile(DEFAULT_CONFIG_FILE):
with open(DEFAULT_CONFIG_FILE, "r") as f:
config = yaml.safe_load(f)
else:
config = yaml.safe_load(DEFAULT_CONFIG_CONTENTS)
else:
with open(args.config[0], "r") as f:
config = yaml.safe_load(f)
client = ChatCompletionClient.load_component(config["client"])
# Run the task
async def run_task(task: str, hil_mode: bool, use_rich_console: bool) -> None:
input_manager = UserInputManager(callback=cancellable_input)
client = OpenAIChatCompletionClient(model="gpt-4o")
m1 = MagenticOne(client=client, hil_mode=hil_mode, input_func=input_manager.get_wrapped_callback())
if use_rich_console:

18
python/uv.lock generated
View File

@@ -3570,16 +3570,23 @@ source = { editable = "packages/magentic-one-cli" }
dependencies = [
{ name = "autogen-agentchat" },
{ name = "autogen-ext", extra = ["magentic-one", "openai", "rich"] },
{ name = "pyyaml" },
]
[package.dev-dependencies]
dev = [
{ name = "types-pyyaml" },
]
[package.metadata]
requires-dist = [
{ name = "autogen-agentchat", editable = "packages/autogen-agentchat" },
{ name = "autogen-ext", extras = ["openai", "magentic-one", "rich"], editable = "packages/autogen-ext" },
{ name = "pyyaml", specifier = ">=5.1" },
]
[package.metadata.requires-dev]
dev = []
dev = [{ name = "types-pyyaml" }]
[[package]]
name = "mako"
@@ -7279,6 +7286,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/74/db/c92ca6920cccd9c2998b013601542e2ac5e59bc805bcff94c94ad254b7df/types_pytz-2024.2.0.20241221-py3-none-any.whl", hash = "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5", size = 10008 },
]
[[package]]
name = "types-pyyaml"
version = "6.0.12.20241230"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/f9/4d566925bcf9396136c0a2e5dc7e230ff08d86fa011a69888dd184469d80/types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c", size = 17078 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e8/c1/48474fbead512b70ccdb4f81ba5eb4a58f69d100ba19f17c92c0c4f50ae6/types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6", size = 20029 },
]
[[package]]
name = "types-requests"
version = "2.32.0.20241016"