mirror of
https://github.com/microsoft/autogen.git
synced 2026-04-20 03:02:16 -04:00
Ensure ModelInfo field is serialized for OpenAIChatCompletionClient (#5315)
<!-- Thank you for your contribution! Please review https://microsoft.github.io/autogen/docs/Contribute before opening a pull request. --> <!-- Please add a reviewer to the assignee section when you create a PR. If you don't have the access to it, we will shortly find a reviewer and assign them to your PR. --> ## Why are these changes needed? Fix bug where the `model_info` field is not serialized for the `OpenAIChatCompletionClient` class. This was because the `_raw_config` field was based on a version of the args that had been sanitized (model_info removed). We need the full model info field for non-openai models ```python from autogen_ext.agents.web_surfer import MultimodalWebSurfer from autogen_ext.models.openai import OpenAIChatCompletionClient from autogen_core.models import ModelInfo mistral_vllm_model = OpenAIChatCompletionClient( model="TheBloke/Mistral-7B-Instruct-v0.2-GGUF", base_url="http://localhost:1234/v1", api_key="empty", model_info=ModelInfo(vision=False, function_calling=True, json_output=False, family="unkown"), ) (mistral_vllm_model.dump_component().model_dump_json()) ``` Before ``` { "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", "component_type": "model", "version": 1, "component_version": 1, "description": "Chat completion client for OpenAI hosted models.", "label": "OpenAIChatCompletionClient", "config": { "model": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "api_key": "empty", "base_url": "http://localhost:1234/v1" } } ``` After ``` { "provider": "autogen_ext.models.openai.OpenAIChatCompletionClient", "component_type": "model", "version": 1, "component_version": 1, "description": "Chat completion client for OpenAI hosted models.", "label": "OpenAIChatCompletionClient", "config": { "model": "TheBloke/Mistral-7B-Instruct-v0.2-GGUF", "api_key": "empty", "model_info": { "vision": false, "function_calling": true, "json_output": false, "family": "unkown" }, "base_url": "http://localhost:1234/v1" } } ``` <!-- Please give a short 
summary of the change and the problem this solves. --> ## Related issue number <!-- For example: "Closes #1234" --> ## Checks - [ ] I've included any doc changes needed for https://microsoft.github.io/autogen/. See https://microsoft.github.io/autogen/docs/Contribute#documentation to build and test documentation locally. - [ ] I've added tests (if relevant) corresponding to the changes introduced in this PR. - [ ] I've made sure all auto checks have passed.
This commit is contained in:
```diff
@@ -1062,7 +1062,9 @@ class OpenAIChatCompletionClient(BaseOpenAIChatCompletionClient, Component[OpenA
             raise ValueError("model is required for OpenAIChatCompletionClient")

         model_capabilities: Optional[ModelCapabilities] = None  # type: ignore
+        self._raw_config: Dict[str, Any] = dict(kwargs).copy()
         copied_args = dict(kwargs).copy()
+
         if "model_capabilities" in kwargs:
             model_capabilities = kwargs["model_capabilities"]
             del copied_args["model_capabilities"]
@@ -1074,7 +1076,7 @@ class OpenAIChatCompletionClient(BaseOpenAIChatCompletionClient, Component[OpenA

         client = _openai_client_from_config(copied_args)
         create_args = _create_args_from_config(copied_args)
-        self._raw_config: Dict[str, Any] = copied_args
+
         super().__init__(
             client=client, create_args=create_args, model_capabilities=model_capabilities, model_info=model_info
         )
```
Reference in New Issue
Block a user