Fix Azure OpenAI setup problems (#4875)

* [Fix] Recover the azure config load function

* [Style] Apply black, isort, mypy, autoflake

* [Fix] Rename the return parameter from 'azure_model_map' to 'azure_model_to_deployment_id_map'

* [Feat] Change the azure config file path to be dynamically configurable

* [Test] Add azure_config and azure deployment_id_for_model

* [Style] Apply black, isort, mypy, autoflake

* [Style] Apply black, isort, mypy, autoflake

* Refactor Azure configuration

- Refactor the `azure_config_file` attribute in the `Config` class to be optional.
- Refactor the `azure_model_to_deployment_id_map` attribute in the `Config` class to be optional and provide default values.
- Update the `get_azure_deployment_id_for_model` function to accept additional parameters.
- Update references to `get_azure_deployment_id_for_model` in `create_text_completion`, `create_chat_completion`, and `get_embedding` functions to pass the required parameters.

* Clean up process for azure

* Docstring

* Revert some unnecessary fiddling

* Avoid altering args to models

* Retry on 404s

* Don't permanently change the environment

* Formatting

---------

Co-authored-by: Luke <2609441+lc0rp@users.noreply.github.com>
Co-authored-by: lc0rp <2609411+lc0rp@users.noreply.github.com>
Co-authored-by: collijk <collijk@uw.edu>
This commit is contained in:
Jayden
2023-07-07 09:51:59 +09:00
committed by GitHub
parent e4a337f1a5
commit ac17518663
5 changed files with 114 additions and 13 deletions

View File

@@ -58,6 +58,10 @@ OPENAI_API_KEY=your-openai-api-key
## USE_AZURE - Use Azure OpenAI or not (Default: False)
# USE_AZURE=False
## AZURE_CONFIG_FILE - The path to the azure.yaml file (Default: azure.yaml)
# AZURE_CONFIG_FILE=azure.yaml
################################################################################
### LLM MODELS
################################################################################

View File

@@ -44,6 +44,8 @@ class Config(SystemSettings):
openai_organization: Optional[str] = None
temperature: float
use_azure: bool
azure_config_file: Optional[str] = None
azure_model_to_deployment_id_map: Optional[Dict[str, str]] = None
execute_local_commands: bool
restrict_to_workspace: bool
openai_api_type: Optional[str] = None
@@ -83,6 +85,31 @@ class Config(SystemSettings):
plugins: list[str]
authorise_key: str
def get_azure_kwargs(self, model: str) -> dict[str, str]:
    """Build the extra keyword arguments needed to call the Azure OpenAI API.

    Args:
        model: The OpenAI model name being invoked.

    Returns:
        A dict with the Azure ``api_type``/``api_base``/``api_version`` plus
        either ``engine`` (for the embedding model — the Azure embeddings
        endpoint expects ``engine``) or ``deployment_id`` (for chat/completion
        models). The deployment value is ``None`` when the model has no
        configured deployment.
    """
    # azure_model_to_deployment_id_map is Optional and defaults to None
    # (e.g. when USE_AZURE is set but no azure.yaml was loaded), so fall
    # back to an empty map instead of raising AttributeError on .get().
    model_map = self.azure_model_to_deployment_id_map or {}
    deployment_id = {
        self.fast_llm_model: model_map.get("fast_llm_model_deployment_id"),
        self.smart_llm_model: model_map.get("smart_llm_model_deployment_id"),
        "text-embedding-ada-002": model_map.get("embedding_model_deployment_id"),
    }.get(model)
    kwargs = {
        "api_type": self.openai_api_type,
        "api_base": self.openai_api_base,
        "api_version": self.openai_api_version,
    }
    if model == "text-embedding-ada-002":
        # The Azure embeddings endpoint takes the deployment as "engine".
        kwargs["engine"] = deployment_id
    else:
        kwargs["deployment_id"] = deployment_id
    return kwargs
class ConfigBuilder(Configurable[Config]):
default_plugins_config_file = os.path.join(
@@ -125,6 +152,7 @@ class ConfigBuilder(Configurable[Config]):
browse_spacy_language_model="en_core_web_sm",
temperature=0,
use_azure=False,
azure_config_file=AZURE_CONFIG_FILE,
execute_local_commands=False,
restrict_to_workspace=True,
openai_functions=False,
@@ -168,6 +196,7 @@ class ConfigBuilder(Configurable[Config]):
"browse_spacy_language_model": os.getenv("BROWSE_SPACY_LANGUAGE_MODEL"),
"openai_api_key": os.getenv("OPENAI_API_KEY"),
"use_azure": os.getenv("USE_AZURE") == "True",
"azure_config_file": os.getenv("AZURE_CONFIG_FILE", AZURE_CONFIG_FILE),
"execute_local_commands": os.getenv("EXECUTE_LOCAL_COMMANDS", "False")
== "True",
"restrict_to_workspace": os.getenv("RESTRICT_TO_WORKSPACE", "True")
@@ -236,12 +265,15 @@ class ConfigBuilder(Configurable[Config]):
config_dict["temperature"] = float(os.getenv("TEMPERATURE"))
if config_dict["use_azure"]:
azure_config = cls.load_azure_config()
azure_config = cls.load_azure_config(config_dict["azure_config_file"])
config_dict["openai_api_type"] = azure_config["openai_api_type"]
config_dict["openai_api_base"] = azure_config["openai_api_base"]
config_dict["openai_api_version"] = azure_config["openai_api_version"]
config_dict["azure_model_to_deployment_id_map"] = azure_config[
"azure_model_to_deployment_id_map"
]
if os.getenv("OPENAI_API_BASE_URL"):
elif os.getenv("OPENAI_API_BASE_URL"):
config_dict["openai_api_base"] = os.getenv("OPENAI_API_BASE_URL")
openai_organization = os.getenv("OPENAI_ORGANIZATION")
@@ -270,10 +302,11 @@ class ConfigBuilder(Configurable[Config]):
config_params = yaml.load(file, Loader=yaml.FullLoader) or {}
return {
"openai_api_type": config_params.get("azure_api_type") or "azure",
"openai_api_base": config_params.get("azure_api_base") or "",
"openai_api_version": config_params.get("azure_api_version")
or "2023-03-15-preview",
"openai_api_type": config_params.get("azure_api_type", "azure"),
"openai_api_base": config_params.get("azure_api_base", ""),
"openai_api_version": config_params.get(
"azure_api_version", "2023-03-15-preview"
),
"azure_model_to_deployment_id_map": config_params.get(
"azure_model_map", {}
),

View File

@@ -1,12 +1,10 @@
from __future__ import annotations
from dataclasses import asdict
from typing import List, Literal, Optional
from colorama import Fore
from autogpt.config import Config
from autogpt.logs import logger
from ..api_manager import ApiManager
from ..base import ChatModelResponse, ChatSequence, Message
@@ -74,7 +72,7 @@ def create_text_completion(
temperature = config.temperature
if config.use_azure:
kwargs = {"deployment_id": config.get_azure_deployment_id_for_model(model)}
kwargs = config.get_azure_kwargs(model)
else:
kwargs = {"model": model}
@@ -141,9 +139,8 @@ def create_chat_completion(
chat_completion_kwargs["api_key"] = config.openai_api_key
if config.use_azure:
chat_completion_kwargs[
"deployment_id"
] = config.get_azure_deployment_id_for_model(model)
chat_completion_kwargs.update(config.get_azure_kwargs(model))
if functions:
chat_completion_kwargs["functions"] = [
function.__dict__ for function in functions

View File

@@ -42,7 +42,7 @@ def get_embedding(
model = config.embedding_model
if config.use_azure:
kwargs = {"engine": config.get_azure_deployment_id_for_model(model)}
kwargs = config.get_azure_kwargs(model)
else:
kwargs = {"model": model}
@@ -51,6 +51,8 @@ def get_embedding(
f" with model '{model}'"
+ (f" via Azure deployment '{kwargs['engine']}'" if config.use_azure else "")
)
if config.use_azure:
breakpoint()
embeddings = iopenai.create_embedding(
input,

View File

@@ -2,6 +2,7 @@
Test cases for the config class, which handles the configuration settings
for the AI and ensures it behaves as a singleton.
"""
import os
from unittest import mock
from unittest.mock import patch
@@ -145,6 +146,70 @@ def test_missing_azure_config(workspace: Workspace):
assert azure_config["azure_model_to_deployment_id_map"] == {}
def test_azure_config(workspace: Workspace) -> None:
"""Build a Config from env with USE_AZURE/AZURE_CONFIG_FILE set and assert
the azure.yaml values land in the openai_* fields and the deployment map."""
yaml_content = """
azure_api_type: azure
azure_api_base: https://dummy.openai.azure.com
azure_api_version: 2023-06-01-preview
azure_model_map:
    fast_llm_model_deployment_id: gpt-3.5-turbo
    smart_llm_model_deployment_id: gpt-4
    embedding_model_deployment_id: embedding-deployment-id-for-azure
"""
# Write the azure config into the test workspace and point the env var at it.
config_file = workspace.get_path("azure.yaml")
config_file.write_text(yaml_content)
# NOTE(review): mutates os.environ and only cleans up on success; a failing
# assert below leaks these vars into later tests — consider monkeypatch.
os.environ["USE_AZURE"] = "True"
os.environ["AZURE_CONFIG_FILE"] = str(config_file)
config = ConfigBuilder.build_config_from_env()
assert config.openai_api_type == "azure"
assert config.openai_api_base == "https://dummy.openai.azure.com"
assert config.openai_api_version == "2023-06-01-preview"
# The yaml "azure_model_map" key is exposed as azure_model_to_deployment_id_map.
assert config.azure_model_to_deployment_id_map == {
    "fast_llm_model_deployment_id": "gpt-3.5-turbo",
    "smart_llm_model_deployment_id": "gpt-4",
    "embedding_model_deployment_id": "embedding-deployment-id-for-azure",
}
del os.environ["USE_AZURE"]
del os.environ["AZURE_CONFIG_FILE"]
def test_azure_deployment_id_for_model(workspace: Workspace) -> None:
"""Assert get_azure_kwargs maps each model to its configured deployment id
(via "deployment_id", or "engine" for the embedding model)."""
yaml_content = """
azure_api_type: azure
azure_api_base: https://dummy.openai.azure.com
azure_api_version: 2023-06-01-preview
azure_model_map:
    fast_llm_model_deployment_id: gpt-3.5-turbo
    smart_llm_model_deployment_id: gpt-4
    embedding_model_deployment_id: embedding-deployment-id-for-azure
"""
config_file = workspace.get_path("azure.yaml")
config_file.write_text(yaml_content)
# NOTE(review): mutates os.environ and only cleans up on success; a failing
# assert below leaks these vars into later tests — consider monkeypatch.
os.environ["USE_AZURE"] = "True"
os.environ["AZURE_CONFIG_FILE"] = str(config_file)
config = ConfigBuilder.build_config_from_env()
# Use distinct sentinel model names so the lookup is keyed on the config
# attributes rather than on real model identifiers.
config.fast_llm_model = "fast_llm_model"
config.smart_llm_model = "smart_llm_model"
def _get_deployment_id(model):
# Embedding models surface the deployment under "engine"; others use
# "deployment_id" — normalize both for the assertions below.
kwargs = config.get_azure_kwargs(model)
return kwargs.get("deployment_id", kwargs.get("engine"))
assert _get_deployment_id(config.fast_llm_model) == "gpt-3.5-turbo"
assert _get_deployment_id(config.smart_llm_model) == "gpt-4"
assert (
    _get_deployment_id("text-embedding-ada-002")
    == "embedding-deployment-id-for-azure"
)
# Unmapped models yield no deployment rather than raising.
assert _get_deployment_id("dummy") is None
del os.environ["USE_AZURE"]
del os.environ["AZURE_CONFIG_FILE"]
def test_create_config_gpt4only(config: Config) -> None:
fast_llm_model = config.fast_llm_model
smart_llm_model = config.smart_llm_model