code cleanup: remove explicit openai and anthropic references from constant names
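
Renames the shared connection constants so they no longer carry vendor prefixes: CONF_OPENAI_API_KEY becomes CONF_API_KEY, CONF_GENERIC_OPENAI_PATH becomes CONF_API_PATH (and DEFAULT_GENERIC_OPENAI_PATH becomes DEFAULT_API_PATH), and CONF_ANTHROPIC_BASE_URL becomes CONF_BASE_URL. The stored option keys for the OpenAI-style settings ("openai_api_key", "openai_path") are unchanged; the Anthropic base URL's stored key changes from "anthropic_base_url" to "base_url". The Anthropic client also gains a configurable API path, and the anthropic package is added to the requirements.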

Alex O'Connell
2026-01-04 09:10:29 -05:00
parent 35f8a337da
commit 136a25f91b
10 changed files with 67 additions and 63 deletions

View File

@@ -25,8 +25,8 @@ from .const import (
CONF_BACKEND_TYPE,
CONF_INSTALLED_LLAMACPP_VERSION,
CONF_SELECTED_LANGUAGE,
-CONF_OPENAI_API_KEY,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_KEY,
+CONF_API_PATH,
CONF_CHAT_MODEL, CONF_DOWNLOADED_MODEL_QUANTIZATION, CONF_DOWNLOADED_MODEL_FILE, CONF_REQUEST_TIMEOUT, CONF_MAX_TOOL_CALL_ITERATIONS,
CONF_REFRESH_SYSTEM_PROMPT, CONF_REMEMBER_CONVERSATION, CONF_REMEMBER_NUM_INTERACTIONS, CONF_REMEMBER_CONVERSATION_TIME_MINUTES,
CONF_PROMPT, CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, CONF_MIN_P, CONF_TYPICAL_P, CONF_MAX_TOKENS,
@@ -130,7 +130,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: LocalLLMConfigE
# Migrate the config_entry to be an entry + sub-entry
if config_entry.version == 2:
ENTRY_DATA_KEYS = [CONF_BACKEND_TYPE]
-ENTRY_OPTIONS_KEYS = [CONF_SELECTED_LANGUAGE, CONF_HOST, CONF_PORT, CONF_SSL, CONF_OPENAI_API_KEY, CONF_GENERIC_OPENAI_PATH]
+ENTRY_OPTIONS_KEYS = [CONF_SELECTED_LANGUAGE, CONF_HOST, CONF_PORT, CONF_SSL, CONF_API_KEY, CONF_API_PATH]
SUBENTRY_KEYS = [
CONF_CHAT_MODEL, CONF_DOWNLOADED_MODEL_QUANTIZATION, CONF_DOWNLOADED_MODEL_FILE, CONF_REQUEST_TIMEOUT, CONF_MAX_TOOL_CALL_ITERATIONS,
CONF_REFRESH_SYSTEM_PROMPT, CONF_REMEMBER_CONVERSATION, CONF_REMEMBER_NUM_INTERACTIONS, CONF_REMEMBER_CONVERSATION_TIME_MINUTES,
@@ -159,7 +159,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: LocalLLMConfigE
entry_options[CONF_INSTALLED_LLAMACPP_VERSION] = await hass.async_add_executor_job(get_llama_cpp_python_version)
else:
# ensure all remote backends have a path set
-entry_options[CONF_GENERIC_OPENAI_PATH] = entry_options.get(CONF_GENERIC_OPENAI_PATH, "")
+entry_options[CONF_API_PATH] = entry_options.get(CONF_API_PATH, "")
entry_title = BACKEND_TO_CLS[backend].get_name(entry_options)
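
Editor's note on the migration hunks above: the v2-to-v3 upgrade splits one flat options dict into entry data, entry options, and sub-entry options by key membership. A minimal sketch of that pattern, with abbreviated key lists and purely illustrative values:

# Illustrative sketch of the key-splitting pattern, not the integration's exact code.
ENTRY_OPTIONS_KEYS = ["host", "port", "ssl", "openai_api_key", "openai_path"]
SUBENTRY_KEYS = ["prompt", "request_timeout"]

legacy = {"host": "localhost", "openai_path": "v1", "prompt": "hello", "request_timeout": 90}

entry_options = {k: v for k, v in legacy.items() if k in ENTRY_OPTIONS_KEYS}
subentry_options = {k: v for k, v in legacy.items() if k in SUBENTRY_KEYS}

assert entry_options == {"host": "localhost", "openai_path": "v1"}
assert subentry_options == {"prompt": "hello", "request_timeout": 90}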

View File

@@ -24,8 +24,9 @@ from custom_components.llama_conversation.const import (
CONF_REQUEST_TIMEOUT,
CONF_ENABLE_LEGACY_TOOL_CALLING,
CONF_TOOL_RESPONSE_AS_STRING,
-CONF_OPENAI_API_KEY,
-CONF_ANTHROPIC_BASE_URL,
+CONF_API_KEY,
+CONF_API_PATH,
+CONF_BASE_URL,
DEFAULT_MAX_TOKENS,
DEFAULT_TEMPERATURE,
DEFAULT_TOP_P,
@@ -33,6 +34,7 @@ from custom_components.llama_conversation.const import (
DEFAULT_REQUEST_TIMEOUT,
DEFAULT_ENABLE_LEGACY_TOOL_CALLING,
DEFAULT_TOOL_RESPONSE_AS_STRING,
+DEFAULT_API_PATH,
)
from custom_components.llama_conversation.entity import LocalLLMClient, TextGenerationResult
@@ -153,12 +155,14 @@ class AnthropicAPIClient(LocalLLMClient):
api_key: str
base_url: str
+api_path: str
def __init__(self, hass: HomeAssistant, client_options: dict[str, Any]) -> None:
super().__init__(hass, client_options)
-self.api_key = client_options.get(CONF_OPENAI_API_KEY, "")
-self.base_url = client_options.get(CONF_ANTHROPIC_BASE_URL, "")
+self.api_key = client_options.get(CONF_API_KEY, "")
+self.base_url = client_options.get(CONF_BASE_URL, "")
+self.api_path = client_options.get(CONF_API_PATH, DEFAULT_API_PATH)
async def _async_build_client(self, timeout: float | None = None) -> AsyncAnthropic:
"""Build an async Anthropic client (runs in executor to avoid blocking SSL ops)."""
@@ -182,7 +186,7 @@ class AnthropicAPIClient(LocalLLMClient):
@staticmethod
def get_name(client_options: dict[str, Any]) -> str:
-base_url = client_options.get(CONF_ANTHROPIC_BASE_URL, "")
+base_url = client_options.get(CONF_BASE_URL, "")
return f"Anthropic-compatible API at '{base_url}'"
@staticmethod
@@ -190,8 +194,8 @@ class AnthropicAPIClient(LocalLLMClient):
hass: HomeAssistant, user_input: Dict[str, Any]
) -> str | None:
"""Validate connection to the Anthropic-compatible API."""
-api_key = user_input.get(CONF_OPENAI_API_KEY, "")
-base_url = user_input.get(CONF_ANTHROPIC_BASE_URL, "")
+api_key = user_input.get(CONF_API_KEY, "")
+base_url = user_input.get(CONF_BASE_URL, "")
if not api_key:
return "API key is required"
@@ -222,12 +226,12 @@ class AnthropicAPIClient(LocalLLMClient):
except AuthenticationError as err:
_LOGGER.error("Anthropic authentication error: %s", err)
return f"Invalid API key: {err}"
-except APIConnectionError as err:
-_LOGGER.error("Anthropic connection error: %s", err)
-return f"Connection error: {err}"
except APITimeoutError as err:
_LOGGER.error("Anthropic timeout error: %s", err)
return "Connection timed out"
+except APIConnectionError as err:
+_LOGGER.error("Anthropic connection error: %s", err)
+return f"Connection error: {err}"
except APIError as err:
_LOGGER.error("Anthropic API error: status=%s, message=%s", getattr(err, 'status_code', 'N/A'), err)
return f"API error ({getattr(err, 'status_code', 'unknown')}): {err}"
@@ -259,7 +263,8 @@ class AnthropicAPIClient(LocalLLMClient):
}
base = self.base_url.rstrip("/")
models_url = f"{base}/v1/models"
path = self.api_path.strip("/")
models_url = f"{base}/{path}/models"
async with aiohttp.ClientSession() as session:
async with session.get(models_url, headers=headers, timeout=aiohttp.ClientTimeout(total=10)) as response:
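
With the change above, listing models honors the configured API path instead of a hardcoded /v1. A quick sketch of the resulting URL, using a hypothetical endpoint and mirroring the rstrip/strip logic from the hunk:

# Hypothetical values; same normalization as the code above.
base_url = "https://api.example.com/"
api_path = "/v1/"

base = base_url.rstrip("/")
path = api_path.strip("/")
models_url = f"{base}/{path}/models"
assert models_url == "https://api.example.com/v1/models"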

View File

@@ -23,10 +23,10 @@ from custom_components.llama_conversation.const import (
CONF_TEMPERATURE,
CONF_TOP_P,
CONF_REQUEST_TIMEOUT,
-CONF_OPENAI_API_KEY,
+CONF_API_KEY,
CONF_REMEMBER_CONVERSATION,
CONF_REMEMBER_CONVERSATION_TIME_MINUTES,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
CONF_ENABLE_LEGACY_TOOL_CALLING,
CONF_TOOL_RESPONSE_AS_STRING,
CONF_RESPONSE_JSON_SCHEMA,
@@ -36,7 +36,7 @@ from custom_components.llama_conversation.const import (
DEFAULT_REQUEST_TIMEOUT,
DEFAULT_REMEMBER_CONVERSATION,
DEFAULT_REMEMBER_CONVERSATION_TIME_MINUTES,
-DEFAULT_GENERIC_OPENAI_PATH,
+DEFAULT_API_PATH,
DEFAULT_ENABLE_LEGACY_TOOL_CALLING,
DEFAULT_TOOL_RESPONSE_AS_STRING,
RECOMMENDED_CHAT_MODELS,
@@ -59,24 +59,24 @@ class GenericOpenAIAPIClient(LocalLLMClient):
hostname=client_options[CONF_HOST],
port=client_options[CONF_PORT],
ssl=client_options[CONF_SSL],
path="/" + client_options.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH)
path="/" + client_options.get(CONF_API_PATH, DEFAULT_API_PATH)
)
-self.api_key = client_options.get(CONF_OPENAI_API_KEY, "")
+self.api_key = client_options.get(CONF_API_KEY, "")
@staticmethod
def get_name(client_options: dict[str, Any]):
host = client_options[CONF_HOST]
port = client_options[CONF_PORT]
ssl = client_options[CONF_SSL]
path = "/" + client_options[CONF_GENERIC_OPENAI_PATH]
path = "/" + client_options[CONF_API_PATH]
return f"Generic OpenAI at '{format_url(hostname=host, port=port, ssl=ssl, path=path)}'"
@staticmethod
async def async_validate_connection(hass: HomeAssistant, user_input: Dict[str, Any]) -> str | None:
headers = {}
-api_key = user_input.get(CONF_OPENAI_API_KEY)
-api_base_path = user_input.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH)
+api_key = user_input.get(CONF_API_KEY)
+api_base_path = user_input.get(CONF_API_PATH, DEFAULT_API_PATH)
if api_key:
headers["Authorization"] = f"Bearer {api_key}"
@@ -251,17 +251,17 @@ class GenericOpenAIResponsesAPIClient(LocalLLMClient):
hostname=client_options[CONF_HOST],
port=client_options[CONF_PORT],
ssl=client_options[CONF_SSL],
path="/" + client_options.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH)
path="/" + client_options.get(CONF_API_PATH, DEFAULT_API_PATH)
)
-self.api_key = client_options.get(CONF_OPENAI_API_KEY, "")
+self.api_key = client_options.get(CONF_API_KEY, "")
@staticmethod
def get_name(client_options: dict[str, Any]):
host = client_options[CONF_HOST]
port = client_options[CONF_PORT]
ssl = client_options[CONF_SSL]
path = "/" + client_options[CONF_GENERIC_OPENAI_PATH]
path = "/" + client_options[CONF_API_PATH]
return f"Generic OpenAI at '{format_url(hostname=host, port=port, ssl=ssl, path=path)}'"
def _responses_params(self, conversation: List[conversation.Content], entity_options: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:
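
format_url itself is not part of this diff; assuming it assembles scheme://host:port/path from its keyword arguments, get_name would produce names like the following (the implementation here is a hypothetical stand-in):

# Hypothetical stand-in for the integration's format_url helper.
def format_url(hostname: str, port: str, ssl: bool, path: str) -> str:
    scheme = "https" if ssl else "http"
    return f"{scheme}://{hostname}:{port}{path}"

name = f"Generic OpenAI at '{format_url(hostname='localhost', port='8080', ssl=False, path='/v1')}'"
assert name == "Generic OpenAI at 'http://localhost:8080/v1'"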

View File

@@ -27,8 +27,8 @@ from custom_components.llama_conversation.const import (
CONF_MIN_P,
CONF_ENABLE_THINK_MODE,
CONF_REQUEST_TIMEOUT,
-CONF_OPENAI_API_KEY,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_KEY,
+CONF_API_PATH,
CONF_OLLAMA_KEEP_ALIVE_MIN,
CONF_OLLAMA_JSON_MODE,
CONF_CONTEXT_LENGTH,
@@ -43,7 +43,7 @@ from custom_components.llama_conversation.const import (
DEFAULT_MIN_P,
DEFAULT_ENABLE_THINK_MODE,
DEFAULT_REQUEST_TIMEOUT,
-DEFAULT_GENERIC_OPENAI_PATH,
+DEFAULT_API_PATH,
DEFAULT_OLLAMA_KEEP_ALIVE_MIN,
DEFAULT_OLLAMA_JSON_MODE,
DEFAULT_CONTEXT_LENGTH,
@@ -77,14 +77,14 @@ class OllamaAPIClient(LocalLLMClient):
def __init__(self, hass: HomeAssistant, client_options: dict[str, Any]) -> None:
super().__init__(hass, client_options)
-base_path = _normalize_path(client_options.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH))
+base_path = _normalize_path(client_options.get(CONF_API_PATH, DEFAULT_API_PATH))
self.api_host = format_url(
hostname=client_options[CONF_HOST],
port=client_options[CONF_PORT],
ssl=client_options[CONF_SSL],
path=base_path,
)
-self.api_key = client_options.get(CONF_OPENAI_API_KEY) or None
+self.api_key = client_options.get(CONF_API_KEY) or None
self._headers = {"Authorization": f"Bearer {self.api_key}"} if self.api_key else None
self._ssl_context = _build_default_ssl_context() if client_options.get(CONF_SSL) else None
@@ -105,13 +105,13 @@ class OllamaAPIClient(LocalLLMClient):
host = client_options[CONF_HOST]
port = client_options[CONF_PORT]
ssl = client_options[CONF_SSL]
-path = _normalize_path(client_options.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH))
+path = _normalize_path(client_options.get(CONF_API_PATH, DEFAULT_API_PATH))
return f"Ollama at '{format_url(hostname=host, port=port, ssl=ssl, path=path)}'"
@staticmethod
async def async_validate_connection(hass: HomeAssistant, user_input: Dict[str, Any]) -> str | None:
-api_key = user_input.get(CONF_OPENAI_API_KEY)
-base_path = _normalize_path(user_input.get(CONF_GENERIC_OPENAI_PATH, DEFAULT_GENERIC_OPENAI_PATH))
+api_key = user_input.get(CONF_API_KEY)
+base_path = _normalize_path(user_input.get(CONF_API_PATH, DEFAULT_API_PATH))
timeout_config: httpx.Timeout | float | None = httpx.Timeout(5)
verify_context = None
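
_normalize_path is referenced throughout this file but its body is outside the diff. A plausible sketch, assuming it reduces a user-supplied fragment to a clean leading-slash path and maps empty input to an empty string:

# Hypothetical sketch of _normalize_path; the real helper is not shown in this commit.
def _normalize_path(path: str) -> str:
    path = (path or "").strip("/")
    return f"/{path}" if path else ""

assert _normalize_path("v1") == "/v1"
assert _normalize_path("/ollama/api/") == "/ollama/api"
assert _normalize_path("") == ""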

View File

@@ -22,7 +22,7 @@ from custom_components.llama_conversation.const import (
CONF_TEXT_GEN_WEBUI_ADMIN_KEY,
CONF_TEXT_GEN_WEBUI_CHAT_MODE,
CONF_CONTEXT_LENGTH,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
DEFAULT_MAX_TOKENS,
DEFAULT_TOP_K,
DEFAULT_MIN_P,
@@ -53,7 +53,7 @@ class TextGenerationWebuiClient(GenericOpenAIAPIClient):
host = client_options[CONF_HOST]
port = client_options[CONF_PORT]
ssl = client_options[CONF_SSL]
path = "/" + client_options[CONF_GENERIC_OPENAI_PATH]
path = "/" + client_options[CONF_API_PATH]
return f"Text-Gen WebUI at '{format_url(hostname=host, port=port, ssl=ssl, path=path)}'"
async def _async_load_model(self, entity_options: dict[str, Any]) -> None:
@@ -126,7 +126,7 @@ class LlamaCppServerClient(GenericOpenAIAPIClient):
host = client_options[CONF_HOST]
port = client_options[CONF_PORT]
ssl = client_options[CONF_SSL]
path = "/" + client_options[CONF_GENERIC_OPENAI_PATH]
path = "/" + client_options[CONF_API_PATH]
return f"Llama.cpp Server at '{format_url(hostname=host, port=port, ssl=ssl, path=path)}'"
def _chat_completion_params(self, entity_options: Dict[str, Any]) -> Tuple[str, Dict[str, Any]]:

View File

@@ -86,12 +86,12 @@ from .const import (
CONF_USE_IN_CONTEXT_LEARNING_EXAMPLES,
CONF_IN_CONTEXT_EXAMPLES_FILE,
CONF_NUM_IN_CONTEXT_EXAMPLES,
-CONF_OPENAI_API_KEY,
+CONF_API_KEY,
CONF_TEXT_GEN_WEBUI_ADMIN_KEY,
CONF_TEXT_GEN_WEBUI_CHAT_MODE,
CONF_OLLAMA_KEEP_ALIVE_MIN,
CONF_OLLAMA_JSON_MODE,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
CONF_CONTEXT_LENGTH,
CONF_LLAMACPP_BATCH_SIZE,
CONF_LLAMACPP_THREAD_COUNT,
@@ -140,7 +140,7 @@ from .const import (
DEFAULT_TEXT_GEN_WEBUI_CHAT_MODE,
DEFAULT_OLLAMA_KEEP_ALIVE_MIN,
DEFAULT_OLLAMA_JSON_MODE,
-DEFAULT_GENERIC_OPENAI_PATH,
+DEFAULT_API_PATH,
DEFAULT_CONTEXT_LENGTH,
DEFAULT_LLAMACPP_BATCH_SIZE,
DEFAULT_LLAMACPP_THREAD_COUNT,
@@ -153,7 +153,7 @@ from .const import (
BACKEND_TYPE_LLAMA_CPP_SERVER,
BACKEND_TYPE_OLLAMA,
BACKEND_TYPE_ANTHROPIC,
-CONF_ANTHROPIC_BASE_URL,
+CONF_BASE_URL,
TEXT_GEN_WEBUI_CHAT_MODE_CHAT,
TEXT_GEN_WEBUI_CHAT_MODE_INSTRUCT,
TEXT_GEN_WEBUI_CHAT_MODE_CHAT_INSTRUCT,
@@ -208,15 +208,15 @@ def remote_connection_schema(backend_type: str, *, host=None, port=None, ssl=Non
extra = {}
default_port = DEFAULT_PORT
-default_path = DEFAULT_GENERIC_OPENAI_PATH
+default_path = DEFAULT_API_PATH
# Anthropic uses a different schema - base URL + API key only (no host/port/ssl)
if backend_type == BACKEND_TYPE_ANTHROPIC:
return vol.Schema({
-vol.Required(CONF_ANTHROPIC_BASE_URL, default=base_url if base_url else ""): TextSelector(
+vol.Required(CONF_BASE_URL, default=base_url if base_url else ""): TextSelector(
TextSelectorConfig()
),
-vol.Required(CONF_OPENAI_API_KEY, default=api_key if api_key else ""): TextSelector(
+vol.Required(CONF_API_KEY, default=api_key if api_key else ""): TextSelector(
TextSelectorConfig(type=TextSelectorType.PASSWORD)
),
})
@@ -236,9 +236,9 @@ def remote_connection_schema(backend_type: str, *, host=None, port=None, ssl=Non
vol.Required(CONF_HOST, default=host if host else ""): str,
vol.Optional(CONF_PORT, default=port if port else default_port): str,
vol.Required(CONF_SSL, default=ssl if ssl else DEFAULT_SSL): bool,
-vol.Optional(CONF_OPENAI_API_KEY): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
+vol.Optional(CONF_API_KEY): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
vol.Optional(
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
default=selected_path if selected_path else default_path
): TextSelector(TextSelectorConfig(prefix="/")),
**extra
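
Net effect of the branch above: an Anthropic-type backend collects only a base URL plus API key, while every other remote backend collects host, port, SSL, an optional key, and a path. A simplified sketch of the two shapes using plain voluptuous validators (the real schema uses Home Assistant selectors, and the port default here is hypothetical):

import voluptuous as vol

# Simplified stand-ins for the two shapes built by remote_connection_schema.
anthropic_schema = vol.Schema({
    vol.Required("base_url"): str,
    vol.Required("openai_api_key"): str,  # CONF_API_KEY still stores under this key
})
generic_schema = vol.Schema({
    vol.Required("host"): str,
    vol.Optional("port", default="8080"): str,  # hypothetical default
    vol.Required("ssl", default=False): bool,
    vol.Optional("openai_api_key"): str,
    vol.Optional("openai_path", default="v1"): str,
})

anthropic_schema({"base_url": "https://api.anthropic.com", "openai_api_key": "key"})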
@@ -386,9 +386,9 @@ class ConfigFlow(BaseConfigFlow, domain=DOMAIN):
host=self.client_config.get(CONF_HOST),
port=self.client_config.get(CONF_PORT),
ssl=self.client_config.get(CONF_SSL),
-selected_path=self.client_config.get(CONF_GENERIC_OPENAI_PATH),
-api_key=self.client_config.get(CONF_OPENAI_API_KEY),
-base_url=self.client_config.get(CONF_ANTHROPIC_BASE_URL),
+selected_path=self.client_config.get(CONF_API_PATH),
+api_key=self.client_config.get(CONF_API_KEY),
+base_url=self.client_config.get(CONF_BASE_URL),
)
return self.async_show_form(
@@ -507,9 +507,9 @@ class OptionsFlow(BaseOptionsFlow):
host=client_config.get(CONF_HOST),
port=client_config.get(CONF_PORT),
ssl=client_config.get(CONF_SSL),
-selected_path=client_config.get(CONF_GENERIC_OPENAI_PATH),
-api_key=client_config.get(CONF_OPENAI_API_KEY),
-base_url=client_config.get(CONF_ANTHROPIC_BASE_URL),
+selected_path=client_config.get(CONF_API_PATH),
+api_key=client_config.get(CONF_API_KEY),
+base_url=client_config.get(CONF_BASE_URL),
)
return self.async_show_form(

View File

@@ -117,9 +117,7 @@ BACKEND_TYPE_LLAMA_CPP_SERVER = "llama_cpp_server"
BACKEND_TYPE_OLLAMA = "ollama"
BACKEND_TYPE_ANTHROPIC = "anthropic"
DEFAULT_BACKEND_TYPE = BACKEND_TYPE_LLAMA_CPP
-# Anthropic-compatible API configuration
-CONF_ANTHROPIC_BASE_URL = "anthropic_base_url"
+CONF_BASE_URL = "base_url"
CONF_INSTALLED_LLAMACPP_VERSION = "installed_llama_cpp_version"
CONF_SELECTED_LANGUAGE = "selected_language"
CONF_SELECTED_LANGUAGE_OPTIONS = [ "en", "de", "fr", "es", "pl"]
@@ -163,7 +161,7 @@ DEFAULT_IN_CONTEXT_EXAMPLES_FILE = "in_context_examples.csv"
CONF_NUM_IN_CONTEXT_EXAMPLES = "num_in_context_examples"
DEFAULT_NUM_IN_CONTEXT_EXAMPLES = 4
CONF_TEXT_GEN_WEBUI_PRESET = "text_generation_webui_preset"
CONF_OPENAI_API_KEY = "openai_api_key"
CONF_API_KEY = "openai_api_key"
CONF_TEXT_GEN_WEBUI_ADMIN_KEY = "text_generation_webui_admin_key"
CONF_REFRESH_SYSTEM_PROMPT = "refresh_prompt_per_turn"
DEFAULT_REFRESH_SYSTEM_PROMPT = True
@@ -188,8 +186,8 @@ CONF_OLLAMA_KEEP_ALIVE_MIN = "ollama_keep_alive"
DEFAULT_OLLAMA_KEEP_ALIVE_MIN = 30
CONF_OLLAMA_JSON_MODE = "ollama_json_mode"
DEFAULT_OLLAMA_JSON_MODE = False
CONF_GENERIC_OPENAI_PATH = "openai_path"
DEFAULT_GENERIC_OPENAI_PATH = "v1"
CONF_API_PATH = "openai_path"
DEFAULT_API_PATH = "v1"
CONF_GENERIC_OPENAI_VALIDATE_MODEL = "openai_validate_model"
DEFAULT_GENERIC_OPENAI_VALIDATE_MODEL = True
CONF_CONTEXT_LENGTH = "context_length"
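
Worth noting when reading the const changes: CONF_API_KEY and CONF_API_PATH keep their original stored strings, so options written by older versions still resolve without migration; only CONF_BASE_URL changes its stored key ("anthropic_base_url" to "base_url"). A tiny check of the preserved keys:

# The renamed constants still point at the original storage keys.
CONF_API_KEY = "openai_api_key"
CONF_API_PATH = "openai_path"

saved = {"openai_api_key": "abc123", "openai_path": "v1"}  # written by an older version
assert saved[CONF_API_KEY] == "abc123"
assert saved[CONF_API_PATH] == "v1"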

View File

@@ -1,3 +1,4 @@
huggingface-hub>=0.23.0
webcolors>=24.8.0
ollama>=0.5.1
+anthropic>=0.75.0

View File

@@ -27,7 +27,7 @@ from custom_components.llama_conversation.const import (
DEFAULT_LLAMACPP_ENABLE_FLASH_ATTENTION,
DEFAULT_GBNF_GRAMMAR_FILE,
DEFAULT_PROMPT_CACHING_ENABLED,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
)
@@ -82,7 +82,7 @@ def test_generic_openai_name_and_path(hass_defaults):
CONF_HOST: "localhost",
CONF_PORT: "8080",
CONF_SSL: False,
CONF_GENERIC_OPENAI_PATH: "v1",
CONF_API_PATH: "v1",
CONF_CHAT_MODEL: "demo",
},
)
@@ -91,7 +91,7 @@ def test_generic_openai_name_and_path(hass_defaults):
CONF_HOST: "localhost",
CONF_PORT: "8080",
CONF_SSL: False,
CONF_GENERIC_OPENAI_PATH: "v1",
CONF_API_PATH: "v1",
}
)
assert "Generic OpenAI" in name

View File

@@ -16,7 +16,7 @@ from custom_components.llama_conversation.const import (
CONF_CONTEXT_LENGTH,
CONF_DOWNLOADED_MODEL_FILE,
CONF_DOWNLOADED_MODEL_QUANTIZATION,
-CONF_GENERIC_OPENAI_PATH,
+CONF_API_PATH,
CONF_PROMPT,
CONF_REQUEST_TIMEOUT,
DOMAIN,
@@ -43,7 +43,7 @@ async def test_migrate_v2_creates_subentry_and_updates_entry(monkeypatch, hass):
CONF_HOST: "localhost",
CONF_PORT: "8080",
CONF_SSL: False,
CONF_GENERIC_OPENAI_PATH: "v1",
CONF_API_PATH: "v1",
CONF_PROMPT: "hello",
CONF_REQUEST_TIMEOUT: 90,
CONF_CHAT_MODEL: "model-x",