From 77fd4b63bbacdf8bd145617347be812ba8c81c11 Mon Sep 17 00:00:00 2001
From: Alex O'Connell
Date: Sun, 4 Jan 2026 10:03:00 -0500
Subject: [PATCH] readme/localization fixes

---
 README.md                                                  | 2 +-
 custom_components/llama_conversation/translations/en.json  | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 0dcbd6c..ca875e7 100644
--- a/README.md
+++ b/README.md
@@ -75,7 +75,7 @@ If you want to fine-tune a model yourself, see the details on how to do it in th
 ## Version History
 | Version | Description |
 |---------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| v0.4.6 | New dataset supporting proper tool calling, Add Anthropic "messages" style API support |
+| v0.4.6 | New dataset supporting proper tool calling, Add Anthropic "messages" style API support, Add on-disk caching for Llama.cpp backend |
 | v0.4.5 | Add support for AI Task entities, Replace custom Ollama API implementation with the official `ollama-python` package to avoid future compatibility issues, Support multiple LLM APIs at once, Fix issues in tool call handling for various backends |
 | v0.4.4 | Fix issue with OpenAI backends appending `/v1` to all URLs, and fix an issue with tools being serialized into the system prompt. |
 | v0.4.3 | Fix an issue with the integration not creating model configs properly during setup |
diff --git a/custom_components/llama_conversation/translations/en.json b/custom_components/llama_conversation/translations/en.json
index be207d2..980ebe6 100644
--- a/custom_components/llama_conversation/translations/en.json
+++ b/custom_components/llama_conversation/translations/en.json
@@ -373,12 +373,12 @@
         "model_backend": {
           "options": {
             "llama_cpp_python": "Llama.cpp",
-            "generic_openai": "Generic OpenAI Compatible API",
-            "generic_openai_responses": "Generic OpenAPI Compatible Responses API",
+            "generic_openai": "OpenAI Compatible 'Conversations' API",
+            "generic_openai_responses": "OpenAI Compatible 'Responses' API",
             "llama_cpp_server": "Llama.cpp Server",
             "ollama": "Ollama API",
             "text-generation-webui_api": "text-generation-webui API",
-            "anthropic": "Anthropic API"
+            "anthropic": "Anthropic Compatible 'Messages' API"
           }
         },
         "text_generation_webui_chat_mode": {
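
For reference, the "messages" style API named in the v0.4.6 changelog entry and in the relabeled `anthropic` backend option follows Anthropic's `/v1/messages` request shape. The sketch below is a minimal illustration of that shape only, assuming a generic Anthropic-compatible endpoint; the base URL, model name, and API key are placeholders, and it is not code from this integration.

```python
# Minimal sketch of an Anthropic-compatible "messages" style request.
# Assumptions: BASE_URL, the model name, and the API key are placeholders;
# this shows the general API shape, not the integration's own client code.
import requests

BASE_URL = "https://api.anthropic.com"  # or any Anthropic-compatible server

response = requests.post(
    f"{BASE_URL}/v1/messages",
    headers={
        "x-api-key": "YOUR_API_KEY",
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
    },
    json={
        "model": "claude-3-5-haiku-latest",  # placeholder model name
        "max_tokens": 256,
        "messages": [
            {"role": "user", "content": "Turn off the kitchen lights."},
        ],
    },
    timeout=30,
)
response.raise_for_status()

# Replies come back as a list of content blocks; plain text arrives in
# blocks of type "text".
print(response.json()["content"][0]["text"])
```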