Release v0.4.3

Author: Alex O'Connell
Date:   2025-11-02 12:58:36 -05:00
parent ca6050b6d5
commit 08d3c6d2ee
3 changed files with 3 additions and 2 deletions


@@ -158,6 +158,7 @@ python3 train.py \
 ## Version History
 | Version | Description |
 |---------|-------------|
+| v0.4.3 | Fix an issue with the integration not creating model configs properly during setup |
 | v0.4.2 | Fix the following issues: not correctly setting default model settings during initial setup, non-integers being allowed in numeric config fields, being too strict with finish_reason requirements, and not letting the user clear the active LLM API |
 | v0.4.1 | Fix an issue with using Llama.cpp models downloaded from HuggingFace |
 | v0.4 | Rewrite integration to support tool calling models/agentic tool use loop, voice streaming, multiple config sub-entries per backend, and dynamic llama.cpp processor selection |
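The v0.4.2 entry above mentions rejecting non-integer input in numeric config fields. As a hedged illustration only (not the integration's actual validation code, and with a made-up option key), a Home Assistant config flow built on voluptuous could enforce that with a strict validator:

import voluptuous as vol

def strict_int(value):
    # Accept true integers and integer-valued strings; reject floats, bools, and text.
    if isinstance(value, bool):
        raise vol.Invalid("expected an integer")
    if isinstance(value, int):
        return value
    if isinstance(value, str):
        try:
            return int(value.strip())
        except ValueError:
            pass
    raise vol.Invalid("expected an integer")

# The "max_tokens" key is illustrative, not one of the integration's real option names.
OPTIONS_SCHEMA = vol.Schema({vol.Required("max_tokens", default=512): strict_int})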


@@ -337,5 +337,5 @@ def option_overrides(backend_type: str) -> dict[str, Any]:
     },
 }
-INTEGRATION_VERSION = "0.4.2"
+INTEGRATION_VERSION = "0.4.3"
 EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.16+b6153"
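The EMBEDDED_LLAMA_CPP_PYTHON_VERSION constant shown as context above carries a PEP 440 local version tag ("+b6153"). As a sketch of how such a constant might be consumed (an assumption, not code from this repository), the installed llama-cpp-python build could be compared against it to decide whether the bundled wheel needs (re)installing:

from importlib.metadata import PackageNotFoundError, version as installed_version
from packaging.version import Version

EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.16+b6153"  # value from the diff above

def needs_bundled_wheel() -> bool:
    # Assumes the distribution is published as "llama-cpp-python"; returns True
    # when it is missing or older than the embedded build.
    try:
        current = Version(installed_version("llama-cpp-python"))
    except PackageNotFoundError:
        return True
    return current < Version(EMBEDDED_LLAMA_CPP_PYTHON_VERSION)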


@@ -1,7 +1,7 @@
 {
   "domain": "llama_conversation",
   "name": "Local LLMs",
-  "version": "0.4.2",
+  "version": "0.4.3",
   "codeowners": ["@acon96"],
   "config_flow": true,
   "dependencies": ["conversation"],