diff --git a/README.md b/README.md
index dab82e3..d81d469 100644
--- a/README.md
+++ b/README.md
@@ -150,6 +150,7 @@ In order to facilitate running the project entirely on the system where Home Ass
 ## Version History
 | Version | Description |
 |---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| v0.3.8 | Update llama.cpp, remove think blocks from "thinking" models, fix wheel detection for some Intel CPUs, Fixes for compatibility with latest Home Assistant version (2025.4), other small bug fixes |
 | v0.3.7 | Update llama.cpp version to support newer models, Update minimum Home Assistant version to 2024.12.3, Add German In-Context Learning examples, Fix multi-turn use, Fix an issue with webcolors |
 | v0.3.6 | Small llama.cpp backend fixes |
 | v0.3.5 | Fix for llama.cpp backend installation, Fix for Home LLM v1-3 API parameters, add Polish ICL examples |
diff --git a/custom_components/llama_conversation/const.py b/custom_components/llama_conversation/const.py
index afd080a..9751598 100644
--- a/custom_components/llama_conversation/const.py
+++ b/custom_components/llama_conversation/const.py
@@ -309,6 +309,14 @@ DEFAULT_OPTIONS = types.MappingProxyType(
 )
 
 OPTIONS_OVERRIDES = {
+    "home-llama-3.2": {
+        CONF_PROMPT: DEFAULT_PROMPT_BASE_LEGACY,
+        CONF_PROMPT_TEMPLATE: PROMPT_TEMPLATE_LLAMA3,
+        CONF_USE_IN_CONTEXT_LEARNING_EXAMPLES: False,
+        CONF_SERVICE_CALL_REGEX: FINE_TUNED_SERVICE_CALL_REGEX,
+        CONF_TOOL_FORMAT: TOOL_FORMAT_MINIMAL,
+        CONF_CONTEXT_LENGTH: 131072,
+    },
     "home-3b-v3": {
         CONF_PROMPT: DEFAULT_PROMPT_BASE_LEGACY,
         CONF_PROMPT_TEMPLATE: PROMPT_TEMPLATE_ZEPHYR,
@@ -395,5 +403,5 @@
     },
 }
 
-INTEGRATION_VERSION = "0.3.7"
+INTEGRATION_VERSION = "0.3.8"
 EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.5"
\ No newline at end of file
diff --git a/custom_components/llama_conversation/manifest.json b/custom_components/llama_conversation/manifest.json
index 80ef3d1..342fac8 100644
--- a/custom_components/llama_conversation/manifest.json
+++ b/custom_components/llama_conversation/manifest.json
@@ -1,7 +1,7 @@
 {
   "domain": "llama_conversation",
   "name": "Local LLM Conversation",
-  "version": "0.3.7",
+  "version": "0.3.8",
   "codeowners": ["@acon96"],
   "config_flow": true,
   "dependencies": ["conversation"],