Release v0.3.11

Alex O'Connell
2025-09-15 22:03:35 -04:00
parent dfd7f2ba9b
commit fc49f61a49
4 changed files with 5 additions and 4 deletions

@@ -162,6 +162,7 @@ In order to facilitate running the project entirely on the system where Home Assistant
## Version History
| Version | Description |
|---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| v0.3.11 | Bug-fixes and llama.cpp version update |
| v0.3.10 | Add support for the OpenAI "Responses" API endpoint, Update llama.cpp version, Fix for breaking change in HA version 2025.7.0 |
| v0.3.9 | Update llama.cpp version, fix installation bugs, fix conversation history not working |
| v0.3.8 | Update llama.cpp, remove think blocks from "thinking" models, fix wheel detection for some Intel CPUs, Fixes for compatibility with latest Home Assistant version (2025.4), other small bug fixes |

@@ -406,5 +406,5 @@ OPTIONS_OVERRIDES = {
},
}
-INTEGRATION_VERSION = "0.3.10"
-EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.11"
+INTEGRATION_VERSION = "0.3.11"
+EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.16"
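
The hunk above bumps two independent constants: the integration's own version and the bundled llama-cpp-python version. Below is a minimal, hypothetical sketch of how such constants might be consumed, for example to name the prebuilt wheel that matches the running interpreter; the function name and the wheel naming scheme are assumptions for illustration, not the integration's actual code.

```python
# Hypothetical illustration only: how a bumped embedded llama-cpp-python
# version constant could be used to name a matching prebuilt wheel for the
# running interpreter. All names and the naming scheme are assumptions.
import platform
import sys

INTEGRATION_VERSION = "0.3.11"
EMBEDDED_LLAMA_CPP_PYTHON_VERSION = "0.3.16"

def expected_wheel_filename() -> str:
    """Return a plausible wheel filename for the embedded llama-cpp-python."""
    py_tag = f"cp{sys.version_info.major}{sys.version_info.minor}"  # e.g. "cp312"
    arch = platform.machine().lower()                               # e.g. "x86_64"
    return (
        f"llama_cpp_python-{EMBEDDED_LLAMA_CPP_PYTHON_VERSION}"
        f"-{py_tag}-{py_tag}-linux_{arch}.whl"
    )

print(f"integration {INTEGRATION_VERSION} expects {expected_wheel_filename()}")
```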

@@ -1,7 +1,7 @@
{
"domain": "llama_conversation",
"name": "Local LLM Conversation",
"version": "0.3.10",
"version": "0.3.11",
"codeowners": ["@acon96"],
"config_flow": true,
"dependencies": ["conversation"],

@@ -1,6 +1,6 @@
#!/bin/bash
-VERSION_TO_BUILD="v0.3.10"
+VERSION_TO_BUILD="v0.3.16"
# make python 11 wheels
# docker run -it --rm \