Merge branch 'main' into develop

Author: Alex O'Connell
Date: 2026-01-04 10:06:28 -05:00
3 changed files with 5 additions and 4 deletions

@@ -75,6 +75,7 @@ If you want to fine-tune a model yourself, see the details on how to do it in th
 ## Version History
 | Version | Description |
 |---------|-------------|
+| v0.4.6 | New dataset supporting proper tool calling, Add Anthropic "messages" style API support, Add on-disk caching for Llama.cpp backend |
 | v0.4.5 | Add support for AI Task entities, Replace custom Ollama API implementation with the official `ollama-python` package to avoid future compatibility issues, Support multiple LLM APIs at once, Fix issues in tool call handling for various backends |
 | v0.4.4 | Fix issue with OpenAI backends appending `/v1` to all URLs, and fix an issue with tools being serialized into the system prompt. |
 | v0.4.3 | Fix an issue with the integration not creating model configs properly during setup |
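
The v0.4.6 entry adds support for Anthropic's "messages"-style API. As a rough illustration of the request shape such a backend would send (the endpoint, headers, and payload layout follow Anthropic's public Messages API; the model name, prompt, and credential below are placeholders, not values from this commit):

```python
# Minimal sketch of an Anthropic "messages"-style chat request.
# Endpoint, headers, and payload shape follow Anthropic's public
# Messages API; model, prompt, and API key are placeholders.
import json
import urllib.request

payload = {
    "model": "claude-3-5-haiku-latest",  # placeholder model name
    "max_tokens": 256,
    "system": "You are a helpful smart home assistant.",
    "messages": [
        {"role": "user", "content": "Turn off the kitchen lights."},
    ],
}

request = urllib.request.Request(
    "https://api.anthropic.com/v1/messages",  # or a compatible local server
    data=json.dumps(payload).encode("utf-8"),
    headers={
        "content-type": "application/json",
        "x-api-key": "YOUR_API_KEY",  # placeholder credential
        "anthropic-version": "2023-06-01",
    },
)
with urllib.request.urlopen(request) as response:
    reply = json.loads(response.read())
    # The assistant reply arrives as a list of content blocks.
    print(reply["content"][0]["text"])
```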

@@ -1,7 +1,7 @@
 {
   "domain": "llama_conversation",
   "name": "Local LLMs",
-  "version": "0.4.5",
+  "version": "0.4.6",
   "codeowners": ["@acon96"],
   "config_flow": true,
   "dependencies": ["conversation", "ai_task"],

@@ -373,12 +373,12 @@
"model_backend": { "model_backend": {
"options": { "options": {
"llama_cpp_python": "Llama.cpp", "llama_cpp_python": "Llama.cpp",
"generic_openai": "Generic OpenAI Compatible API", "generic_openai": "OpenAI Compatible 'Conversations' API",
"generic_openai_responses": "Generic OpenAPI Compatible Responses API", "generic_openai_responses": "OpenAPI Compatible 'Responses' API",
"llama_cpp_server": "Llama.cpp Server", "llama_cpp_server": "Llama.cpp Server",
"ollama": "Ollama API", "ollama": "Ollama API",
"text-generation-webui_api": "text-generation-webui API", "text-generation-webui_api": "text-generation-webui API",
"anthropic": "Anthropic API" "anthropic": "Anthropic Compatible 'Messages' API"
} }
}, },
"text_generation_webui_chat_mode": { "text_generation_webui_chat_mode": {