Merge branch 'main' into develop

Alex O'Connell committed 2026-01-04 10:06:28 -05:00
3 changed files with 5 additions and 4 deletions


@@ -75,6 +75,7 @@ If you want to fine-tune a model yourself, see the details on how to do it in th
 ## Version History
 | Version | Description |
 |---------|-------------|
+| v0.4.6 | New dataset supporting proper tool calling, Add Anthropic "messages" style API support, Add on-disk caching for Llama.cpp backend |
 | v0.4.5 | Add support for AI Task entities, Replace custom Ollama API implementation with the official `ollama-python` package to avoid future compatibility issues, Support multiple LLM APIs at once, Fix issues in tool call handling for various backends |
 | v0.4.4 | Fix an issue with OpenAI backends appending `/v1` to all URLs, Fix an issue with tools being serialized into the system prompt |
 | v0.4.3 | Fix an issue with the integration not creating model configs properly during setup |
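
For context on the Anthropic "messages" style API support noted in the v0.4.6 row: the sketch below shows the general shape of such a request, following the publicly documented Anthropic Messages API. The endpoint URL, API key, and model name are illustrative placeholders, not values taken from this commit or repository.

```python
# Minimal sketch of an Anthropic-style "messages" request (illustrative only).
# The URL, API key, and model name below are placeholders, not repo values.
import requests

response = requests.post(
    "https://api.anthropic.com/v1/messages",  # or a compatible local endpoint
    headers={
        "x-api-key": "YOUR_API_KEY",           # placeholder credential
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
    },
    json={
        "model": "claude-3-5-haiku-latest",    # placeholder model name
        "max_tokens": 256,
        "messages": [
            {"role": "user", "content": "Turn on the kitchen lights."},
        ],
    },
)

# On success, the reply text lives under content[0].text in the response body.
print(response.json()["content"][0]["text"])
```

Any backend exposing this same request shape is what the "Anthropic Compatible 'Messages' API" option renamed in the translations diff below refers to.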


@@ -1,7 +1,7 @@
 {
   "domain": "llama_conversation",
   "name": "Local LLMs",
-  "version": "0.4.5",
+  "version": "0.4.6",
   "codeowners": ["@acon96"],
   "config_flow": true,
   "dependencies": ["conversation", "ai_task"],


@@ -373,12 +373,12 @@
"model_backend": {
"options": {
"llama_cpp_python": "Llama.cpp",
"generic_openai": "Generic OpenAI Compatible API",
"generic_openai_responses": "Generic OpenAPI Compatible Responses API",
"generic_openai": "OpenAI Compatible 'Conversations' API",
"generic_openai_responses": "OpenAPI Compatible 'Responses' API",
"llama_cpp_server": "Llama.cpp Server",
"ollama": "Ollama API",
"text-generation-webui_api": "text-generation-webui API",
"anthropic": "Anthropic API"
"anthropic": "Anthropic Compatible 'Messages' API"
}
},
"text_generation_webui_chat_mode": {