Mirror of https://github.com/acon96/home-llm.git (synced 2026-01-09 21:58:00 -05:00)
Adds a new backend for the Anthropic API alongside the existing backends (Ollama, OpenAI, llama.cpp, etc.).

Features:
- Support for the official Anthropic API (api.anthropic.com)
- Custom base URL support for Anthropic-compatible servers
- Streaming responses via the Messages API
- Tool calling support with Home Assistant integration
- Vision support for image attachments

Files changed:
- backends/anthropic.py: New AnthropicAPIClient implementation
- const.py: Added backend type and configuration constants
- __init__.py: Registered the new backend
- config_flow.py: Added connection schema and options
- manifest.json: Added anthropic>=0.40.0 dependency
- translations/en.json: Added UI labels
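As a rough illustration of what the streaming path described above could look like, the sketch below uses the anthropic SDK's Messages streaming API with an optional custom base URL. The class name AnthropicAPIClient matches the commit description, but the method names, parameters, and default model here are assumptions for illustration, not the integration's actual code.

    import anthropic

    class AnthropicAPIClient:
        """Illustrative client: streams replies from the Anthropic Messages API."""

        def __init__(self, api_key: str, base_url: str | None = None,
                     model: str = "claude-3-5-haiku-latest"):
            # base_url allows pointing at an Anthropic-compatible server
            # instead of the default api.anthropic.com endpoint.
            kwargs = {"api_key": api_key}
            if base_url:
                kwargs["base_url"] = base_url
            self._client = anthropic.Anthropic(**kwargs)
            self._model = model

        def stream_reply(self, system_prompt: str, messages: list[dict],
                         max_tokens: int = 1024):
            # messages follow the Messages API shape, e.g.
            # [{"role": "user", "content": "turn on the kitchen lights"}]
            with self._client.messages.stream(
                model=self._model,
                max_tokens=max_tokens,
                system=system_prompt,
                messages=messages,
            ) as stream:
                for text in stream.text_stream:
                    yield text

    # Example usage (hypothetical values):
    client = AnthropicAPIClient(api_key="sk-ant-...")
    for chunk in client.stream_reply(
        "You are a Home Assistant voice assistant.",
        [{"role": "user", "content": "Turn off the porch light."}],
    ):
        print(chunk, end="")

The real backend additionally wires Home Assistant tool calling and image attachments into the same request; in the Messages API those would map to the tools parameter and image content blocks, respectively.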
manifest.json (19 lines, 493 B, JSON)
{
  "domain": "llama_conversation",
  "name": "Local LLMs",
  "version": "0.4.5",
  "codeowners": ["@acon96"],
  "config_flow": true,
  "dependencies": ["conversation", "ai_task"],
  "after_dependencies": ["assist_pipeline", "intent"],
  "documentation": "https://github.com/acon96/home-llm",
  "integration_type": "service",
  "iot_class": "local_polling",
  "requirements": [
    "huggingface-hub>=0.23.0",
    "webcolors>=24.8.0",
    "ollama>=0.5.1",
    "anthropic>=0.40.0"
  ]
}