properly handle colons in ollama model names

Author: Alex O'Connell
Date: 2024-03-05 17:39:41 -05:00
parent 5a51c8eb7b
commit 569f6e848a
4 changed files with 24 additions and 12 deletions


@@ -7,13 +7,16 @@ assignees: ''
 ---
+
+***Please do not report issues with the model generating incorrect output. This includes any instance where the model responds with `Failed to run: ...` or outputs badly formatted responses. If you are having trouble getting the correct output from the model, please open a Discussion thread instead.***
+
 **Describe the bug**
 A clear and concise description of what the bug is.
 
 **Expected behavior**
 A clear and concise description of what you expected to happen.
 
 **Logs**
 If applicable, please upload any error or debug logs output by Home Assistant.
-Please do not report issues with the model generating incorrect output. If you are having trouble getting the correct output from the model, please open a Discussion thread instead.
+
 ```
 Paste logs here
 ```


@@ -7,13 +7,13 @@ assignees: ''
 ---
+***Please do not request features for the model as an issue. You can refer to the pinned Discussion thread to make feature requests for the model/dataset.***
+
 **Please describe what you are trying to do with the component**
 A clear and concise description of what the missing capability is. (Ex. There is a sampling parameter that I want to use for my model that isn't exposed.)
 
 **Describe the solution you'd like**
 A clear and concise description of what you want to happen.
 
 **Additional context**
 Add any other context or screenshots about the feature request here.
-
-Please do not request features for the model as an issue. You can refer to the pinned Discussion thread to make feature requests for the model/dataset.


@@ -746,7 +746,11 @@ class OllamaAPIAgent(LLaMAAgent):
             _LOGGER.debug("Connection error was: %s", repr(ex))
             raise ConfigEntryNotReady("There was a problem connecting to the remote server") from ex
 
-        if not any([ x["name"].split(":")[0] == self.model_name for x in currently_downloaded_result.json()["models"]]):
+        model_names = [ x["name"] for x in currently_downloaded_result.json()["models"] ]
+        if ":" in self.model_name:
+            if not any([ name == self.model_name for name in model_names ]):
+                raise ConfigEntryNotReady(f"Ollama server does not have the provided model: {self.model_name}")
+        elif not any([ name.split(":")[0] == self.model_name for name in model_names ]):
             raise ConfigEntryNotReady(f"Ollama server does not have the provided model: {self.model_name}")
 
     def _chat_completion_params(self, conversation: dict) -> (str, dict):
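The rewritten startup check distinguishes Ollama's `name:tag` model identifiers: a configured name containing a colon must match a downloaded model exactly, while a bare name matches any tag of that model. A minimal sketch of that matching rule in isolation (the helper name and sample data are illustrative, not part of the commit):

```python
def ollama_model_available(requested: str, downloaded: list[str]) -> bool:
    """Apply the commit's matching rule for Ollama 'name:tag' identifiers."""
    if ":" in requested:
        # Tagged request, e.g. "mistral:7b-instruct": require an exact match.
        return requested in downloaded
    # Bare request, e.g. "mistral": any downloaded tag of that model counts.
    return any(name.split(":")[0] == requested for name in downloaded)

downloaded = ["mistral:7b-instruct", "llama2:latest"]
assert ollama_model_available("mistral:7b-instruct", downloaded)
assert ollama_model_available("llama2", downloaded)
assert not ollama_model_available("mistral:latest", downloaded)  # wrong tag
```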


@@ -437,8 +437,13 @@ class ConfigFlow(BaseLlamaConversationConfigFlow, config_entries.ConfigFlow, dom
         models = models_result.json()["models"]
         for model in models:
-            if model["name"].split(":")[0] == self.model_config[CONF_CHAT_MODEL]:
+            model_name = self.model_config[CONF_CHAT_MODEL]
+            if ":" in model_name:
+                if model["name"] == model_name:
+                    return ""
+            elif model["name"].split(":")[0] == model_name:
                 return ""
 
         return "missing_model_api"