Merge pull request #312 from mad3max3/develop

Fix config_flow set of model_config
Alex O'Connell
2025-10-26 21:41:59 -04:00
committed by GitHub
2 changed files with 5 additions and 3 deletions

@@ -191,7 +191,9 @@ class GenericOpenAIAPIClient(LocalLLMClient):
         return endpoint, request_params

     def _extract_response(self, response_json: dict, llm_api: llm.APIInstance | None, user_input: conversation.ConversationInput) -> Tuple[Optional[str], Optional[List[llm.ToolInput]]]:
-        if len(response_json["choices"]) == 0: # finished
+        if "choices" not in response_json or len(response_json["choices"]) == 0: # finished
+            _LOGGER.warning("Response missing or empty 'choices'. Keys present: %s. Full response: %s",
+                            list(response_json.keys()), response_json)
             return None, None

         choice = response_json["choices"][0]
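
The new guard matters because OpenAI-compatible backends that fail a request often return an error payload such as {"error": {...}} with no "choices" key at all; the old line then raised a KeyError instead of ending the turn cleanly. A minimal standalone sketch of the same guard, with a hypothetical extract_text helper and invented sample responses:

    import logging

    _LOGGER = logging.getLogger(__name__)

    def extract_text(response_json: dict) -> str | None:
        """Return the assistant text, or None when the response carries no choices."""
        # Error responses often have no "choices" key at all, so check for the
        # key before checking its length; indexing it blindly raises KeyError.
        if "choices" not in response_json or len(response_json["choices"]) == 0:
            _LOGGER.warning("Response missing or empty 'choices'. Keys present: %s. Full response: %s",
                            list(response_json.keys()), response_json)
            return None
        return response_json["choices"][0]["message"]["content"]

    # Before the fix, the second call crashed with KeyError; now it returns None.
    print(extract_text({"choices": [{"message": {"content": "Lights are on."}}]}))
    print(extract_text({"error": {"message": "model not loaded", "code": 500}}))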

@@ -1139,8 +1139,8 @@ class LocalLLMSubentryFlowHandler(ConfigSubentryFlow):
             selected_default_options[CONF_PROMPT] = build_prompt_template(
                 selected_language, str(selected_default_options.get(CONF_PROMPT, DEFAULT_PROMPT))
             )
-            self.model_config = selected_default_options
+            self.model_config = {**selected_default_options, **self.model_config}

             schema = vol.Schema(
                 local_llama_config_option_schema(
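
The fix works because later unpackings win in a dict literal: {**selected_default_options, **self.model_config} takes the language-specific defaults only for keys the flow has not already set, whereas the old plain assignment threw away everything collected earlier in the flow. A small sketch of that merge rule, with option names and values invented for illustration:

    defaults = {"prompt": "You are a helpful assistant.", "max_tokens": 128, "temperature": 0.7}
    model_config = {"max_tokens": 512}  # value the user already configured

    old = defaults                          # old line: defaults clobber user settings
    new = {**defaults, **model_config}      # new line: user settings win, defaults fill gaps

    assert old["max_tokens"] == 128   # user's 512 was silently lost
    assert new["max_tokens"] == 512   # user's value survives the merge
    assert new["temperature"] == 0.7  # unset keys still fall back to the default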