Mirror of https://github.com/acon96/home-llm.git (synced 2026-01-09 21:58:00 -05:00)
Merge pull request #312 from mad3max3/develop
Fix config_flow set of model_config
@@ -191,7 +191,9 @@ class GenericOpenAIAPIClient(LocalLLMClient):
         return endpoint, request_params
 
     def _extract_response(self, response_json: dict, llm_api: llm.APIInstance | None, user_input: conversation.ConversationInput) -> Tuple[Optional[str], Optional[List[llm.ToolInput]]]:
-        if len(response_json["choices"]) == 0: # finished
+        if "choices" not in response_json or len(response_json["choices"]) == 0: # finished
+            _LOGGER.warning("Response missing or empty 'choices'. Keys present: %s. Full response: %s",
+                            list(response_json.keys()), response_json)
             return None, None
 
         choice = response_json["choices"][0]
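For context on the first hunk: some OpenAI-compatible backends return an error object (or an otherwise empty completion) instead of a "choices" array, and the added guard logs that case and returns nothing instead of raising a KeyError. A minimal standalone sketch of the same pattern, assuming a plain OpenAI-style chat completion payload (the extract_text helper below is hypothetical, not part of the integration):

import logging
from typing import Optional

_LOGGER = logging.getLogger(__name__)

def extract_text(response_json: dict) -> Optional[str]:
    # Hypothetical helper mirroring the patched _extract_response guard.
    # Bail out on a missing or empty "choices" array, e.g. an error payload
    # such as {"error": {"message": "model not loaded"}}.
    if "choices" not in response_json or len(response_json["choices"]) == 0:
        _LOGGER.warning("Response missing or empty 'choices'. Keys present: %s",
                        list(response_json.keys()))
        return None
    # Normal case: take the first choice's message content.
    return response_json["choices"][0].get("message", {}).get("content")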
@@ -1139,8 +1139,8 @@ class LocalLLMSubentryFlowHandler(ConfigSubentryFlow):
             selected_default_options[CONF_PROMPT] = build_prompt_template(
                 selected_language, str(selected_default_options.get(CONF_PROMPT, DEFAULT_PROMPT))
             )
 
-            self.model_config = selected_default_options
+            self.model_config = {**selected_default_options, **self.model_config}
 
             schema = vol.Schema(
                 local_llama_config_option_schema(
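For context on the second hunk: the merge {**selected_default_options, **self.model_config} fills in default options without clobbering values already stored in self.model_config, because later unpacks win when keys collide. A tiny illustration with made-up keys:

selected_default_options = {"prompt": "default prompt", "temperature": 0.1}
model_config = {"temperature": 0.7}  # value the user already set
model_config = {**selected_default_options, **model_config}
# Later unpacks take precedence, so the user's value survives:
# {"prompt": "default prompt", "temperature": 0.7}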