diff --git a/apps/stable_diffusion/src/utils/utils.py b/apps/stable_diffusion/src/utils/utils.py
index 417c01ff..de3c5e32 100644
--- a/apps/stable_diffusion/src/utils/utils.py
+++ b/apps/stable_diffusion/src/utils/utils.py
@@ -32,6 +32,7 @@ from diffusers.pipelines.stable_diffusion.convert_from_ckpt import (
 import requests
 from io import BytesIO
 from omegaconf import OmegaConf
+from cpuinfo import get_cpu_info
 
 
 def get_extended_name(model_name):
@@ -450,8 +451,12 @@ def get_available_devices():
         except:
             print(f"{driver_name} devices are not available.")
         else:
+            cpu_name = get_cpu_info()["brand_raw"]
             for i, device in enumerate(device_list_dict):
-                device_list.append(f"{device['name']} => {driver_name}://{i}")
+                device_name = (
+                    cpu_name if device["name"] == "default" else device["name"]
+                )
+                device_list.append(f"{device_name} => {driver_name}://{i}")
         return device_list
 
     set_iree_runtime_flags()
diff --git a/apps/stable_diffusion/web/ui/stablelm_ui.py b/apps/stable_diffusion/web/ui/stablelm_ui.py
index 42b56f34..7c935c80 100644
--- a/apps/stable_diffusion/web/ui/stablelm_ui.py
+++ b/apps/stable_diffusion/web/ui/stablelm_ui.py
@@ -132,6 +132,8 @@ with gr.Blocks(title="Chatbot") as stablelm_chat:
         )
         supported_devices = available_devices
         enabled = len(supported_devices) > 0
+        # show cpu-task device first in list for chatbot
+        supported_devices = supported_devices[-1:] + supported_devices[:-1]
         device = gr.Dropdown(
             label="Device",
             value=supported_devices[0]
diff --git a/requirements.txt b/requirements.txt
index f8e1efc5..7155d503 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -29,6 +29,7 @@ pytorch_lightning # for runwayml models
 tk
 pywebview
 sentencepiece
+py-cpuinfo
 
 # Keep PyInstaller at the end. Sometimes Windows Defender flags it but most folks can continue even if it errors
 pefile
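
For reviewers who want to try the behaviour outside the app, here is a minimal sketch of the two changes, assuming `py-cpuinfo` is installed; the sample device dict and the `local-task` driver string below are made up for illustration and are not taken from the diff.

```python
# Standalone sketch (not part of the diff) of the two behaviours this PR adds.
# Assumes py-cpuinfo is installed; the device entry and driver name are sample data.
from cpuinfo import get_cpu_info

cpu_name = get_cpu_info()["brand_raw"]  # human-readable CPU brand string

# 1) Replace the generic "default" CPU device label with the real brand name.
sample_devices = [{"name": "default"}]  # hypothetical query result
driver_name = "local-task"              # hypothetical driver name
device_list = []
for i, device in enumerate(sample_devices):
    device_name = cpu_name if device["name"] == "default" else device["name"]
    device_list.append(f"{device_name} => {driver_name}://{i}")

# 2) Rotate the last entry to the front, as the chatbot UI now does, so the
#    cpu-task device becomes the dropdown's first (default) choice.
supported_devices = device_list[-1:] + device_list[:-1]
print(supported_devices)
```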