Procyon parameters (#146)

j-lin-lmg
2025-06-02 12:35:09 -07:00
committed by GitHub
parent 738781b717
commit ba29393f00
5 changed files with 124 additions and 63 deletions

View File

@@ -32,46 +32,57 @@ logging.basicConfig(
ENCODER_TO_PRESET = {
"h264_cpu": {
"file": f"{SCRIPT_DIR}\\presets\\h264_bigbuckbunny_1080p_cpu_test.json",
"name": "\"CPU 1080p BBB H264\""
"name": "1080p_bbb_h264",
"api": "cpu"
},
"h265_cpu": {
"file": f"{SCRIPT_DIR}\\presets\\h265_bigbuckbunny_1080p_cpu_test.json",
"name": "\"CPU 1080p BBB H265\""
"name": "1080p_bbb_h265",
"api": "cpu"
},
"av1_cpu": {
"file": f"{SCRIPT_DIR}\\presets\\av1-svt_bigbuckbunny_1080p_cpu_test.json",
"name": "\"CPU 1080p BBB AV1\""
"name": "1080p_bbb_av1",
"api": "cpu"
},
"h264_nvenc": {
"file": f"{SCRIPT_DIR}\\presets\\h264_nvenc_bigbuckbunny_1080p_gpu_test.json",
"name": "\"NVENC 1080p BBB H264\""
"name": "1080p_bbb_h264",
"api": "nvenc"
},
"h265_nvenc": {
"file": f"{SCRIPT_DIR}\\presets\\h265_nvenc_bigbuckbunny_1080p_gpu_test.json",
"name": "\"NVENC 1080p BBB H265\""
"name": "1080p_bbb_h265",
"api": "nvenc"
},
"av1_nvenc": {
"file": f"{SCRIPT_DIR}\\presets\\av1-nvenc_bigbuckbunny_1080p_gpu_test.json",
"name": "\"NVENC 1080p BBB AV1\""
"name": "1080p_bbb_av1",
"api": "nvenc"
},
"h264_vce": {
"file": f"{SCRIPT_DIR}\\presets\\h264-vce-bigbuckbunny_1080p_gpu_test.json",
"name": "\"AMD VCE 1080p BBB H264\""
"name": "1080p_bbb_h264",
"api": "vce"
},
"av1_vce": {
"file": f"{SCRIPT_DIR}\\presets\\av1-vce-bigbuckbunny_1080p_gpu_test.json",
"name": "\"AMD VCE 1080p BBB AV1\""
"name": "1080p_bbb_av1",
"api": "vce"
},
"h264_quicksync": {
"file": f"{SCRIPT_DIR}\\presets\\h264-quicksync_bigbuckbunny_1080p_gpu_test.json",
"name": "\"QUICKSYNC 1080p BBB H264\""
"name": "1080p_bbb_h264",
"api": "quicksync"
},
"av1_quicksync": {
"file": f"{SCRIPT_DIR}\\presets\\av1-quicksync_bigbuckbunny_1080p_gpu_test.json",
"name": "\"QUICKSYNC 1080p BBB AV1\""
"name": "1080p_bbb_av1",
"api": "quicksync"
}
}
console = logging.StreamHandler()
formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
console.setFormatter(formatter)
@@ -136,6 +147,7 @@ def main():
report = {
"test": "HandBrake Encoding",
"test_parameter": f"{ENCODER_TO_PRESET[args.encoder]['name']}",
"api": ENCODER_TO_PRESET[args.encoder]['api'],
"score": score,
"unit": "frames per second",
"version": "1.9.1",

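The net effect in this first file is that the HandBrake workload name and the encode API now travel as two separate report fields instead of one quoted display string. A minimal sketch of how the restructured mapping might be consumed, assuming only the dictionary shape shown in the diff; the `build_report` helper is invented for illustration and is not part of the commit:

```python
# Sketch only: mirrors the new ENCODER_TO_PRESET shape from the diff above.
ENCODER_TO_PRESET = {
    "h264_cpu": {
        "file": r"presets\h264_bigbuckbunny_1080p_cpu_test.json",
        "name": "1080p_bbb_h264",
        "api": "cpu",
    },
    "h264_nvenc": {
        "file": r"presets\h264_nvenc_bigbuckbunny_1080p_gpu_test.json",
        "name": "1080p_bbb_h264",
        "api": "nvenc",
    },
}

def build_report(encoder: str, score: float) -> dict:
    """Assemble the report the way the new code does: workload and API split."""
    preset = ENCODER_TO_PRESET[encoder]
    return {
        "test": "HandBrake Encoding",
        "test_parameter": preset["name"],  # same workload name for both entries
        "api": preset["api"],              # the encode path that produced the score
        "score": score,
        "unit": "frames per second",
    }

# The same workload can now be filtered and compared across encode APIs:
for encoder in ("h264_cpu", "h264_nvenc"):
    report = build_report(encoder, 0.0)
    print(report["test_parameter"], report["api"])
```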
View File

@@ -1,4 +1,5 @@
"""UL Procyon Computer Vision test script"""
+# pylint: disable=no-name-in-module
from argparse import ArgumentParser
import logging
from pathlib import Path
@@ -52,72 +53,83 @@ BENCHMARK_CONFIG = {
"device_name": "CPU",
# TODO: Find a good way to report the CPU name here.
"device_id": "CPU",
"test_name": "WinML CPU (FLOAT32)"
"test_name": "cpu_float32",
"api": "winml"
},
"AMD_GPU0": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def\"",
"process_name": "WinML.exe",
"device_name": list(WINML_DEVICES.keys())[0],
"device_id": list(WINML_DEVICES.values())[0],
"test_name": "WinML GPU (FLOAT32)"
"test_name": "gpu_float32",
"api": "winml"
},
"AMD_GPU1": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def\"",
"process_name": "WinML.exe",
"device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
"device_id": list(WINML_DEVICES.values())[1] if len(list(WINML_DEVICES.values())) > 1 else list(WINML_DEVICES.values())[0],
"test_name": "WinML GPU (FLOAT32)"
"test_name": "gpu_float32",
"api": "winml"
},
"Intel_CPU": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_cpu.def\"",
"process_name": "OpenVino.exe",
"device_id": "CPU",
"device_name": OPENVINO_DEVICES["CPU"],
"test_name": "Intel OpenVINO CPU (FLOAT32)"
"test_name": "cpu_float32",
"api": "openvino"
},
"Intel_GPU0": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def\"",
"process_name": "OpenVino.exe",
"device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
"test_name": "Intel OpenVINO GPU 0 (FLOAT32)"
"test_name": "gpu_float32",
"api": "openvino"
},
"Intel_GPU1": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def\"",
"process_name": "OpenVino.exe",
"device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
"test_name": "Intel OpenVINO GPU 1 (FLOAT32)"
"test_name": "gpu_float32",
"api": "openvino"
},
"Intel_NPU": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_npu.def\"",
"process_name": "OpenVino.exe",
"device_id": "NPU",
"device_name": OPENVINO_DEVICES.get("NPU", "None"),
"test_name": "Intel OpenVINO NPU (FLOAT32)"
"test_name": "npu_float32",
"api": "openvino"
},
"NVIDIA_GPU": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_tensorrt.def\"",
"device_id": "cuda:0",
"device_name": CUDA_DEVICES.get("cuda:0"),
"process_name": "TensorRT.exe",
"test_name": "NVIDIA TensorRT (FLOAT32)"
"test_name": "gpu_float32",
"api": "tensorrt"
},
"Qualcomm_HTP": {
"config": f"\"{CONFIG_DIR}\\ai_computer_vision_snpe.def\"",
"device_id": "CPU",
"device_name": "CPU",
"process_name": "SNPE.exe",
"test_name": "Qualcomm SNPE (INTEGER)"
"test_name": "htp_integer",
"api": "snpe"
},
}
RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
def setup_logging():
"""setup logging"""
-setup_log_directory(LOG_DIR)
+setup_log_directory(str(LOG_DIR))
logging.basicConfig(filename=LOG_DIR / "harness.log",
format=DEFAULT_LOGGING_FORMAT,
datefmt=DEFAULT_DATE_FORMAT,
@@ -209,6 +221,7 @@ try:
"end_time": seconds_to_milliseconds(end_time),
"test": "Procyon AI CV",
"test_parameter": BENCHMARK_CONFIG[args.engine]["test_name"],
"api": BENCHMARK_CONFIG[args.engine]["api"],
"test_version": find_test_version(),
"device_name": BENCHMARK_CONFIG[args.engine]["device_name"],
"procyon_version": find_procyon_version(),
@@ -216,7 +229,7 @@ try:
"score": score
}
-write_report_json(LOG_DIR, "report.json", report)
+write_report_json(str(LOG_DIR), "report.json", report)
except Exception as e:
logging.error("Something went wrong running the benchmark!")
logging.exception(e)

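Besides the new `api` field, this file wraps `LOG_DIR` (a `pathlib.Path`) in `str()` before handing it to the shared helpers. The helpers' signatures are not shown in the diff, but a plausible motivation is that they were written against string paths; a small illustrative sketch with an invented `setup_log_directory` standing in for the real one:

```python
from pathlib import Path

# Hypothetical helper written against string paths; the real
# setup_log_directory is not shown in the diff.
def setup_log_directory(log_dir: str) -> None:
    Path(log_dir).mkdir(parents=True, exist_ok=True)
    print("logging to " + log_dir)  # string concatenation assumes str

LOG_DIR = Path(__file__).resolve().parent / "run"

# setup_log_directory(LOG_DIR)     # TypeError on the concatenation above
setup_log_directory(str(LOG_DIR))  # explicit conversion at the call site
```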
View File

@@ -1,4 +1,5 @@
"""UL Procyon AI Image Generation test script"""
+# pylint: disable=no-name-in-module
from argparse import ArgumentParser
import logging
from pathlib import Path
@@ -44,93 +45,107 @@ BENCHMARK_CONFIG = {
"process_name": "ort-directml.exe",
"device_name": list(WINML_DEVICES.keys())[0],
"device_id": "0",
"test_name": "ONNX Stable Diffusion FP16"
"test_name": "stable_diffusion_fp16",
"api": "onnx"
},
"AMD_GPU1_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_onnxruntime.def\"",
"process_name": "ort-directml.exe",
"device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
"device_id": "1" if len(list(WINML_DEVICES.values())) > 1 else "0",
"test_name": "ONNX Stable Diffusion FP16"
"test_name": "stable_diffusion_fp16",
"api": "onnx"
},
"AMD_GPU0_XL_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def\"",
"process_name": "ort-directml.exe",
"device_name": list(WINML_DEVICES.keys())[0],
"device_id": "0",
"test_name": "ONNX Stable Diffusion FP16 XL"
"test_name": "stable_diffusion_fp16_xl",
"api": "onnx"
},
"AMD_GPU1_XL_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def\"",
"process_name": "ort-directml.exe",
"device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
"device_id": list(WINML_DEVICES.values())[1] if len(list(WINML_DEVICES.values())) > 1 else list(WINML_DEVICES.values())[0],
"test_name": "ONNX Stable Diffusion FP16 XL"
"test_name": "stable_diffusion_fp16_xl",
"api": "onnx"
},
"Intel_GPU0_INT8": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
"test_name": "Intel OpenVINO Stable Diffusion INT8"
"test_name": "stable_diffusion_int8",
"api": "openvino"
},
"Intel_GPU0_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
"test_name": "Intel OpenVINO Stable Diffusion FP16"
"test_name": "stable_diffusion_fp16",
"api": "openvino"
},
"Intel_GPU0_XL_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
"test_name": "Intel OpenVINO Stable Diffusion FP16 XL"
"test_name": "stable_diffusion_fp16_xl",
"api": "openvino"
},
"Intel_GPU1_INT8": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
"test_name": "Intel OpenVINO Stable Diffusion INT8"
"test_name": "stable_diffusion_int8",
"api": "openvino"
},
"Intel_GPU1_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
"test_name": "Intel OpenVINO Stable Diffusion FP16"
"test_name": "stable_diffusion_fp16",
"api": "openvino"
},
"Intel_GPU1_XL_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def\"",
"process_name": "openvino.exe",
"device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
"device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
"test_name": "Intel OpenVINO Stable Diffusion FP16 XL"
"test_name": "stable_diffusion_fp16_xl",
"api": "openvino"
},
"NVIDIA_GPU_INT8": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_tensorrt.def\"",
"process_name": "tensorrt.exe",
"device_id": "cuda:0",
"device_name": CUDA_DEVICES.get("cuda:0"),
"test_name": "NVIDIA TensorRT Stable Diffusion INT8"
"test_name": "stable_diffusion_int8",
"api": "tensorrt"
},
"NVIDIA_GPU_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_tensorrt.def\"",
"process_name": "tensorrt.exe",
"device_id": "cuda:0",
"device_name": CUDA_DEVICES.get("cuda:0"),
"test_name": "NVIDIA TensorRT Stable Diffusion FP16"
"test_name": "stable_diffusion_fp16",
"api": "tensorrt"
},
"NVIDIA_GPU_XL_FP16": {
"config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_tensorrt.def\"",
"process_name": "tensorrt.exe",
"device_id": "cuda:0",
"device_name": CUDA_DEVICES.get("cuda:0"),
"test_name": "NVIDIA TensorRT Stable Diffusion FP16 XL"
"test_name": "stable_diffusion_fp16_xl",
"api": "tensorrt"
}
}
RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
@@ -228,6 +243,7 @@ try:
"end_time": seconds_to_milliseconds(end_time),
"test": "Procyon AI Image Generation",
"test_parameter": BENCHMARK_CONFIG[args.engine]["test_name"],
"api": BENCHMARK_CONFIG[args.engine]["api"],
"test_version": find_test_version(),
"device_name": BENCHMARK_CONFIG[args.engine]["device_name"],
"procyon_version": find_procyon_version(),

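With `test_name` normalized (`stable_diffusion_fp16`, `stable_diffusion_int8`, ...) and the engine moved into its own `api` field, reports for one workload can be grouped across the ONNX, OpenVINO, and TensorRT back ends. A sketch assuming only the report keys shown in the diff; the scores are made up:

```python
from collections import defaultdict

# Illustrative report fragments with invented scores.
reports = [
    {"test_parameter": "stable_diffusion_fp16", "api": "onnx", "score": 1180},
    {"test_parameter": "stable_diffusion_fp16", "api": "openvino", "score": 1050},
    {"test_parameter": "stable_diffusion_fp16", "api": "tensorrt", "score": 2210},
    {"test_parameter": "stable_diffusion_fp16_xl", "api": "tensorrt", "score": 640},
]

by_workload = defaultdict(dict)
for r in reports:
    by_workload[r["test_parameter"]][r["api"]] = r["score"]

for workload, scores in sorted(by_workload.items()):
    print(workload, scores)
# stable_diffusion_fp16 {'onnx': 1180, 'openvino': 1050, 'tensorrt': 2210}
# stable_diffusion_fp16_xl {'tensorrt': 640}
```

Under the old scheme the engine was baked into the display string ("NVIDIA TensorRT Stable Diffusion FP16"), so this kind of grouping would have required string parsing.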
View File

@@ -34,63 +34,74 @@ BENCHMARK_CONFIG = {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_all.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AIImageGenerationOverallScore>(\d+)",
"test_name": "All LLM Model Text Generation ONNX"
"test_name": "all_models",
"api": "onnx"
},
"Llama_2_13B_ONNX": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama2.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationLlama2OverallScore>(\d+)",
"test_name": "LLama 2 Text Generation ONNX"
"test_name": "llama_2_13b",
"api": "onnx"
},
"Llama_3_1_8B_ONNX": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama3.1.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationLlama3OverallScore>(\d+)",
"test_name": "Llama 3.1 Text Generation ONNX"
"test_name": "llama_3_1_8b",
"api": "onnx"
},
"Mistral_7B_ONNX": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_mistral.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationMistralOverallScore>(\d+)",
"test_name": "Mistral Text Generation ONNX"
"test_name": "mistral_7b",
"api": "onnx"
},
"Phi_3_5_ONNX": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_phi.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationPhiOverallScore>(\d+)",
"test_name": "Phi Text Generation ONNX"
"test_name": "phi_3_5",
"api": "onnx"
},
"All_Models_OPENVINO": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_all_openvino.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AIImageGenerationOverallScore>(\d+)",
"test_name": "All LLM Model Text Generation OPENVINO"
"test_name": "all_models",
"api": "openvino"
},
"Llama_2_13B_OPENVINO": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama2_openvino.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationLlama2OverallScore>(\d+)",
"test_name": "LLama 2 Text Generation OPENVINO"
"test_name": "llama_2_13b",
"api": "openvino"
},
"Llama_3_1_8B_OPENVINO": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama3.1_openvino.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationLlama3OverallScore>(\d+)",
"test_name": "Llama 3.1 Text Generation OPENVINO"
"test_name": "llama_3_1_8b",
"api": "openvino"
},
"Mistral_7B_OPENVINO": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_mistral_openvino.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationMistralOverallScore>(\d+)",
"test_name": "Mistral Text Generation OPENVINO"
"test_name": "mistral_7b",
"api": "openvino"
},
"Phi_3_5_OPENVINO": {
"config": f"\"{CONFIG_DIR}\\ai_textgeneration_phi_openvino.def\"",
"process_name": "Handler.exe",
"result_regex": r"<AiTextGenerationPhiOverallScore>(\d+)",
"test_name": "Phi Text Generation OPENVINO"
"test_name": "phi_3_5",
"api": "openvino"
}
}
RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
@@ -203,7 +214,9 @@ try:
report = {
"start_time": seconds_to_milliseconds(start_time),
"end_time": seconds_to_milliseconds(end_time),
"test": test_type[0],
"test": "Procyon AI Text Generation",
"test_parameter": test_type[1]["test_name"],
"api": test_type[1]["api"],
"test_version": find_test_version(),
"procyon_version": find_procyon_version(),
"unit": "score",

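The text-generation report previously leaked the raw `BENCHMARK_CONFIG` key (e.g. `Llama_3_1_8B_OPENVINO`) into the `test` field. A small before/after sketch, assuming `test_type` is one `(key, config)` pair from `BENCHMARK_CONFIG.items()`, which matches the `test_type[0]` / `test_type[1]` indexing in the diff:

```python
# Minimal stand-in for one BENCHMARK_CONFIG entry.
BENCHMARK_CONFIG = {
    "Llama_3_1_8B_OPENVINO": {"test_name": "llama_3_1_8b", "api": "openvino"},
}
test_type = next(iter(BENCHMARK_CONFIG.items()))

# Before: the config key doubled as the test name.
old_report = {"test": test_type[0]}  # "Llama_3_1_8B_OPENVINO"

# After: a stable test name, with model and back end reported separately.
new_report = {
    "test": "Procyon AI Text Generation",
    "test_parameter": test_type[1]["test_name"],  # "llama_3_1_8b"
    "api": test_type[1]["api"],                   # "openvino"
}
print(old_report)
print(new_report)
```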
View File

@@ -10,6 +10,7 @@ import logging
SCRIPT_DIR = Path(__file__).resolve().parent
LOG_DIR = SCRIPT_DIR / "run"
def is_process_running(process_name):
"""check if given process is running"""
for process in psutil.process_iter(['pid', 'name']):
@@ -17,6 +18,7 @@ def is_process_running(process_name):
return process
return None
def regex_find_score_in_xml(result_regex):
"""Reads score from local game log"""
score_pattern = re.compile(result_regex)
@@ -30,26 +32,29 @@ def regex_find_score_in_xml(result_regex):
score_value = score_match.group(1)
return score_value
def get_install_path() -> str:
"""Gets the path to the Steam installation directory from the SteamPath registry key"""
reg_path = r"Software\UL\Procyon"
-reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, reg_path, 0, winreg.KEY_READ)
-value, _ = winreg.QueryValueEx(reg_key, "InstallDir")
+reg_key = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
+reg_path, 0, winreg.KEY_READ)
+value, _ = winreg.QueryValueEx(reg_key, "InstallDir")
return value
def find_procyon_version() -> str:
"""Gets the version of an executable located in the install path."""
install_path = get_install_path()
if not install_path:
logging.info("Installation path not found.")
-return None
+return ""
exe_path = os.path.join(install_path, "ProcyonCmd.exe")
if not os.path.exists(exe_path):
logging.info(f"Executable not found at {exe_path}")
return None
logging.info("Executable not found at %s", exe_path)
return ""
try:
# Get all file version info
@@ -61,7 +66,7 @@ def find_procyon_version() -> str:
if ms is None or ls is None:
logging.info("No FileVersionMS or FileVersionLS found.")
-return None
+return ""
# Convert to human-readable version: major.minor.build.revision
major = ms >> 16
@@ -73,29 +78,31 @@ def find_procyon_version() -> str:
return version
except Exception as e:
logging.info(f"Error retrieving version info from {exe_path}: {e}")
return None # Return None if version info retrieval fails
logging.info("Error retrieving version info from %s: %s", exe_path, e)
return "" # Return empty string if version info retrieval fails
def find_test_version() -> str:
"""Gets the version of an executable located in the chops path."""
chops_path = "C:\\ProgramData\\UL\\Procyon\\chops\\dlc\\ai-textgeneration-benchmark\\x64"
logging.info(f"The install path for the test is {chops_path}")
logging.info("The install path for the test is %s", chops_path)
if not chops_path:
logging.info("Installation path not found.")
-return None
+return ""
exe_path = os.path.join(chops_path, "Handler.exe")
if not os.path.exists(exe_path):
logging.info(f"Executable 'Handler.exe' not found at {exe_path}")
return None
logging.info("Executable 'Handler.exe' not found at %s", exe_path)
return ""
try:
-lang, codepage = win32api.GetFileVersionInfo(exe_path, "\\VarFileInfo\\Translation")[0]
+lang, codepage = win32api.GetFileVersionInfo(
+exe_path, "\\VarFileInfo\\Translation")[0]
str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
-return win32api.GetFileVersionInfo(exe_path, str_info_path)
+return str(win32api.GetFileVersionInfo(exe_path, str_info_path))
except Exception as e:
logging.info(f"Error retrieving version info from {exe_path}: {e}")
return None # Return None if version info retrieval fails
logging.info("Error retrieving version info from %s: %s", exe_path, e)
return "" # Return empty string if version info retrieval fails
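The utility changes switch the failure value of the version helpers from `None` to `""` (coercing the `GetFileVersionInfo` result to `str` along the way) and move f-string log messages to lazy `%s` formatting. With string-only returns, the report's version fields never become JSON `null` and callers need no `None` guard; a sketch of the calling pattern this enables, with `find_procyon_version` stubbed out:

```python
# Stub standing in for the real helper; returns "" on lookup failure,
# exactly as the new code does.
def find_procyon_version() -> str:
    return ""

report = {"procyon_version": find_procyon_version()}

# With a None return this would render "Procyon version: None";
# with "" it degrades to an empty field instead.
print(f"Procyon version: {report['procyon_version']}")
```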