Mirror of https://github.com/LTTLabsOSS/markbench-tests.git (synced 2026-01-09 14:07:56 -05:00)
Merge branch 'main' into jd/handheldfixes
@@ -80,7 +80,7 @@ def download_scene(scene: BlenderScene) -> None:

def copy_scene_from_network_drive(file_name, destination):
    """copy blend file from network drive"""
    network_dir = Path("\\\\labs.lmg.gg\\labs.lmg.gg\\03_ProcessingFiles\\Blender Render")
    network_dir = Path("\\\\labs.lmg.gg\\labs\\03_ProcessingFiles\\Blender Render")
    source_path = network_dir.joinpath(file_name)
    logging.info("Copying %s from %s", file_name, source_path)
    shutil.copyfile(source_path, destination)
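Nearly every hunk in this commit makes the same change: the doubled UNC segment \\labs.lmg.gg\labs.lmg.gg\... becomes \\labs.lmg.gg\labs\.... As a hedged sketch of how these network copies could also be guarded against an unreachable share (the function name and log wording are illustrative, not from the repo; the dota2 utilities later in this commit adopt a similar try/except):

import logging
import shutil
from pathlib import Path

def copy_from_network_share(share: Path, file_name: str, destination: Path) -> None:
    """Copy file_name from a UNC share, surfacing a clear error when the share is unreachable."""
    source_path = share.joinpath(file_name)
    try:
        logging.info("Copying %s from %s", file_name, source_path)
        shutil.copyfile(source_path, destination)
    except OSError as err:
        # OSError covers missing shares, offline hosts, and permission problems.
        logging.error("Network copy failed: %s", err)
        raise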
dota2/benchmark_load.cfg (new file, 11 lines)
@@ -0,0 +1,11 @@
sv_log 0
sv_alternateticks 0
dota_spectator_use_broadcaster_stats_panel 0
dota_spectator_mode 0
dota_spectator_hero_index ""
cl_showdemooverlay 0; // still broken, no other command usable to hide demo overlay
demo_usefastgoto 1
fps_max 0

playdemo replays\benchmark
sleep 20000
@@ -1,20 +1,6 @@
push_var_values

showconsole

sv_log 0
sv_alternateticks 0
dota_spectator_use_broadcaster_stats_panel 0
dota_spectator_mode 0
dota_spectator_hero_index ""
cl_showdemooverlay 0; // still broken, no other command usable to hide demo overlay
demo_usefastgoto 1
fps_max 0

playdemo replays\benchmark
sleep 20000

//// DISABLE ALL CONSOLE SPEW SOURCES FOR A CLEAN SCRIPT OUTPUT ( VALVE PLZ GIFF SINGLE CMD FOR IT )
/// DISABLE ALL CONSOLE SPEW SOURCES FOR A CLEAN SCRIPT OUTPUT ( VALVE PLZ GIFF SINGLE CMD FOR IT )
log_flags Console +DoNotEcho | grep %
log_flags General Developer DeveloperConsole Panel Panorama PanoramaScript VScript VScriptDbg VScriptScripts CustomUI CustomGameCache CustomNetTable +DoNotEcho
log_flags AnimationGraph AnimationSystem AnimGraphManager AnimResource Assert "BitBuf Error" BoneSetup Client "Combat Analyzer" CommandLine D3D Decals Demo DeveloperVerbose DotaGuide DOTAHLTVCamera +DoNotEcho
@@ -5,7 +5,7 @@ import time
import pyautogui as gui
import pydirectinput as user
import sys
from dota2_utils import get_resolution, copy_replay, copy_config, get_args
from dota2_utils import get_resolution, verify_replay, copy_replay, copy_config, get_args

sys.path.insert(1, str(Path(sys.path[0]).parent))

@@ -27,6 +27,7 @@ LOG_DIRECTORY = SCRIPT_DIRECTORY / "run"
PROCESS_NAME = "dota2.exe"
STEAM_GAME_ID = 570


setup_log_directory(str(LOG_DIRECTORY))
logging.basicConfig(filename=f'{LOG_DIRECTORY}/harness.log',
                    format=DEFAULT_LOGGING_FORMAT,
@@ -39,13 +40,14 @@ logging.getLogger('').addHandler(console)

args = get_args()
kerasService = KerasService(args.keras_host, args.keras_port)
am = ArtifactManager(LOG_DIRECTORY)

user.FAILSAFE = False

def start_game():
    """Launch the game with console enabled and FPS unlocked"""
    return exec_steam_game(
        STEAM_GAME_ID, game_params=["-console", "+fps_max 0"])
        STEAM_GAME_ID, game_params=["-console", "+fps_max 0", "-novid"])


def console_command(command):
@@ -53,31 +55,14 @@ def console_command(command):
    gui.write(command)
    user.press("enter")


def run_benchmark():
    """Run dota2 benchmark"""
    am = ArtifactManager(LOG_DIRECTORY)
def harness_setup():
    """Copies the replay and config files to the appropriate spots"""
    verify_replay()
    copy_replay()
    copy_config()
    setup_start_time = int(time.time())
    start_game()
    time.sleep(10)  # wait for game to load into main menu

    # to skip logo screen
    if kerasService.wait_for_word(word="va", timeout=20, interval=1):
        logging.info('Game started. Entering main menu')
        user.press("esc")
        time.sleep(1)

    # waiting about a minute for the main menu to appear
    if kerasService.wait_for_word(
            word="heroes", timeout=80, interval=1) is None:
        logging.error(
            "Game didn't start in time. Check settings and try again.")
        sys.exit(1)

    time.sleep(15)  # wait for main menu

def screenshot_settings():
    """Screenshots the settings for the game"""
    screen_height, screen_width = get_resolution()
    location = None
    click_multiple = 0
@@ -129,7 +114,6 @@ def run_benchmark():
    time.sleep(0.2)
    gui.mouseUp()
    time.sleep(0.2)

    if kerasService.wait_for_word(
            word="resolution", timeout=30, interval=1) is None:
        logging.info(
@@ -160,20 +144,46 @@ def run_benchmark():

    am.take_screenshot("video3.png", ArtifactType.CONFIG_IMAGE,
                       "picture of video settings")
    # starting the benchmark

def load_the_benchmark():
    """Loads the replay and runs the benchmark"""
    user.press("escape")
    logging.info('Starting benchmark')
    user.press("\\")
    time.sleep(0.2)
    console_command("exec_async benchmark")
    time.sleep(0.5)
    console_command("sv_cheats true")
    time.sleep(1)
    user.press("\\")

    console_command("exec_async benchmark_load")
    time.sleep(5)
    if kerasService.wait_for_word(
            word="directed", timeout=30, interval=0.1) is None:
        logging.error("Didn't see directed camera. Did the replay load?")
            word="directed", timeout=30, interval=1) is None:
        logging.info(
            "Did not find the directed camera. Did the replay load?")
        sys.exit(1)
    console_command("sv_cheats true")
    time.sleep(1)
    console_command("exec_async benchmark_run")
    user.press("\\")

def run_benchmark():
    """Run dota2 benchmark"""
    harness_setup()
    setup_start_time = int(time.time())
    start_game()
    time.sleep(10)  # wait for game to load into main menu

    # waiting about a minute for the main menu to appear
    if kerasService.wait_for_word(
            word="heroes", timeout=80, interval=1) is None:
        logging.error(
            "Game didn't start in time. Check settings and try again.")
        sys.exit(1)

    time.sleep(15)  # wait for main menu
    screenshot_settings()

    # starting the benchmark
    load_the_benchmark()

    setup_end_time = int(time.time())
    elapsed_setup_time = round(setup_end_time - setup_start_time, 2)
@@ -34,36 +34,44 @@ def get_install_path():
        return DEFAULT_INSTALL_PATH
    return install_path


def copy_replay_from_network_drive():
    """Copies replay file from network drive to harness folder"""
    src_path = Path(r"\\labs.lmg.gg\labs\03_ProcessingFiles\Dota2\benchmark.dem")
    dest_path = SCRIPT_DIRECTORY / "benchmark.dem"
    shutil.copyfile(src_path, dest_path)
    try:
        logging.info("Copying the replay from the network drive to the harness folder.")
        shutil.copyfile(src_path, dest_path)
    except OSError as err:
        logging.error("Network copy failed: %s", err)
        raise

def verify_replay() -> None:
    """Ensure the replay exists in SCRIPT_DIRECTORY."""
    src_path = SCRIPT_DIRECTORY / "benchmark.dem"

    if src_path.exists():
        logging.info("The replay exists in the harness folder. Copying the files.")
        return

    logging.info("The replay file doesn't exist in the harness folder.")
    copy_replay_from_network_drive()

def copy_replay() -> None:
    """Copy replay file to dota 2 folder"""
    """Copying the replay"""
    replay_path = Path(get_install_path(), "game\\dota\\replays")
    replay_path.mkdir(parents=True, exist_ok=True)

    src_path = SCRIPT_DIRECTORY / "benchmark.dem"
    dest_path = replay_path / "benchmark.dem"

    # Try copying the benchmark to the correct area.
    try:
        replay_path = Path(get_install_path(), "game\\dota\\replays")
        replay_path.mkdir(parents=True, exist_ok=True)

        src_path = SCRIPT_DIRECTORY / "benchmark.dem"
        dest_path = replay_path / "benchmark.dem"

        logging.info("Copying: %s -> %s", src_path, dest_path)
        shutil.copy(src_path, dest_path)
        return
    except OSError:
        logging.error("Could not copy local replay file; trying from network drive.")
    try:
        copy_replay_from_network_drive()

        logging.info("Copying: %s -> %s", src_path, dest_path)
        shutil.copy(src_path, dest_path)
    except OSError as err:
        logging.error("Could not copy replay file.")
        raise err
        logging.error("Could not copy the replay file: %s", err)
        raise
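The net effect of the dota2_utils changes above: the harness first verifies a local replay, falls back to the network share, and only then copies into the game folder. A minimal recap of the call order, matching the harness_setup() shown earlier in this commit:

verify_replay()   # ensure benchmark.dem exists locally, pulling it from the share if missing
copy_replay()     # stage it under <install>\game\dota\replays
copy_config()     # stage benchmark_run.cfg and benchmark_load.cfg under game\dota\cfg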
def copy_config() -> None:
@@ -72,14 +80,17 @@ def copy_config() -> None:
    config_path = Path(get_install_path(), "game\\dota\\cfg")
    config_path.mkdir(parents=True, exist_ok=True)

    src_path = SCRIPT_DIRECTORY / "benchmark.cfg"
    dest_path = config_path / "benchmark.cfg"
    files_to_copy = ["benchmark_run.cfg", "benchmark_load.cfg"]

    logging.info("Copying: %s -> %s", src_path, dest_path)
    shutil.copy(src_path, dest_path)
    for filename in files_to_copy:
        src_path = SCRIPT_DIRECTORY / filename
        dest_path = config_path / filename

        logging.info("Copying: %s -> %s", src_path, dest_path)
        shutil.copy(src_path, dest_path)
    except OSError as err:
        logging.error("Could not copy config file.")
        raise err
        logging.error("Could not copy config files: %s", err)
        raise


def read_config() -> list[str] | None:
@@ -19,9 +19,11 @@ from harness_utils.output import (
    DEFAULT_DATE_FORMAT,
)
from harness_utils.process import terminate_processes
from harness_utils.artifacts import ArtifactManager, ArtifactType
from harness_utils.rtss import start_rtss_process, copy_rtss_profile
from harness_utils.steam import exec_steam_run_command
from harness_utils.keras_service import KerasService
from harness_utils.misc import press_n_times

STEAM_GAME_ID = 1551360
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
@@ -65,6 +67,27 @@ def run_benchmark():
    user.press("x")
    time.sleep(2)

    result = kerasService.wait_for_word("video", timeout=30)
    if not result:
        logging.info("Game didn't load to the settings menu.")
        sys.exit(1)

    logging.info("Video found, clicking and continuing.")
    gui.moveTo(result["x"], result["y"])
    time.sleep(0.2)
    gui.mouseDown()
    time.sleep(0.2)
    gui.mouseUp()
    am.take_screenshot("Video_pt.png", ArtifactType.CONFIG_IMAGE, "Video menu")
    time.sleep(0.2)
    press_n_times("down", 19, 0.1)
    am.take_screenshot("Video_pt2.png", ArtifactType.CONFIG_IMAGE, "Video menu2")
    press_n_times("down", 5, 0.1)
    am.take_screenshot("Video_pt3.png", ArtifactType.CONFIG_IMAGE, "Video menu3")
    time.sleep(0.2)
    user.press("escape")
    time.sleep(1)

    result = kerasService.wait_for_word("graphics", timeout=30)
    if not result:
        logging.info("Game didn't load to the settings menu.")
@@ -76,6 +99,13 @@ def run_benchmark():
    gui.mouseDown()
    time.sleep(0.2)
    gui.mouseUp()
    time.sleep(0.2)
    am.take_screenshot("graphics_pt.png", ArtifactType.CONFIG_IMAGE, "graphics menu")
    time.sleep(0.2)
    press_n_times("down", 16, 0.1)
    am.take_screenshot("graphics_pt2.png", ArtifactType.CONFIG_IMAGE, "graphics menu2")
    time.sleep(0.1)
    user.press("down")
    time.sleep(1)

    result = kerasService.wait_for_word("benchmark", timeout=12)
@@ -135,6 +165,7 @@ parser.add_argument("--kerasPort", dest="keras_port",
                    help="Port for Keras OCR service", required=True)
args = parser.parse_args()
kerasService = KerasService(args.keras_host, args.keras_port)
am = ArtifactManager(LOG_DIRECTORY)

try:
    start_time, end_time = run_benchmark()
@@ -144,7 +175,7 @@ try:
        "start_time": seconds_to_milliseconds(start_time),
        "end_time": seconds_to_milliseconds(end_time)
    }

    am.create_manifest()
    write_report_json(LOG_DIRECTORY, "report.json", report)
except Exception as e:
    logging.error("Something went wrong running the benchmark!")
@@ -24,7 +24,7 @@ def install_mingw() -> str:
        if str(MINGW_FOLDER) not in original_path:
            os.environ['PATH'] = str(MINGW_FOLDER.joinpath('bin')) + os.pathsep + original_path
        return "existing mingw installation detected"
    source = Path("\\\\labs.lmg.gg\\labs.lmg.gg\\01_Installers_Utilities\\MinGW\\").joinpath(MINGW_ZIP)
    source = Path("\\\\labs.lmg.gg\\labs\\01_Installers_Utilities\\MinGW\\").joinpath(MINGW_ZIP)
    destination = SCRIPT_DIR.joinpath(MINGW_ZIP)
    shutil.copyfile(source, destination)
    with ZipFile(destination, 'r') as zip_object:
@@ -36,7 +36,7 @@ def install_mingw() -> str:

def copy_miniconda_from_network_drive():
    """copies miniconda installer from network drive"""
    source = Path("\\\\labs.lmg.gg\\labs.lmg.gg\\01_Installers_Utilities\\Miniconda\\").joinpath(
    source = Path("\\\\labs.lmg.gg\\labs\\01_Installers_Utilities\\Miniconda\\").joinpath(
        MINICONDA_INSTALLER)
    destination = SCRIPT_DIR.joinpath(MINICONDA_INSTALLER)
    shutil.copyfile(source, destination)
@@ -72,7 +72,7 @@ def copy_godot_source_from_network_drive() -> str:
    if SCRIPT_DIR.joinpath(GODOT_DIR).is_dir():
        return "existing godot source directory detected"
    zip_name = f"{GODOT_DIR}.zip"
    source = Path("\\\\labs.lmg.gg\\labs.lmg.gg\\03_ProcessingFiles\\Godot Files\\").joinpath(zip_name)
    source = Path("\\\\labs.lmg.gg\\labs\\03_ProcessingFiles\\Godot Files\\").joinpath(zip_name)
    destination = SCRIPT_DIR.joinpath(zip_name)
    shutil.copyfile(source, destination)
    with ZipFile(destination, 'r') as zip_object:
@@ -18,7 +18,7 @@ def handbrake_present() -> bool:
def copy_handbrake_from_network_drive():
    """copy handbrake cli from network drive"""
    source = Path(
        "\\\\labs.lmg.gg\\labs.lmg.gg\\01_Installers_Utilities\\Handbrake\\X86\\HandBrakeCLI-1.9.1-win-x86_64\\")
        "\\\\labs.lmg.gg\\labs\\01_Installers_Utilities\\Handbrake\\X86\\HandBrakeCLI-1.9.1-win-x86_64\\")
    copy_source = source / HANDBRAKE_EXECUTABLE
    destination = SCRIPT_DIR / HANDBRAKE_EXECUTABLE
    shutil.copyfile(copy_source, destination)
@@ -1,24 +1,26 @@
"""UL Procyon Computer Vision test script"""

# pylint: disable=no-name-in-module
from argparse import ArgumentParser
import logging
from pathlib import Path
import subprocess
import sys
import time
from argparse import ArgumentParser
from pathlib import Path

import psutil
from utils import (
    find_score_in_xml,
    is_process_running,
    get_install_path,
    find_procyon_version,
    find_test_version

    find_score_in_xml,
    find_test_version,
    get_install_path,
    is_process_running,
)

PARENT_DIR = str(Path(sys.path[0], ".."))
sys.path.append(PARENT_DIR)

from harness_utils.artifacts import ArtifactManager, ArtifactType
from harness_utils.output import (
    DEFAULT_DATE_FORMAT,
    DEFAULT_LOGGING_FORMAT,
@@ -27,11 +29,12 @@ from harness_utils.output import (
    write_report_json,
)
from harness_utils.procyoncmd import (
    get_winml_devices,
    get_cuda_devices,
    get_openvino_devices,
    get_openvino_gpu,
    get_cuda_devices,
    get_winml_devices,
)

#####
# Globals
#####
@@ -48,104 +51,114 @@ CUDA_DEVICES = get_cuda_devices(ABS_EXECUTABLE_PATH)
CONFIG_DIR = SCRIPT_DIR / "config"
BENCHMARK_CONFIG = {
    "AMD_CPU": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_winml_cpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_winml_cpu.def"',
        "process_name": "WinML.exe",
        "device_name": "CPU",
        # TODO: Find a good way to report the CPU name here.
        "device_id": "CPU",
        "test_name": "cpu_float32",
        "api": "winml"
        "api": "winml",
    },
    "AMD_GPU0": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def"',
        "process_name": "WinML.exe",
        "device_name": list(WINML_DEVICES.keys())[0],
        "device_id": list(WINML_DEVICES.values())[0],
        "test_name": "gpu_float32",
        "api": "winml"
        "api": "winml",
    },
    "AMD_GPU1": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_winml_gpu.def"',
        "process_name": "WinML.exe",
        "device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
        "device_id": list(WINML_DEVICES.values())[1] if len(list(WINML_DEVICES.values())) > 1 else list(WINML_DEVICES.values())[0],
        "device_name": list(WINML_DEVICES.keys())[1]
        if len(list(WINML_DEVICES.keys())) > 1
        else list(WINML_DEVICES.keys())[0],
        "device_id": list(WINML_DEVICES.values())[1]
        if len(list(WINML_DEVICES.values())) > 1
        else list(WINML_DEVICES.values())[0],
        "test_name": "gpu_float32",
        "api": "winml"
        "api": "winml",
    },
    "Intel_CPU": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_cpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_openvino_cpu.def"',
        "process_name": "OpenVino.exe",
        "device_id": "CPU",
        "device_name": OPENVINO_DEVICES["CPU"],
        "test_name": "cpu_float32",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU0": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def"',
        "process_name": "OpenVino.exe",
        "device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
        "test_name": "gpu_float32",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU1": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_openvino_gpu.def"',
        "process_name": "OpenVino.exe",
        "device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
        "test_name": "gpu_float32",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_NPU": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_openvino_npu.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_openvino_npu.def"',
        "process_name": "OpenVino.exe",
        "device_id": "NPU",
        "device_name": OPENVINO_DEVICES.get("NPU", "None"),
        "test_name": "npu_float32",
        "api": "openvino"
        "api": "openvino",
    },
    "NVIDIA_GPU": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_tensorrt.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_tensorrt.def"',
        "device_id": "cuda:0",
        "device_name": CUDA_DEVICES.get("cuda:0"),
        "process_name": "TensorRT.exe",
        "test_name": "gpu_float32",
        "api": "tensorrt"
        "api": "tensorrt",
    },
    "Qualcomm_HTP": {
        "config": f"\"{CONFIG_DIR}\\ai_computer_vision_snpe.def\"",
        "config": f'"{CONFIG_DIR}\\ai_computer_vision_snpe.def"',
        "device_id": "CPU",
        "device_name": "CPU",
        "process_name": "SNPE.exe",
        "test_name": "htp_integer",
        "api": "snpe"
        "api": "snpe",
    },
}
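The "[1] if len(...) > 1 else ...[0]" fallback repeats for every dual-GPU entry above. A sketch of a helper that could express it once (pick_device is a hypothetical name, not repo code):

def pick_device(devices: dict, index: int):
    """Return the (name, id) pair at index, falling back to the first device when absent."""
    items = list(devices.items())
    name, device_id = items[index] if len(items) > index else items[0]
    return name, device_id

# e.g. the AMD_GPU1 device_name/device_id pair would become: pick_device(WINML_DEVICES, 1)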
RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
RESULTS_XML_PATH = LOG_DIR / RESULTS_FILENAME


def setup_logging():
    """setup logging"""
    setup_log_directory(str(LOG_DIR))
    logging.basicConfig(filename=LOG_DIR / "harness.log",
                        format=DEFAULT_LOGGING_FORMAT,
                        datefmt=DEFAULT_DATE_FORMAT,
                        level=logging.DEBUG)
    logging.basicConfig(
        filename=LOG_DIR / "harness.log",
        format=DEFAULT_LOGGING_FORMAT,
        datefmt=DEFAULT_DATE_FORMAT,
        level=logging.DEBUG,
    )
    console = logging.StreamHandler()
    formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)
    logging.getLogger("").addHandler(console)


def get_arguments():
    """get arguments"""
    parser = ArgumentParser()
    parser.add_argument(
        "--engine", dest="engine", help="Engine test type", required=True,
        choices=BENCHMARK_CONFIG.keys())
        "--engine",
        dest="engine",
        help="Engine test type",
        required=True,
        choices=BENCHMARK_CONFIG.keys(),
    )
    argies = parser.parse_args()
    return argies

@@ -154,23 +167,28 @@ def create_procyon_command(test_option, process_name, device_id):
    """create command string"""
    command = str()

    if device_id == 'CPU':
        command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\"'
    if device_id == "CPU":
        command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}"'
    else:
        match process_name:
            case 'WinML.exe':
                command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-winml-device {device_id}'
            case 'OpenVino.exe':
                command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-openvino-device {device_id}'
            case 'TensorRT.exe':
                command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-cuda-device {device_id}'
            case "WinML.exe":
                command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-winml-device {device_id}'
            case "OpenVino.exe":
                command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-openvino-device {device_id}'
            case "TensorRT.exe":
                command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-cuda-device {device_id}'
    command = command.rstrip()
    return command
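For illustration, the NVIDIA_GPU entry above would yield a command of roughly this shape (the executable and directory names here are hypothetical; the real paths come from get_install_path() and LOG_DIR):

"C:\...\Procyon\ProcyonCmd.exe" --definition="...\config\ai_computer_vision_tensorrt.def" --export="...\run\result.xml" --select-cuda-device cuda:0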
def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
    with subprocess.Popen(
        command_to_run,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    ) as proc:
        logging.info("Procyon AI Computer Vision benchmark has started.")
        while True:
            now = time.time()
@@ -192,12 +210,13 @@ try:
    logging.info("Detected OpenVino Devices: %s", str(OPENVINO_DEVICES))
    logging.info("Detected CUDA Devices: %s", (CUDA_DEVICES))

    am = ArtifactManager(LOG_DIR)
    args = get_arguments()
    option = BENCHMARK_CONFIG[args.engine]["config"]
    proc_name = BENCHMARK_CONFIG[args.engine]["process_name"]
    dev_id = BENCHMARK_CONFIG[args.engine]["device_id"]
    cmd = create_procyon_command(option, proc_name, dev_id)
    logging.info('Starting benchmark!')
    logging.info("Starting benchmark!")
    logging.info(cmd)
    start_time = time.time()
    pr = run_benchmark(BENCHMARK_CONFIG[args.engine]["process_name"], cmd)
@@ -211,6 +230,7 @@ try:
        logging.error("Could not find overall score!")
        sys.exit(1)

    am.copy_file(RESULTS_XML_PATH, ArtifactType.RESULTS_TEXT, "results xml file")
    end_time = time.time()
    elapsed_test_time = round(end_time - start_time, 2)
    logging.info("Benchmark took %.2f seconds", elapsed_test_time)
@@ -226,9 +246,9 @@ try:
        "device_name": BENCHMARK_CONFIG[args.engine]["device_name"],
        "procyon_version": find_procyon_version(),
        "unit": "score",
        "score": score
        "score": score,
    }

    am.create_manifest()
    write_report_json(str(LOG_DIR), "report.json", report)
except Exception as e:
    logging.error("Something went wrong running the benchmark!")
@@ -1,29 +1,38 @@
"""UL Procyon AI Image Generation test script"""

# pylint: disable=no-name-in-module
from argparse import ArgumentParser
import logging
from pathlib import Path
import subprocess
import sys
import time
from argparse import ArgumentParser
from pathlib import Path

import psutil
from utils import find_score_in_xml, is_process_running, get_install_path, find_procyon_version, find_test_version
from utils import (
    find_procyon_version,
    find_score_in_xml,
    find_test_version,
    get_install_path,
    is_process_running,
)

PARENT_DIR = str(Path(sys.path[0], ".."))
sys.path.append(PARENT_DIR)

from harness_utils.procyoncmd import (
    get_winml_devices,
    get_openvino_devices,
    get_openvino_gpu,
    get_cuda_devices
)
from harness_utils.artifacts import ArtifactManager, ArtifactType
from harness_utils.output import (
    DEFAULT_DATE_FORMAT,
    DEFAULT_LOGGING_FORMAT,
    seconds_to_milliseconds,
    setup_log_directory,
    write_report_json
    write_report_json,
)
from harness_utils.procyoncmd import (
    get_cuda_devices,
    get_openvino_devices,
    get_openvino_gpu,
    get_winml_devices,
)

#####
@@ -41,149 +50,161 @@ CUDA_DEVICES = get_cuda_devices(ABS_EXECUTABLE_PATH)
CONFIG_DIR = SCRIPT_DIR / "config"
BENCHMARK_CONFIG = {
    "AMD_GPU0_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_onnxruntime.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_onnxruntime.def"',
        "process_name": "ort-directml.exe",
        "device_name": list(WINML_DEVICES.keys())[0],
        "device_id": "0",
        "test_name": "stable_diffusion_fp16",
        "api": "onnx"
        "api": "onnx",
    },
    "AMD_GPU1_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_onnxruntime.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_onnxruntime.def"',
        "process_name": "ort-directml.exe",
        "device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
        "device_name": list(WINML_DEVICES.keys())[1]
        if len(list(WINML_DEVICES.keys())) > 1
        else list(WINML_DEVICES.keys())[0],
        "device_id": "1" if len(list(WINML_DEVICES.values())) > 1 else "0",
        "test_name": "stable_diffusion_fp16",
        "api": "onnx"
        "api": "onnx",
    },
    "AMD_GPU0_XL_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def"',
        "process_name": "ort-directml.exe",
        "device_name": list(WINML_DEVICES.keys())[0],
        "device_id": "0",
        "test_name": "stable_diffusion_fp16_xl",
        "api": "onnx"
        "api": "onnx",
    },
    "AMD_GPU1_XL_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_onnxruntime.def"',
        "process_name": "ort-directml.exe",
        "device_name": list(WINML_DEVICES.keys())[1] if len(list(WINML_DEVICES.keys())) > 1 else list(WINML_DEVICES.keys())[0],
        "device_id": list(WINML_DEVICES.values())[1] if len(list(WINML_DEVICES.values())) > 1 else list(WINML_DEVICES.values())[0],
        "device_name": list(WINML_DEVICES.keys())[1]
        if len(list(WINML_DEVICES.keys())) > 1
        else list(WINML_DEVICES.keys())[0],
        "device_id": list(WINML_DEVICES.values())[1]
        if len(list(WINML_DEVICES.values())) > 1
        else list(WINML_DEVICES.values())[0],
        "test_name": "stable_diffusion_fp16_xl",
        "api": "onnx"
        "api": "onnx",
    },
    "Intel_GPU0_INT8": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
        "test_name": "stable_diffusion_int8",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU0_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
        "test_name": "stable_diffusion_fp16",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU0_XL_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.0" if "GPU.0" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.0"),
        "test_name": "stable_diffusion_fp16_xl",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU1_INT8": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
        "test_name": "stable_diffusion_int8",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU1_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
        "test_name": "stable_diffusion_fp16",
        "api": "openvino"
        "api": "openvino",
    },
    "Intel_GPU1_XL_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_openvino.def"',
        "process_name": "openvino.exe",
        "device_id": "GPU.1" if "GPU.1" in list(OPENVINO_DEVICES.keys()) else "GPU",
        "device_name": get_openvino_gpu(OPENVINO_DEVICES, "GPU.1"),
        "test_name": "stable_diffusion_fp16_xl",
        "api": "openvino"
        "api": "openvino",
    },
    "NVIDIA_GPU_INT8": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_tensorrt.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15int8_tensorrt.def"',
        "process_name": "tensorrt.exe",
        "device_id": "cuda:0",
        "device_name": CUDA_DEVICES.get("cuda:0"),
        "test_name": "stable_diffusion_int8",
        "api": "tensorrt"
        "api": "tensorrt",
    },
    "NVIDIA_GPU_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_tensorrt.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sd15fp16_tensorrt.def"',
        "process_name": "tensorrt.exe",
        "device_id": "cuda:0",
        "device_name": CUDA_DEVICES.get("cuda:0"),
        "test_name": "stable_diffusion_fp16",
        "api": "tensorrt"
        "api": "tensorrt",
    },
    "NVIDIA_GPU_XL_FP16": {
        "config": f"\"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_tensorrt.def\"",
        "config": f'"{CONFIG_DIR}\\ai_imagegeneration_sdxlfp16_tensorrt.def"',
        "process_name": "tensorrt.exe",
        "device_id": "cuda:0",
        "device_name": CUDA_DEVICES.get("cuda:0"),
        "test_name": "stable_diffusion_fp16_xl",
        "api": "tensorrt"
    }
        "api": "tensorrt",
    },
}
RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
RESULTS_XML_PATH = LOG_DIR / RESULTS_FILENAME


def setup_logging():
    """setup logging"""
    setup_log_directory(str(LOG_DIR))
    logging.basicConfig(filename=LOG_DIR / "harness.log",
                        format=DEFAULT_LOGGING_FORMAT,
                        datefmt=DEFAULT_DATE_FORMAT,
                        level=logging.DEBUG)
    logging.basicConfig(
        filename=LOG_DIR / "harness.log",
        format=DEFAULT_LOGGING_FORMAT,
        datefmt=DEFAULT_DATE_FORMAT,
        level=logging.DEBUG,
    )
    console = logging.StreamHandler()
    formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)
    logging.getLogger("").addHandler(console)


def get_arguments():
    """get arguments"""
    parser = ArgumentParser()
    parser.add_argument(
        "--engine", dest="engine", help="Engine test type", required=True,
        choices=BENCHMARK_CONFIG.keys())
        "--engine",
        dest="engine",
        help="Engine test type",
        required=True,
        choices=BENCHMARK_CONFIG.keys(),
    )
    argies = parser.parse_args()
    return argies


def create_procyon_command(test_option, process_name, device_id):
    """create command string"""
    command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\"'
    command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}"'

    match process_name:
        case 'ort-directml.exe':
            command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-winml-device {device_id}'
        case 'openvino.exe':
            command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-openvino-device {device_id}'
        case 'tensorrt.exe':
            command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\" --select-cuda-device {device_id}'
        case "ort-directml.exe":
            command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-winml-device {device_id}'
        case "openvino.exe":
            command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-openvino-device {device_id}'
        case "tensorrt.exe":
            command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}" --select-cuda-device {device_id}'
    command = command.rstrip()

    return command
@@ -191,7 +212,12 @@ def create_procyon_command(test_option, process_name, device_id):

def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
    with subprocess.Popen(
        command_to_run,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    ) as proc:
        logging.info("Procyon AI Image Generation benchmark has started.")
        while True:
            now = time.time()
@@ -216,9 +242,11 @@ try:
    args = get_arguments()
    option = BENCHMARK_CONFIG[args.engine]["config"]
    cmd = create_procyon_command(
        option, BENCHMARK_CONFIG[args.engine]["process_name"],
        BENCHMARK_CONFIG[args.engine]["device_id"])
    logging.info('Starting benchmark!')
        option,
        BENCHMARK_CONFIG[args.engine]["process_name"],
        BENCHMARK_CONFIG[args.engine]["device_id"],
    )
    logging.info("Starting benchmark!")
    logging.info(cmd)
    start_time = time.time()
    pr = run_benchmark(BENCHMARK_CONFIG[args.engine]["process_name"], cmd)
@@ -235,6 +263,9 @@ try:

    end_time = time.time()
    elapsed_test_time = round(end_time - start_time, 2)
    am = ArtifactManager(LOG_DIR)
    am.copy_file(RESULTS_XML_PATH, ArtifactType.RESULTS_TEXT, "results xml file")
    am.create_manifest()
    logging.info("Benchmark took %.2f seconds", elapsed_test_time)
    logging.info("Score was %s", score)

@@ -248,8 +279,7 @@ try:
        "device_name": BENCHMARK_CONFIG[args.engine]["device_name"],
        "procyon_version": find_procyon_version(),
        "unit": "score",
        "score": score

        "score": score,
    }

    write_report_json(str(LOG_DIR), "report.json", report)
@@ -1,23 +1,32 @@
"""UL Procyon AI Text Generation test script"""

# pylint: disable=no-name-in-module
from argparse import ArgumentParser
import logging
from pathlib import Path
import subprocess
import sys
import time
from argparse import ArgumentParser
from pathlib import Path

import psutil
from utils import regex_find_score_in_xml, is_process_running, get_install_path, find_procyon_version, find_test_version
from utils import (
    find_procyon_version,
    find_test_version,
    get_install_path,
    is_process_running,
    regex_find_score_in_xml,
)

PARENT_DIR = str(Path(sys.path[0], ".."))
sys.path.append(PARENT_DIR)

from harness_utils.artifacts import ArtifactManager, ArtifactType
from harness_utils.output import (
    DEFAULT_DATE_FORMAT,
    DEFAULT_LOGGING_FORMAT,
    seconds_to_milliseconds,
    setup_log_directory,
    write_report_json
    write_report_json,
)

#####
@@ -31,114 +40,125 @@ ABS_EXECUTABLE_PATH = DIR_PROCYON / EXECUTABLE
CONFIG_DIR = SCRIPT_DIR / "config"
BENCHMARK_CONFIG = {
    "All_Models_ONNX": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_all.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_all.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AIImageGenerationOverallScore>(\d+)",
        "test_name": "all_models",
        "api": "onnx"
        "api": "onnx",
    },
    "Llama_2_13B_ONNX": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama2.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_llama2.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationLlama2OverallScore>(\d+)",
        "test_name": "llama_2_13b",
        "api": "onnx"
        "api": "onnx",
    },
    "Llama_3_1_8B_ONNX": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama3.1.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_llama3.1.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationLlama3OverallScore>(\d+)",
        "test_name": "llama_3_1_8b",
        "api": "onnx"
        "api": "onnx",
    },
    "Mistral_7B_ONNX": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_mistral.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_mistral.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationMistralOverallScore>(\d+)",
        "test_name": "mistral_7b",
        "api": "onnx"
        "api": "onnx",
    },
    "Phi_3_5_ONNX": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_phi.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_phi.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationPhiOverallScore>(\d+)",
        "test_name": "phi_3_5",
        "api": "onnx"
        "api": "onnx",
    },
    "All_Models_OPENVINO": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_all_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_all_openvino.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AIImageGenerationOverallScore>(\d+)",
        "test_name": "all_models",
        "api": "openvino"
        "api": "openvino",
    },
    "Llama_2_13B_OPENVINO": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama2_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_llama2_openvino.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationLlama2OverallScore>(\d+)",
        "test_name": "llama_2_13b",
        "api": "openvino"
        "api": "openvino",
    },
    "Llama_3_1_8B_OPENVINO": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_llama3.1_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_llama3.1_openvino.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationLlama3OverallScore>(\d+)",
        "test_name": "llama_3_1_8b",
        "api": "openvino"
        "api": "openvino",
    },
    "Mistral_7B_OPENVINO": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_mistral_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_mistral_openvino.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationMistralOverallScore>(\d+)",
        "test_name": "mistral_7b",
        "api": "openvino"
        "api": "openvino",
    },
    "Phi_3_5_OPENVINO": {
        "config": f"\"{CONFIG_DIR}\\ai_textgeneration_phi_openvino.def\"",
        "config": f'"{CONFIG_DIR}\\ai_textgeneration_phi_openvino.def"',
        "process_name": "Handler.exe",
        "result_regex": r"<AiTextGenerationPhiOverallScore>(\d+)",
        "test_name": "phi_3_5",
        "api": "openvino"
    }
        "api": "openvino",
    },
}

RESULTS_FILENAME = "result.xml"
REPORT_PATH = LOG_DIR / RESULTS_FILENAME
RESULTS_XML_PATH = LOG_DIR / RESULTS_FILENAME


def setup_logging():
    """setup logging"""
    setup_log_directory(str(LOG_DIR))
    logging.basicConfig(filename=LOG_DIR / "harness.log",
                        format=DEFAULT_LOGGING_FORMAT,
                        datefmt=DEFAULT_DATE_FORMAT,
                        level=logging.DEBUG)
    logging.basicConfig(
        filename=LOG_DIR / "harness.log",
        format=DEFAULT_LOGGING_FORMAT,
        datefmt=DEFAULT_DATE_FORMAT,
        level=logging.DEBUG,
    )
    console = logging.StreamHandler()
    formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)
    logging.getLogger("").addHandler(console)


def get_arguments():
    """get arguments"""
    parser = ArgumentParser()
    parser.add_argument(
        "--engine", dest="engine", help="Engine test type", required=True,
        choices=BENCHMARK_CONFIG.keys())
        "--engine",
        dest="engine",
        help="Engine test type",
        required=True,
        choices=BENCHMARK_CONFIG.keys(),
    )
    argies = parser.parse_args()
    return argies


def create_procyon_command(test_option):
    """create command string"""
    command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\"'
    command = f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{RESULTS_XML_PATH}"'
    command = command.rstrip()
    return command


def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
    with subprocess.Popen(
        command_to_run,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    ) as proc:
        logging.info("Procyon AI Text Generation benchmark has started.")
        while True:
            now = time.time()
@@ -159,7 +179,7 @@ try:
    args = get_arguments()
    option = BENCHMARK_CONFIG[args.engine]["config"]
    cmd = create_procyon_command(option)
    logging.info('Starting benchmark!')
    logging.info("Starting benchmark!")
    logging.info(cmd)
    start_time = time.time()
    pr = run_benchmark(BENCHMARK_CONFIG[args.engine]["process_name"], cmd)
@@ -171,7 +191,13 @@ try:
    end_time = time.time()
    elapsed_test_time = round(end_time - start_time, 2)

    if not args.engine == "All_Models_OPENVINO" and not args.engine == "All_Models_ONNX":
    am = ArtifactManager(LOG_DIR)
    am.copy_file(RESULTS_XML_PATH, ArtifactType.RESULTS_TEXT, "results xml file")
    am.create_manifest()
    if (
        not args.engine == "All_Models_OPENVINO"
        and not args.engine == "All_Models_ONNX"
    ):
        results_regex = BENCHMARK_CONFIG[args.engine]["result_regex"]
        score = regex_find_score_in_xml(results_regex)
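The result_regex values are matched against the exported result.xml. A quick, self-contained demonstration of how such a pattern extracts a score (the sample XML line is invented for illustration):

import re

# One line of a hypothetical result.xml export:
sample = "<AiTextGenerationMistralOverallScore>4123</AiTextGenerationMistralOverallScore>"
match = re.search(r"<AiTextGenerationMistralOverallScore>(\d+)", sample)
print(match.group(1))  # prints 4123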
@@ -185,7 +211,7 @@ try:
        "unit": "score",
        "score": score,
        "start_time": seconds_to_milliseconds(start_time),
        "end_time": seconds_to_milliseconds(end_time)
        "end_time": seconds_to_milliseconds(end_time),
    }

    logging.info("Benchmark took %.2f seconds", elapsed_test_time)
@@ -198,10 +224,15 @@ try:
    logging.info("Benchmark took %.2f seconds", elapsed_test_time)

    for test_type in BENCHMARK_CONFIG.items():
        if test_type[0] == "All_Models_ONNX" or test_type[0] == "All_Models_OPENVINO":
        if (
            test_type[0] == "All_Models_ONNX"
            or test_type[0] == "All_Models_OPENVINO"
        ):
            continue

        if ("ONNX" in args.engine and "ONNX" in test_type[0]) or ("OPENVINO" in args.engine and "OPENVINO" in test_type[0]):
        if ("ONNX" in args.engine and "ONNX" in test_type[0]) or (
            "OPENVINO" in args.engine and "OPENVINO" in test_type[0]
        ):
            results_regex = test_type[1]["result_regex"]
            score = regex_find_score_in_xml(results_regex)

@@ -220,8 +251,7 @@ try:
            "test_version": find_test_version(),
            "procyon_version": find_procyon_version(),
            "unit": "score",
            "score": score

            "score": score,
        }

        session_report.append(report)
@@ -13,6 +13,7 @@ options:
- premierepro
- photoshop
- aftereffects
- lightroom
- resolve
tooltip: Select which test to run
- name: benchmark_version
@@ -8,7 +8,7 @@ from argparse import ArgumentParser
import time
from subprocess import Popen, PIPE
import threading
from utils import find_latest_log, find_score_in_log, get_photoshop_version, get_premierepro_version, get_aftereffects_version, get_davinci_version, get_pugetbench_version, get_latest_benchmark_by_version
from utils import find_latest_log, find_score_in_log, get_photoshop_version, get_premierepro_version, get_lightroom_version, get_aftereffects_version, get_davinci_version, get_pugetbench_version, get_latest_benchmark_by_version

sys.path.insert(1, os.path.join(sys.path[0], ".."))
from harness_utils.process import terminate_processes
@@ -74,6 +74,8 @@ def run_benchmark(application: str, app_version: str, benchmark_version: str):
        command = [executable_path] + command_args + ["--app", "photoshop"]
    elif application == "aftereffects":
        command = [executable_path] + command_args + ["--app", "aftereffects"]
    elif application == "lightroom":
        command = [executable_path] + command_args + ["--app", "lightroom"]
    elif application == "resolve":
        command = [executable_path] + command_args + ["--app", "resolve"]

@@ -122,6 +124,7 @@ def main():
        "premierepro",
        "photoshop",
        "aftereffects",
        "lightroom",
        "resolve"
    ]

@@ -134,27 +137,36 @@ def main():

    version = args.app_version
    score = 0
    full_version = None
    trimmed_version = None
    test = ""
    if args.app == "premierepro":
        test = "Adobe Premiere Pro"
        if version is None:
            version = get_premierepro_version()
            full_version, trimmed_version = get_premierepro_version()
    elif args.app == "photoshop":
        test = "Adobe Photoshop"
        if version is None:
            version = get_photoshop_version()
            full_version, trimmed_version = get_photoshop_version()
    elif args.app == "aftereffects":
        test = "Adobe After Effects"
        if version is None:
            version = get_aftereffects_version()
            full_version, trimmed_version = get_aftereffects_version()
    elif args.app == "lightroom":
        test = "Adobe Lightroom Classic"
        if version is None:
            full_version, trimmed_version = get_lightroom_version()
    elif args.app == "resolve":
        test = "Davinci Resolve Studio"
        if version is None:
            version = get_davinci_version() + "-studio"
            full_version, trimmed_version = get_davinci_version()
            if full_version and trimmed_version:
                full_version += "-studio"
                trimmed_version += "-studio"

    try:
        start_time, end_time = run_benchmark(
            args.app, version, args.benchmark_version)
            args.app, trimmed_version, args.benchmark_version)
        log_file = find_latest_log()
        score = find_score_in_log(log_file)
        destination = Path(script_dir) / "run" / os.path.split(log_file)[1]
@@ -165,7 +177,7 @@ def main():
            "end_time": seconds_to_milliseconds(end_time),
            "test": "PugetBench",
            "test_parameter": test,
            "app_version": version,
            "app_version": full_version,
            "benchmark_version": args.benchmark_version,
            "pugetbench_version": get_pugetbench_version(),
            "unit": "Score",
@@ -3,11 +3,12 @@ import re
import os
from pathlib import Path
import win32api
import csv


def get_latest_benchmark_by_version(benchmark_name: str):
    """Get the latest benchmark version, prioritizing beta if it's newer."""
    valid_names = ['photoshop', 'premierepro', 'aftereffects', 'resolve']
    valid_names = ['photoshop', 'premierepro', 'aftereffects', 'lightroom', 'resolve']
    if benchmark_name not in valid_names:
        raise ValueError("Invalid benchmark name")

@@ -60,34 +61,58 @@ def find_latest_log():


def find_score_in_log(log_path):
    """find score in pugetbench log file"""
    with open(log_path, 'r', encoding="utf-8") as file:
        for line in file:
            score = is_score_line(line)
            if score is not None:
                return score
    """Return a single PugetBench overall score, preferring Standard > Extended > Basic."""
    scores = {}

    with open(log_path, newline='', encoding="utf-8") as f:
        reader = csv.reader(f)

        for row in reader:
            if not row:
                continue

            label = row[0].strip()

            # Only process rows that begin with "Overall Score"
            if not label.startswith("Overall Score"):
                continue

            # Find the first numeric field
            for field in row:
                cleaned = field.replace(",", "").strip()
                if cleaned.isdigit():
                    scores[label] = int(cleaned)
                    break

    # Priority order: return the first one found
    priority = [
        "Overall Score (Standard)",
        "Overall Score (Extended)",
        "Overall Score (Basic)",
    ]

    for key in priority:
        if key in scores:
            return scores[key]

    return None


def is_score_line(line):
    """check if string is a score using regex"""
    regex_pattern = r"^Overall Score.+,+(\d+),+"
    match = re.search(regex_pattern, line)
    if match and len(match.groups()) > 0:
        return match.group(1)
    return None
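With the CSV-based parser above, a log whose summary rows look like the sample below resolves to the Standard score even though Extended appears first (sample data invented for illustration; find_score_in_log is the function shown above):

# Write a two-row sample log, then parse it.
with open("sample.csv", "w", encoding="utf-8") as f:
    f.write("Overall Score (Extended),9876,\n")
    f.write("Overall Score (Standard),1234,\n")
print(find_score_in_log("sample.csv"))  # prints 1234: Standard outranks Extended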
def get_photoshop_version() -> tuple[str, str]:
    """Get the installed Adobe Photoshop version string, prioritizing Beta versions."""
    base_path = r"C:\Program Files\Adobe"

    # Check if Adobe folder exists
    if not os.path.exists(base_path):
        print("Adobe directory not found.")
        return None

def get_photoshop_version() -> str:
    """Get the current installed Adobe Premiere Pro version string."""
    base_path = "C:\\Program Files\\Adobe"

    # Look for Adobe Premiere Pro folders
    # Look for Adobe Photoshop folders
    possible_versions = sorted(
        [d for d in os.listdir(base_path) if "Adobe Photoshop" in d],
        reverse=True  # Prioritize newer versions
    )


    for folder in possible_versions:
        exe_path = os.path.join(base_path, folder, "Photoshop.exe")
        if os.path.exists(exe_path):
@@ -96,13 +121,19 @@ def get_photoshop_version() -> str:
                    exe_path, "\\VarFileInfo\\Translation"
                )[0]
                str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
                return win32api.GetFileVersionInfo(exe_path, str_info_path)
                full_version = win32api.GetFileVersionInfo(exe_path, str_info_path)

                # Trim to major.minor
                parts = full_version.split(".")
                major_minor = ".".join(parts[:2]) if len(parts) >= 2 else full_version

                return full_version, major_minor
            except Exception as e:
                print(f"Error reading version from {exe_path}: {e}")

    return None  # No valid installation found

def get_aftereffects_version() -> str:
    return None, None

def get_aftereffects_version() -> tuple[str, str]:
    """Get the installed Adobe After Effects version string, prioritizing Beta versions."""
    base_path = r"C:\Program Files\Adobe"

@@ -131,23 +162,34 @@ def get_aftereffects_version() -> str:
            if info:
                lang, codepage = info[0]
                str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
                return str(win32api.GetFileVersionInfo(exe_path, str_info_path))
                full_version = str(win32api.GetFileVersionInfo(exe_path, str_info_path))

                # Trim to major.minor
                parts = full_version.split(".")
                major_minor = ".".join(parts[:2]) if len(parts) >= 2 else full_version

                return full_version, major_minor
        except Exception as e:
            print(f"Error reading version from {exe_path}: {e}")

    return None  # No valid installation found
    return None, None


def get_premierepro_version() -> str:
def get_premierepro_version() -> tuple[str, str]:
    """Get the current installed Adobe Premiere Pro version string."""
    base_path = "C:\\Program Files\\Adobe"

    base_path = r"C:\Program Files\Adobe"

    # Check if Adobe folder exists
    if not os.path.exists(base_path):
        print("Adobe directory not found.")
        return None

    # Look for Adobe Premiere Pro folders
    possible_versions = sorted(
        [d for d in os.listdir(base_path) if "Adobe Premiere Pro" in d],
        reverse=True  # Prioritize newer versions
    )


    for folder in possible_versions:
        exe_path = os.path.join(base_path, folder, "Adobe Premiere Pro.exe")
        if os.path.exists(exe_path):
@@ -156,27 +198,80 @@ def get_premierepro_version() -> str:
                    exe_path, "\\VarFileInfo\\Translation"
                )[0]
                str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
                return win32api.GetFileVersionInfo(exe_path, str_info_path)
                full_version = win32api.GetFileVersionInfo(exe_path, str_info_path)

                # Trim to major.minor
                parts = full_version.split(".")
                major_minor = ".".join(parts[:2]) if len(parts) >= 2 else full_version

                return full_version, major_minor
            except Exception as e:
                print(f"Error reading version from {exe_path}: {e}")

    return None  # No valid installation found

    return None, None

def get_lightroom_version() -> tuple[str, str]:
    """Get the current installed Adobe Lightroom Classic version string."""
    base_path = r"C:\Program Files\Adobe"

    # Check if Adobe folder exists
    if not os.path.exists(base_path):
        print("Adobe directory not found.")
        return None

    # Look for Adobe Lightroom Classic folders
    possible_versions = sorted(
        [d for d in os.listdir(base_path) if "Adobe Lightroom Classic" in d],
        reverse=True  # Prioritize newer versions
    )

    for folder in possible_versions:
        exe_path = os.path.join(base_path, folder, "Lightroom.exe")
        if os.path.exists(exe_path):
            try:
                lang, codepage = win32api.GetFileVersionInfo(
                    exe_path, "\\VarFileInfo\\Translation"
                )[0]
                str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
                full_version = win32api.GetFileVersionInfo(exe_path, str_info_path)

                # Trim to major.minor
                parts = full_version.split(".")
                major_minor = ".".join(parts[:2]) if len(parts) >= 2 else full_version

                return full_version, major_minor
            except Exception as e:
                print(f"Error reading version from {exe_path}: {e}")

    return None, None


def get_davinci_version() -> str:
    """get current photoshop version string"""
    path = "C:\\Program Files\\Blackmagic Design\\DaVinci Resolve\\Resolve.exe"
def get_davinci_version() -> tuple[str, str]:
    """Get the current installed Davinci Resolve Studio version string."""
    path = r"C:\Program Files\Blackmagic Design\DaVinci Resolve\Resolve.exe"
    if not os.path.exists(path):
        print("DaVinci Resolve executable not found.")
        return None, None

    try:
        lang, codepage = win32api.GetFileVersionInfo(
            path, "\\VarFileInfo\\Translation")[0]
            path, "\\VarFileInfo\\Translation"
        )[0]
        str_info_path = f"\\StringFileInfo\\{lang:04X}{codepage:04X}\\ProductVersion"
        return win32api.GetFileVersionInfo(path, str_info_path)
        full_version = win32api.GetFileVersionInfo(path, str_info_path)

        # Trim to major.minor
        parts = full_version.split(".")
        major_minor = ".".join(parts[:2]) if len(parts) >= 2 else full_version

        return full_version, major_minor

    except Exception as e:
        print(e)
        return None
        print(f"Error reading version from {path}: {e}")
        return None, None
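The trim-to-major.minor block is now repeated verbatim in five version getters. A sketch of a shared helper that could replace it (the name is a suggestion, not repo code):

def trim_to_major_minor(full_version: str) -> str:
    """Reduce a dotted version string such as '25.1.0.73' to '25.1'."""
    parts = full_version.split(".")
    return ".".join(parts[:2]) if len(parts) >= 2 else full_version

Each getter would then end with: return full_version, trim_to_major_minor(full_version).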
def get_pugetbench_version() -> str:
    """get current premiere pro version string"""
    """Get the current installed PugetBench version string."""
    path = "C:\\Program Files\\PugetBench for Creators\\PugetBench for Creators.exe"
    try:
        lang, codepage = win32api.GetFileVersionInfo(
@@ -185,4 +280,4 @@ def get_pugetbench_version() -> str:
        return win32api.GetFileVersionInfo(path, str_info_path)
    except Exception as e:
        print(e)
        return None
    return None
@@ -77,7 +77,7 @@ def copy_benchmarkfiles() -> None:

def copy_save_from_network_drive(file_name, destination):
    """copy save file from network drive"""
    network_dir = Path("\\\\labs.lmg.gg\\labs.lmg.gg\\03_ProcessingFiles\\Stellaris")
    network_dir = Path("\\\\labs.lmg.gg\\labs\\03_ProcessingFiles\\Stellaris")
    source_path = network_dir.joinpath(file_name)
    logging.info("Copying %s from %s", file_name, source_path)
    shutil.copyfile(source_path, destination)