diff --git a/3dmark/ul3dmark.py b/3dmark/ul3dmark.py
index 21f8c85..e5eb82b 100644
--- a/3dmark/ul3dmark.py
+++ b/3dmark/ul3dmark.py
@@ -20,7 +20,7 @@ from harness_utils.output import (
 )
 
 #####
-### Globals
+# Globals
 #####
 SCRIPT_DIR = Path(__file__).resolve().parent
 LOG_DIR = SCRIPT_DIR / "run"
@@ -31,25 +31,25 @@ CONFIG_DIR = SCRIPT_DIR / "config"
 BENCHMARK_CONFIG = {
     "TimeSpy": {
         "config": CONFIG_DIR / "timespy.3dmdef",
-        "process_name": "3DMarkTimeSpy.exe", 
+        "process_name": "3DMarkTimeSpy.exe",
         "score_name": "TimeSpyPerformanceGraphicsScore",
         "test_name": "3DMark Time Spy"
     },
     "FireStrike": {
         "config": CONFIG_DIR / "firestrike.3dmdef",
-        "process_name": "3DMarkICFWorkload.exe", 
+        "process_name": "3DMarkICFWorkload.exe",
         "score_name": "firestrikegraphicsscorep",
         "test_name": "3DMark Fire Strike"
     },
     "PortRoyal": {
         "config": CONFIG_DIR / "portroyal.3dmdef",
-        "process_name": "3DMarkPortRoyal.exe", 
+        "process_name": "3DMarkPortRoyal.exe",
         "score_name": "PortRoyalPerformanceGraphicsScore",
         "test_name": "3DMark Port Royal"
     },
     "SolarBay": {
         "config": CONFIG_DIR / "solarbay.3dmdef",
-        "process_name": "3DMarkSolarBay.exe", 
+        "process_name": "3DMarkSolarBay.exe",
         "score_name": "SolarBayPerformanceGraphicsScore",
         "test_name": "3DMark Solar Bay"
     }
@@ -57,9 +57,10 @@ BENCHMARK_CONFIG = {
 RESULTS_FILENAME = "myresults.xml"
 REPORT_PATH = LOG_DIR / RESULTS_FILENAME
 
+
 def setup_logging():
     """setup logging"""
-    setup_log_directory(LOG_DIR)
+    setup_log_directory(str(LOG_DIR))
     logging.basicConfig(filename=LOG_DIR / "harness.log",
                         format=DEFAULT_LOGGING_FORMAT,
                         datefmt=DEFAULT_DATE_FORMAT,
@@ -73,8 +74,9 @@ def setup_logging():
 
 def get_arguments():
     """get arguments"""
     parser = ArgumentParser()
-    parser.add_argument(
-        "--benchmark", dest="benchmark", help="Benchmark test type", required=True, choices=BENCHMARK_CONFIG.keys())
+    parser.add_argument("--benchmark", dest="benchmark",
+                        help="Benchmark test type", required=True,
+                        choices=BENCHMARK_CONFIG.keys())
     argies = parser.parse_args()
     return argies
@@ -94,16 +96,17 @@ def run_benchmark(process_name, command_to_run):
     while True:
         now = time.time()
         elapsed = now - start_time
-        if elapsed >= 30: #seconds
+        if elapsed >= 30:  # seconds
             raise ValueError("BenchMark subprocess did not start in time")
         process = is_process_running(process_name)
         if process is not None:
             process.nice(psutil.HIGH_PRIORITY_CLASS)
             break
         time.sleep(0.2)
-    _, _ = proc.communicate() # blocks until 3dmark exits
+    _, _ = proc.communicate()  # blocks until 3dmark exits
     return proc
 
+
 try:
     setup_logging()
     args = get_arguments()
@@ -118,7 +121,9 @@ try:
         logging.error("3DMark exited with return code %d", pr.returncode)
         sys.exit(pr.returncode)
 
-    score = get_score(BENCHMARK_CONFIG[args.benchmark]["score_name"], REPORT_PATH)
+    score = get_score(
+        BENCHMARK_CONFIG[args.benchmark]["score_name"],
+        REPORT_PATH)
     if score is None:
         logging.error("Could not find average FPS output!")
         sys.exit(1)
@@ -129,7 +134,8 @@ try:
     logging.info("Score was %s", score)
 
     report = {
-        "test": BENCHMARK_CONFIG[args.benchmark]["test_name"],
+        "test": "3DMark",
+        "test_parameter": args.benchmark,
         "unit": "score",
         "score": score,
         "start_time": seconds_to_milliseconds(strt),
diff --git a/blender_render/blender.py b/blender_render/blender.py
index 3b8f6b5..63b7e3a 100644
--- a/blender_render/blender.py
+++ b/blender_render/blender.py
@@ -3,17 +3,17 @@ from pathlib import Path
 from blender_utils import BENCHMARK_CONFIG, find_blender, run_blender_render, download_scene
 from argparse import ArgumentParser
 import logging
-import os.path
 import sys
 import time
 
-sys.path.insert(1, os.path.join(sys.path[0], '..'))
+sys.path.insert(1, str(Path(sys.path[0]).parent))
 
 from harness_utils.output import DEFAULT_DATE_FORMAT, DEFAULT_LOGGING_FORMAT, write_report_json, seconds_to_milliseconds
 
 SCRIPT_DIR = Path(__file__).resolve().parent
 LOG_DIR = SCRIPT_DIR.joinpath("run")
 
+
 def setup_logging():
     """default logging config"""
     LOG_DIR.mkdir(exist_ok=True)
@@ -26,15 +26,18 @@ def setup_logging():
     console.setFormatter(formatter)
     logging.getLogger('').addHandler(console)
 
+
 VALID_DEVICES = ["CPU", "CUDA", "OPTIX", "HIP", "ONEAPI", "METAL"]
 
+
 def main():
     """entry point for test script"""
     parser = ArgumentParser()
     parser.add_argument("-d", "--device", dest="device",
                         help="device", metavar="device", required=True)
     parser.add_argument(
-        "--benchmark", dest="benchmark", help="Benchmark test type", metavar="benchmark", required=True)
+        "--benchmark", dest="benchmark", help="Benchmark test type",
+        metavar="benchmark", required=True)
     args = parser.parse_args()
     if args.device not in VALID_DEVICES:
         raise Exception(f"invalid device selection: {args.device}")
@@ -49,23 +52,25 @@ def main():
     score = run_blender_render(
         executable_path, LOG_DIR, args.device.upper(), benchmark)
     end_time = time.time()
-    logging.info(f'Finished rendering {args.benchmark} in %d seconds', (end_time - start_time))
+    logging.info(
+        f'Finished rendering {args.benchmark} in %d seconds',
+        (end_time - start_time))
 
     if score is None:
         raise Exception("no duration was found in the log to use as the score")
 
     report = {
-        "test": f"Blender {args.benchmark} Render {args.device.upper()}",
+        "test": "Blender Render",
+        "test_parameter": args.benchmark,
         "score": score,
         "unit": "seconds",
         "version": version,
         "device": args.device,
-        "benchmark": args.benchmark,
         "start_time": seconds_to_milliseconds(start_time),
         "end_time": seconds_to_milliseconds(end_time)
     }
 
-    write_report_json(LOG_DIR, "report.json", report)
+    write_report_json(str(LOG_DIR), "report.json", report)
 
 
 if __name__ == "__main__":
diff --git a/gravitymark/gravitymark.py b/gravitymark/gravitymark.py
index 6eee1ec..2edb6ce 100644
--- a/gravitymark/gravitymark.py
+++ b/gravitymark/gravitymark.py
@@ -4,7 +4,7 @@ import getpass
 import subprocess
 import sys
 from pathlib import Path
-from gravitymark_utils import friendly_test_name, get_args, get_score, create_gravitymark_command
+from gravitymark_utils import friendly_test_param, get_args, get_score, create_gravitymark_command
 
 PARENT_DIR = str(Path(sys.path[0], ".."))
 sys.path.append(PARENT_DIR)
@@ -19,7 +19,7 @@ GRAVITYMARK_PATH = Path("C:/", "Program Files", "GravityMark", "bin")
 GRAVITYMARK_EXE = GRAVITYMARK_PATH / "GravityMark.exe"
 
 args = get_args()
-api = f"-{args.api}"
+API = f"-{args.api}"
 
 script_dir = Path(__file__).resolve().parent
 log_dir = script_dir / "run"
@@ -36,9 +36,11 @@ formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
 console.setFormatter(formatter)
 logging.getLogger("").addHandler(console)
 
-gravitymark_log_path = Path("C:/Users", getpass.getuser(), ".GravityMark", "GravityMark.log")
+gravitymark_log_path = Path(
+    "C:/Users", getpass.getuser(),
+    ".GravityMark", "GravityMark.log")
 image_path = log_dir / "result.png"
-command = create_gravitymark_command(GRAVITYMARK_EXE, api, image_path)
+command = create_gravitymark_command(GRAVITYMARK_EXE, API, image_path)
 
 try:
     logging.info('Starting benchmark!')
@@ -47,7 +49,8 @@ try:
     result = subprocess.run(command, check=True, cwd=GRAVITYMARK_PATH)
 
     if result.returncode > 0:
-        logging.error("GravityMark exited with return code %d", result.returncode)
+        logging.error("GravityMark exited with return code %d",
+                      result.returncode)
         sys.exit(1)
 
     score = get_score(gravitymark_log_path)
@@ -57,12 +60,13 @@ try:
         sys.exit(1)
 
     report = {
-        "test": friendly_test_name(args.api),
+        "test": "GravityMark",
+        "test_parameter": friendly_test_param(args.api),
         "score": score,
         "unit": "score"
     }
 
-    write_report_json(log_dir, "report.json", report)
+    write_report_json(str(log_dir), "report.json", report)
 except Exception as e:
     logging.error("Something went wrong running the benchmark!")
     logging.exception(e)
diff --git a/gravitymark/gravitymark_utils.py b/gravitymark/gravitymark_utils.py
index 3a0d0b8..899bebf 100644
--- a/gravitymark/gravitymark_utils.py
+++ b/gravitymark/gravitymark_utils.py
@@ -23,16 +23,16 @@ CLI_OPTIONS = {
     "-status": "1"
 }
 
-def friendly_test_name(api: str) -> str:
+def friendly_test_param(api: str) -> str:
     """return a friendlier string given the API harness argument"""
     if api == "vulkan":
-        return "GravityMark Vulkan"
+        return "Vulkan"
     if api == "opengl":
-        return "GravityMark OpenGL"
+        return "OpenGL"
     if api == "direct3d12":
-        return "GravityMark DX12"
+        return "DX12"
     if api == "direct3d11":
-        return "GravityMark DX11"
+        return "DX11"
     return api
 
 def get_args() -> Namespace:
diff --git a/harness_utils/output.py b/harness_utils/output.py
index 858ddb2..8a51c97 100644
--- a/harness_utils/output.py
+++ b/harness_utils/output.py
@@ -13,7 +13,9 @@ def setup_log_directory(log_dir: str) -> None:
         os.mkdir(log_dir)
 
 
-def write_report_json(log_dir: str, report_name: str, report_json: any) -> None:
+# TODO: tighten this annotation; report_json should be dict, not any
+def write_report_json(
+        log_dir: str, report_name: str, report_json: any) -> None:
     """Writes the json output of a harness to the log directory"""
     with open(os.path.join(log_dir, report_name), "w", encoding="utf-8") as file:
         file.write(json.dumps(report_json))
diff --git a/deprecated/F1_22/README.md b/zdeprecated/F1_22/README.md
similarity index 100%
rename from deprecated/F1_22/README.md
rename to zdeprecated/F1_22/README.md
diff --git a/deprecated/F1_22/f1.py b/zdeprecated/F1_22/f1.py
similarity index 100%
rename from deprecated/F1_22/f1.py
rename to zdeprecated/F1_22/f1.py
diff --git a/deprecated/F1_22/f1_22_utils.py b/zdeprecated/F1_22/f1_22_utils.py
similarity index 100%
rename from deprecated/F1_22/f1_22_utils.py
rename to zdeprecated/F1_22/f1_22_utils.py
diff --git a/deprecated/F1_22/manifest.yaml b/zdeprecated/F1_22/manifest.yaml
similarity index 100%
rename from deprecated/F1_22/manifest.yaml
rename to zdeprecated/F1_22/manifest.yaml
diff --git a/F1_23/README.md b/zdeprecated/F1_23/README.md
similarity index 100%
rename from F1_23/README.md
rename to zdeprecated/F1_23/README.md
diff --git a/F1_23/f1_23.py b/zdeprecated/F1_23/f1_23.py
similarity index 100%
rename from F1_23/f1_23.py
rename to zdeprecated/F1_23/f1_23.py
diff --git a/F1_23/f1_23_utils.py b/zdeprecated/F1_23/f1_23_utils.py
similarity index 100%
rename from F1_23/f1_23_utils.py
rename to zdeprecated/F1_23/f1_23_utils.py
diff --git a/F1_23/manifest.yaml b/zdeprecated/F1_23/manifest.yaml
similarity index 100%
rename from F1_23/manifest.yaml
rename to zdeprecated/F1_23/manifest.yaml
diff --git a/deprecated/README.md b/zdeprecated/README.md
similarity index 100%
rename from deprecated/README.md
rename to zdeprecated/README.md
diff --git a/deprecated/aida64gpgpu/README.md b/zdeprecated/aida64gpgpu/README.md
similarity index 100%
rename from deprecated/aida64gpgpu/README.md
rename to zdeprecated/aida64gpgpu/README.md
diff --git a/deprecated/aida64gpgpu/aida64_gpgpu_benchmark.png b/zdeprecated/aida64gpgpu/aida64_gpgpu_benchmark.png
similarity index 100%
rename from deprecated/aida64gpgpu/aida64_gpgpu_benchmark.png
rename to zdeprecated/aida64gpgpu/aida64_gpgpu_benchmark.png
diff --git a/deprecated/aida64gpgpu/aida64gpgpu.py b/zdeprecated/aida64gpgpu/aida64gpgpu.py
similarity index 90%
rename from deprecated/aida64gpgpu/aida64gpgpu.py
rename to zdeprecated/aida64gpgpu/aida64gpgpu.py
index ef5dd8f..f36d370 100644
--- a/deprecated/aida64gpgpu/aida64gpgpu.py
+++ b/zdeprecated/aida64gpgpu/aida64gpgpu.py
@@ -23,8 +23,7 @@ logging.getLogger('').addHandler(console)
 executable = os.path.join(INSTALL_DIR, EXECUTABLE)
 report_dest = os.path.join(log_dir, "report.xml")
 
-argstr = f"/GGBENCH {report_dest}"
 result = subprocess.run([executable, "/GGBENCH", report_dest], check=False)
 
 if result.returncode > 0:
     logging.error("Aida failed with exit code {result.returncode}")
diff --git a/deprecated/aida64gpgpu/manifest.yaml b/zdeprecated/aida64gpgpu/manifest.yaml
similarity index 100%
rename from deprecated/aida64gpgpu/manifest.yaml
rename to zdeprecated/aida64gpgpu/manifest.yaml
diff --git a/deprecated/aida64gpgpu/sample_gpgpu.xml b/zdeprecated/aida64gpgpu/sample_gpgpu.xml
similarity index 100%
rename from deprecated/aida64gpgpu/sample_gpgpu.xml
rename to zdeprecated/aida64gpgpu/sample_gpgpu.xml
diff --git a/zz_non_game_harness_template/harness.py b/zz_non_game_harness_template/harness.py
new file mode 100644
index 0000000..eaa6f9f
--- /dev/null
+++ b/zz_non_game_harness_template/harness.py
@@ -0,0 +1 @@
+# This is a non-game harness template