Non game continued (#144)

Author: j-lin-lmg
Committed: 2025-05-28 16:23:46 -07:00 (via GitHub)
Parent: 022fb0a979
Commit: 80a8a2b9b2
5 changed files with 34 additions and 17 deletions

File 1 of 5: Blender Benchmark runner script

@@ -117,10 +117,12 @@ for report in json_array:
     scene_report = {
         "timestamp": report['timestamp'],
         "version": blender_version,
-        "test": f"Blender Benchmark {report['scene']['label']} {DEVICE_TYPE}",
+        "test": "Blender Benchmark",
+        "test_parameter": f"{report['scene']['label']} ",
         "score": round(report['stats']['samples_per_minute'], 2),
         "unit": "samples per minute",
-        "device": report['device_info']['compute_devices'][0]['name']
+        "device": report['device_info']['compute_devices'][0]['name'],
+        "device_type": DEVICE_TYPE,
     }
     logging.info(json.dumps(scene_report, indent=2))
 
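
Under the new schema, the scene label moves out of the test name into its own test_parameter field, and the device type becomes a separate device_type field instead of being embedded in the test string. For illustration, a scene_report entry might now look like this (every value is invented, not taken from a real run):

    # Hypothetical scene_report entry after this change; all values
    # below are placeholders for illustration only.
    {
        "timestamp": "2025-05-28T16:23:46-07:00",
        "version": "4.4.0",
        "test": "Blender Benchmark",
        "test_parameter": "monster ",
        "score": 2531.67,
        "unit": "samples per minute",
        "device": "NVIDIA GeForce RTX 4090",
        "device_type": "GPU",
    }

Note the trailing blank in test_parameter: the f-string f"{report['scene']['label']} " keeps a trailing space, which downstream consumers may want to strip.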

File 2 of 5: Cinebench 2024 runner script

@@ -33,7 +33,8 @@ DURATION_OPTION = "g_CinebenchMinimumTestDuration=1"
 
 parser = ArgumentParser()
 parser.add_argument(
-    "-t", "--test", dest="test", help="Cinebench test type", required=True, choices=TEST_OPTIONS.keys())
+    "-t", "--test", dest="test", help="Cinebench test type", required=True,
+    choices=TEST_OPTIONS.keys())
 args = parser.parse_args()
 
 script_dir = Path(__file__).resolve().parent
@@ -63,21 +64,30 @@ try:
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             bufsize=1,
             universal_newlines=True) as proc:
-        logging.info("Cinebench started. Waiting for setup to finish to set process priority.")
+        logging.info(
+            "Cinebench started. Waiting for setup to finish to set process priority.")
+        START_TIME = 0
+        if proc.stdout is None:
+            logging.error("Cinebench process did not start correctly!")
+            sys.exit(1)
         for line in proc.stdout:
             if "BEFORERENDERING" in line:
-                elapsed_setup_time = round(time.time() - setup_start_time, 2)
+                elapsed_setup_time = round(
+                    time.time() - setup_start_time, 2)
                 logging.info("Setup took %.2f seconds", elapsed_setup_time)
-                logging.info("Setting Cinebench process priority to high (PID: %s)", proc.pid)
+                logging.info(
+                    "Setting Cinebench process priority to high (PID: %s)",
+                    proc.pid)
                 process = psutil.Process(proc.pid)
                 process.nice(psutil.HIGH_PRIORITY_CLASS)
-                start_time = time.time()
+                START_TIME = time.time()
                 break
 
         out, _ = proc.communicate()
         if proc.returncode > 0:
-            logging.warning("Cinebench exited with return code %d", proc.returncode)
+            logging.warning(
+                "Cinebench exited with return code %d", proc.returncode)
 
         score = get_score(out)
         if score is None:
@@ -85,19 +95,20 @@ try:
             sys.exit(1)
 
         end_time = time.time()
-        elapsed_test_time = round(end_time - start_time, 2)
+        elapsed_test_time = round(end_time - START_TIME, 2)
         logging.info("Benchmark took %.2f seconds", elapsed_test_time)
 
         report = {
-            "test": friendly_test_name(test_type),
+            "test": "Cinebench 2024",
+            "test_parameter": friendly_test_name(test_type),
             "score": score,
             "unit": "score",
-            "start_time": seconds_to_milliseconds(start_time),
+            "start_time": seconds_to_milliseconds(START_TIME),
             "end_time": seconds_to_milliseconds(end_time)
         }
         session_report.append(report)
-        write_report_json(log_dir, "report.json", session_report)
+        write_report_json(str(log_dir), "report.json", session_report)
 
 except Exception as e:
     logging.error("Something went wrong running the benchmark!")
     logging.exception(e)
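
Two of the additions in this file are defensive rather than behavioral: START_TIME = 0 guarantees the name is bound even if the BEFORERENDERING marker never appears in the output, and the proc.stdout is None guard narrows Popen.stdout, which is typed Optional[IO[str]] and is only a real stream when stdout=subprocess.PIPE is passed (the rename from start_time to START_TIME also looks like a linter-driven naming fix). A minimal, self-contained sketch of the same wait-for-marker pattern; the child command is a stand-in, not Cinebench:

    import subprocess
    import sys

    # Stand-in child that prints the marker line; in the real script
    # this is the Cinebench executable.
    with subprocess.Popen(
            [sys.executable, "-c", "print('BEFORERENDERING')"],
            stdout=subprocess.PIPE,
            universal_newlines=True) as proc:
        if proc.stdout is None:
            # Unreachable when stdout=PIPE is set, but the check narrows
            # Optional[IO[str]] for type checkers such as mypy.
            sys.exit(1)
        for line in proc.stdout:
            if "BEFORERENDERING" in line:
                break  # marker seen; the real script raises priority here
        proc.communicate()  # drain remaining output and wait for exit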

File 3 of 5: HandBrake encoding runner script

@@ -77,6 +77,7 @@ formatter = logging.Formatter(DEFAULT_LOGGING_FORMAT)
 console.setFormatter(formatter)
 logging.getLogger("").addHandler(console)
 
+
 def main():
     """entrypoint"""
     parser = ArgumentParser()
@@ -133,7 +134,8 @@ def main():
         end_time = current_time_ms()
 
         report = {
-            "test": f"HandBrake Encoding BBB {args.encoder.upper()}",
+            "test": "HandBrake Encoding",
+            "test_parameter": f"{ENCODER_TO_PRESET[args.encoder]['name']}",
             "score": score,
             "unit": "frames per second",
             "version": "1.9.1",
@@ -141,11 +143,12 @@ def main():
             "end_time": end_time
         }
 
-        write_report_json(LOG_DIR, "report.json", report)
+        write_report_json(str(LOG_DIR), "report.json", report)
     except Exception as e:
         logging.error("Something went wrong running the benchmark!")
         logging.exception(e)
         sys.exit(1)
 
+
 if __name__ == "__main__":
     main()
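
The new test_parameter here resolves a display name through ENCODER_TO_PRESET[args.encoder]['name']. That mapping is defined elsewhere in the script and is not part of this diff; a shape like the following would satisfy the lookup (keys and names are guesses for illustration):

    # Hypothetical shape of ENCODER_TO_PRESET; the real mapping in the
    # script may use different keys, names, and extra fields.
    ENCODER_TO_PRESET = {
        "x264": {"name": "H.264 (x264)"},
        "x265": {"name": "H.265 (x265)"},
    }

The str(LOG_DIR) change mirrors the str(log_dir) change in the Cinebench file and suggests write_report_json expects a plain string path while LOG_DIR is a pathlib.Path.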

File 4 of 5: benchmark options config (YAML)

@@ -15,6 +15,6 @@ options:
       - aftereffects
       - resolve
     tooltip: Select which test to run
-  - name: benchmark
+  - name: benchmark_version
     type: input
     tooltip: Version of benchmark to run

File 5 of 5: Unigine Superposition runner script

@@ -70,7 +70,8 @@ with open(log_path, encoding="utf-8") as log:
 
     report = {
         "test": "Unigine Superposition",
-        "test_parameter": f"{args.api} {args.preset}",
+        "test_parameter": f"{args.api}",
+        "test_preset": args.preset,
         "score": SCORE,
         "unit": "score"
     }
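
Taken together, the five files converge on one reporting convention: test holds a stable benchmark name, and the variable part of a run moves into test_parameter (plus test_preset for Superposition and device_type for Blender). A sketch of why that helps downstream, assuming reports are collected into one flat list (the function and field access below are illustrative, not code from this repo):

    from collections import defaultdict

    def group_by_test(reports):
        """Group scores under each stable benchmark name."""
        grouped = defaultdict(list)
        for entry in reports:
            grouped[entry["test"]].append(
                (entry.get("test_parameter"), entry["score"]))
        return grouped

With the old embedded names (e.g. "Blender Benchmark monster GPU"), every scene and device combination registered as a distinct test; under the new schema they aggregate under a single key.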