mirror of https://github.com/LTTLabsOSS/markbench-tests.git
synced 2026-01-09 14:07:56 -05:00
fixed some linter issues
@@ -134,6 +134,8 @@ def get_arguments():
def create_procyon_command(test_option, process_name, device_id):
    """create command string"""
    command = str()

    if device_id == 'CPU':
        command = f'\"{ABS_EXECUTABLE_PATH}\" --definition={test_option} --export=\"{REPORT_PATH}\"'
    else:
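The function this hunk extends assembles the Procyon CLI invocation as one quoted string. A minimal self-contained sketch of that pattern follows, dropping the unused process_name parameter for brevity; the two path constants and the non-CPU tail are illustrative assumptions, since the else branch is truncated in this hunk.

# Sketch only: ABS_EXECUTABLE_PATH and REPORT_PATH are assumed values, and
# the device-selection tail in the else branch is hypothetical; the real
# flag is not visible in the diff.
ABS_EXECUTABLE_PATH = r"C:\Program Files\UL\Procyon\ProcyonCmd.exe"
REPORT_PATH = r"C:\procyon\report.xml"

def create_procyon_command(test_option, device_id):
    """Build the benchmark command string for one test and device."""
    if device_id == 'CPU':
        # CPU runs need no device selector, matching the visible branch.
        return f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{REPORT_PATH}"'
    # Hypothetical: non-CPU engines append some device selector here.
    return f'"{ABS_EXECUTABLE_PATH}" --definition={test_option} --export="{REPORT_PATH}" {device_id}'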
@@ -152,7 +154,6 @@ def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
        logging.info("Procyon AI Computer Vision benchmark has started.")
        start_time = time.time()
        while True:
            now = time.time()
            elapsed = now - start_time
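All three run_benchmark variants in this commit share the shape visible here: launch the benchmark under subprocess.Popen, record a start time, then poll in a loop. A minimal runnable sketch of that watchdog pattern; the timeout constant and the kill-on-timeout behavior are assumptions, since the loop body below the elapsed calculation falls outside the hunk.

import logging
import subprocess
import time

TIMEOUT_SECONDS = 1800  # assumed; the actual limit is not shown in the diff

def run_benchmark(command_to_run):
    """Launch the benchmark and poll until it exits or times out."""
    with subprocess.Popen(
        command_to_run,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True,
    ) as proc:
        logging.info("Benchmark has started.")
        start_time = time.time()
        while True:
            if proc.poll() is not None:    # the process finished on its own
                return proc.returncode
            elapsed = time.time() - start_time
            if elapsed > TIMEOUT_SECONDS:  # watchdog: give up and kill it
                proc.kill()
                raise TimeoutError("benchmark exceeded time limit")
            time.sleep(1)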
@@ -168,15 +169,15 @@ def run_benchmark(process_name, command_to_run):
 try:
     setup_logging()
-    logging.info(f"Detected Windows ML Devices: {WINML_DEVICES}")
-    logging.info(f"Detected OpenVino Devices: {OPENVINO_DEVICES}")
-    logging.info(f"Detected CUDA Devices: {CUDA_DEVICES}")
+    logging.info("Detected Windows ML Devices: %s", str(WINML_DEVICES))
+    logging.info("Detected OpenVino Devices: %s", str(OPENVINO_DEVICES))
+    logging.info("Detected CUDA Devices: %s", (CUDA_DEVICES))

     args = get_arguments()
     option = BENCHMARK_CONFIG[args.engine]["config"]
-    process_name = BENCHMARK_CONFIG[args.engine]["process_name"]
-    device_id = BENCHMARK_CONFIG[args.engine]["device_id"]
-    cmd = create_procyon_command(option, process_name, device_id)
+    proc_name = BENCHMARK_CONFIG[args.engine]["process_name"]
+    dev_id = BENCHMARK_CONFIG[args.engine]["device_id"]
+    cmd = create_procyon_command(option, proc_name, dev_id)
     logging.info('Starting benchmark!')
     logging.info(cmd)
     start_time = time.time()
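Two linter fixes are visible in this hunk. The logging lines address pylint W1203 (logging-fstring-interpolation): an f-string is formatted eagerly even when the record is filtered out, while %s-style arguments are interpolated only if the message is actually emitted. In miniature:

import logging

devices = ["GPU 0", "GPU 1"]

# Before: the string is built even if INFO logging is disabled (pylint W1203).
logging.info(f"Detected CUDA Devices: {devices}")

# After: lazy %-formatting defers the work to the logging framework.
logging.info("Detected CUDA Devices: %s", devices)

The proc_name and dev_id renames in the same hunk address W0621 (redefined-outer-name): run_benchmark already takes parameters named process_name and device_id, so module-level variables with the same names shadow them; renaming the module-level variables clears the warning.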
@@ -176,7 +176,6 @@ def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
        logging.info("Procyon AI Image Generation benchmark has started.")
        start_time = time.time()
        while True:
            now = time.time()
            elapsed = now - start_time
@@ -192,9 +191,9 @@ def run_benchmark(process_name, command_to_run):
 try:
     setup_logging()
-    logging.info(f"Detected Windows ML Devices: {WINML_DEVICES}")
-    logging.info(f"Detected OpenVino Devices: {OPENVINO_DEVICES}")
-    logging.info(f"Detected CUDA Devices: {CUDA_DEVICES}")
+    logging.info("Detected Windows ML Devices: %s", str(WINML_DEVICES))
+    logging.info("Detected OpenVino Devices: %s", str(OPENVINO_DEVICES))
+    logging.info("Detected CUDA Devices: %s", (CUDA_DEVICES))

     args = get_arguments()
     option = BENCHMARK_CONFIG[args.engine]["config"]
@@ -209,6 +208,7 @@ try:
    sys.exit(pr.returncode)

    score = find_score_in_xml()

    if score is None:
        logging.error("Could not find overall score!")
        sys.exit(1)
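find_score_in_xml, called right after the process exits, reads the overall result back out of the exported report. Its body is not part of this diff; a hedged sketch of how such a lookup might work with xml.etree.ElementTree, where the element name and the report path are assumptions:

import xml.etree.ElementTree as ET

REPORT_PATH = r"C:\procyon\report.xml"  # assumed location

def find_score_in_xml():
    """Return the overall benchmark score from the report, or None."""
    try:
        root = ET.parse(REPORT_PATH).getroot()
    except (FileNotFoundError, ET.ParseError):
        return None
    # Hypothetical tag name; the real report schema is not in this diff.
    node = root.find(".//OverallScore")
    return node.text if node is not None else None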
@@ -1,7 +1,6 @@
"""3dmark test utils"""
from pathlib import Path
import psutil
import xml.etree.ElementTree as ET
import winreg
import re
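The one-line change in this utils file is not fully visible, but the import block itself is the kind of thing pylint's import checks flag (C0411 wrong-import-order, C0412 ungrouped-imports): psutil is a third-party package sitting in the middle of standard-library imports. Reordered to satisfy those checks, the block would read:

"""3dmark test utils"""
import re
import winreg
import xml.etree.ElementTree as ET
from pathlib import Path

import psutil  # third-party, grouped after the standard library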
@@ -126,7 +126,6 @@ def run_benchmark(process_name, command_to_run):
    """run the benchmark"""
    with subprocess.Popen(command_to_run, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True) as proc:
        logging.info("Procyon AI Text Generation benchmark has started.")
        start_time = time.time()
        while True:
            now = time.time()
            elapsed = now - start_time
@@ -161,7 +160,7 @@ try:
    end_time = time.time()
    elapsed_test_time = round(end_time - start_time, 2)

    if not args.engine == "All_Models_OPENVINO" and not args.engine == "All_Models_ONNX":
        report = {
            "test": BENCHMARK_CONFIG[args.engine]["test_name"],
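A side note on the condition this hunk touches: pylint flags each not ... == comparison as unnecessary-negation, and the chained pair collapses naturally into a membership test. Equivalent, and closer to what the linter wants:

engine = "All_Models_ONNX"  # stand-in for args.engine

# Original shape: not engine == "All_Models_OPENVINO" and not engine == "All_Models_ONNX"
if engine not in ("All_Models_OPENVINO", "All_Models_ONNX"):
    print("single-model run: build a per-test report")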
@@ -203,7 +202,7 @@ try:
        session_report.append(report)

    write_report_json(LOG_DIR, "report.json", session_report)

except Exception as e:
    logging.error("Something went wrong running the benchmark!")
    logging.exception(e)
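write_report_json is called with a directory, a file name, and the accumulated session_report list; its definition lives outside this diff. A minimal sketch under that assumed signature:

import json
from pathlib import Path

def write_report_json(log_dir, file_name, report):
    """Write the session report as JSON; the signature is assumed from the call site."""
    out_path = Path(log_dir) / file_name
    out_path.parent.mkdir(parents=True, exist_ok=True)
    with open(out_path, "w", encoding="utf-8") as file:
        json.dump(report, file, indent=2)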