mirror of
https://github.com/JHUAPL/kaiju.git
synced 2026-01-09 15:17:56 -05:00
Merged in ewinter-derecho_testing (pull request #18)
Many fixes and upgrades to testing infrastructure on derecho Approved-by: Jeff Approved-by: ksorathia
This commit is contained in:
@@ -33,7 +33,7 @@
|
||||
<REMIX>
|
||||
<grid Np="720" Nt="90" LowLatBoundary="45.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
</REMIX>
|
||||
<RCM>
|
||||
<ellipse xSun="12.5" yDD="15.0" xTail="-15.0" isDynamic="T"/>
|
||||
|
||||
@@ -33,7 +33,7 @@ import sys
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -110,21 +110,22 @@ def main():
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
is_test = args.test
|
||||
slack_on_fail = args.slack_on_fail
|
||||
verbose = args.verbose
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
print(f"Current directory is {os.getcwd()}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make a directory to hold all of the initial condition build tests.
|
||||
print(f"Creating ${INITIAL_CONDITION_BUILD_TEST_DIRECTORY}.")
|
||||
os.mkdir(INITIAL_CONDITION_BUILD_TEST_DIRECTORY)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make a list of module sets to build with.
|
||||
|
||||
@@ -135,13 +136,13 @@ def main():
|
||||
if debug:
|
||||
print(f"module_list_files = {module_list_files}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Get a list of initial conditions to try, ignoring files in the
|
||||
# "deprecated" folder. GAMERA ONLY FOR NOW.
|
||||
# "deprecated" folder.
|
||||
initial_condition_paths = []
|
||||
|
||||
for root, directories, filenames in os.walk(
|
||||
for root, _, filenames in os.walk(
|
||||
INITIAL_CONDITION_SRC_DIRECTORY
|
||||
):
|
||||
if 'deprecated' not in root and 'underdev' not in root:
|
||||
@@ -150,7 +151,7 @@ def main():
|
||||
if debug:
|
||||
print(f"initial_condition_paths = {initial_condition_paths}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Initalize test results for all module sets and initial conditions to
|
||||
# False (failed).
|
||||
@@ -303,19 +304,13 @@ def main():
|
||||
# End loop over initial conditions
|
||||
# End loop over module sets
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
'Test results are on `derecho` in '
|
||||
f"`{INITIAL_CONDITION_BUILD_TEST_DIRECTORY}`.\n"
|
||||
)
|
||||
for (i_test, module_list_file) in enumerate(module_list_files):
|
||||
module_set_name = module_list_file.rstrip('.lst')
|
||||
@@ -333,41 +328,40 @@ def main():
|
||||
test_report_details_string += '*PASSED*\n'
|
||||
else:
|
||||
test_report_details_string += '*FAILED*\n'
|
||||
test_report_details_string += 'This module set used:\n'
|
||||
path = os.path.join(MODULE_LIST_DIRECTORY, module_list_file)
|
||||
lines = open(path).readlines()
|
||||
for line in lines:
|
||||
test_report_details_string += f"{line}\n"
|
||||
|
||||
|
||||
# Summarize the test results.
|
||||
test_report_summary_string = (
|
||||
'Summary of initial condition build test results from `ICtest.py`'
|
||||
f" for branch or commit or tag {BRANCH_OR_COMMIT}: "
|
||||
f" for branch or commit or tag `{BRANCH_OR_COMMIT}`: "
|
||||
)
|
||||
if 'FAILED' in test_report_details_string:
|
||||
test_report_summary_string += '*FAILED*\n'
|
||||
test_report_summary_string += 'Details in thread.\n'
|
||||
else:
|
||||
test_report_summary_string += '*ALL PASSED*\n'
|
||||
test_report_summary_string += '*PASSED*\n'
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if 'FAILED' in test_report_summary_string:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
_ = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
|
||||
@@ -20,7 +20,7 @@ import sys
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -88,7 +88,7 @@ def main():
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_details_message = ''
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
)
|
||||
@@ -98,7 +98,7 @@ def main():
|
||||
has_error = False
|
||||
has_pass = False
|
||||
if os.path.exists(PYTHON_UNIT_TEST_LOG_FILE):
|
||||
test_details_message += 'Python unit tests ran to completion.\n'
|
||||
test_report_details_string += 'Python unit tests ran to completion.\n'
|
||||
with open(PYTHON_UNIT_TEST_LOG_FILE, encoding='utf-8') as f:
|
||||
lines = f.readlines()
|
||||
last_line = lines[-1]
|
||||
@@ -109,17 +109,19 @@ def main():
|
||||
if 'passed' in last_line:
|
||||
has_pass = True
|
||||
if has_error:
|
||||
test_details_message += 'Python unit tests error detected.\n'
|
||||
test_report_details_string += 'Python unit tests error detected.\n'
|
||||
if has_fail:
|
||||
test_details_message += 'Python unit tests: *FAILED*\n'
|
||||
test_report_details_string += 'Python unit tests: *FAILED*\n'
|
||||
if has_pass and not has_error and not has_fail:
|
||||
test_details_message += 'Python unit tests: *PASSED*\n'
|
||||
test_report_details_string += 'Python unit tests: *PASSED*\n'
|
||||
if not has_pass and not has_error and not has_fail:
|
||||
test_details_message += (
|
||||
test_report_details_string += (
|
||||
'Unexpected error occured during python unit tests.\n'
|
||||
)
|
||||
else:
|
||||
test_details_message += 'Python unit tests did not run to completion.\n'
|
||||
test_report_details_string += (
|
||||
'Python unit tests did not run to completion.\n'
|
||||
)
|
||||
has_pass = False
|
||||
|
||||
# Summarize the test results.
|
||||
@@ -140,7 +142,7 @@ def main():
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_details_message, thread_ts=thread_ts,
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
else:
|
||||
@@ -153,5 +155,4 @@ def main():
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
"""Call main program function."""
|
||||
main()
|
||||
224
testingScripts/_obsolete/pyunitTest.py
Executable file
224
testingScripts/_obsolete/pyunitTest.py
Executable file
@@ -0,0 +1,224 @@
|
||||
#!/usr/bin/env python

"""Run MAGE python unit tests.

This script runs a series of unit tests of the MAGE python software.

Authors
-------
Jeff Garretson
Eric Winter
"""


# Import standard modules.
import datetime
import os
import shutil
import subprocess
import sys

# Import 3rd-party modules.
from jinja2 import Template

# Import project modules.
import common


# Program constants

# Program description.
DESCRIPTION = 'Script for MAGE python unit testing'


# Paths under this test session directory.

# Path to directory for this set of python unit tests.
# NOTE(review): requires the MAGE_TEST_SET_ROOT environment variable to be
# set; a KeyError is raised at import time if it is missing.
PYUNIT_TEST_DIRECTORY = os.path.join(os.environ['MAGE_TEST_SET_ROOT'],
                                     'pyunitTest')

# Name of PBS file to create from the jinja2 template for the tests.
PBS_FILE = 'pyunitTest.pbs'


# Paths under the kaiju installation to test.

# Top of installation tree for kaiju installation to test.
# NOTE(review): KAIJUHOME must be set in the environment before import.
KAIJUHOME = os.environ['KAIJUHOME']

# Path to testing script directory.
KAIJU_TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')

# Path to pytests directory.
KAIJU_PYTESTS_DIRECTORY = os.path.join(KAIJUHOME, 'pytests')

# Path to jinja2 template file for the PBS script used for the python unit
# tests.
PBS_TEMPLATE = os.path.join(
    KAIJU_TEST_SCRIPTS_DIRECTORY, 'pyunitTest-template.pbs'
)
||||
|
||||
def main():
    """Submit the MAGE python unit tests as a PBS job and report via Slack.

    Renders the PBS job script from its jinja2 template, copies the unit
    test files into a fresh test directory, submits the job with ``qsub``,
    and (in loud mode) posts a summary of the submission to Slack.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Raises
    ------
    KeyError
        If a required environment variable is not set.
    subprocess.CalledProcessError
        If the qsub submission fails (check=True).
    """
    # Set up the command-line parser.
    parser = common.create_command_line_parser(DESCRIPTION)

    # Parse the command-line arguments.
    args = parser.parse_args()
    if args.debug:
        print(f"args = {args}")
    debug = args.debug
    be_loud = args.loud
    is_test = args.test
    verbose = args.verbose

    # ------------------------------------------------------------------------

    if debug:
        print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
        print(f"Current directory is {os.getcwd()}")

    # ------------------------------------------------------------------------

    # Make a directory to hold the python unit tests, and go there.
    if verbose:
        print(f"Creating {PYUNIT_TEST_DIRECTORY}.")
    os.mkdir(PYUNIT_TEST_DIRECTORY)
    os.chdir(PYUNIT_TEST_DIRECTORY)

    # ------------------------------------------------------------------------

    # Create the PBS script for this test session from the template.

    # Read the template for the PBS script used for the tests.
    with open(PBS_TEMPLATE, 'r', encoding='utf-8') as f:
        template_content = f.read()
    pbs_template = Template(template_content)
    if debug:
        print(f"pbs_template = {pbs_template}")

    # Set the values for the template fields.
    pbs_options = {
        'job_name': 'pyunitTest',
        'account': os.environ['DERECHO_TESTING_ACCOUNT'],
        'queue': 'main',
        'job_priority': 'economy',
        'walltime': '01:00:00',
        'select': '1',
        'ncpus': '128',
        'mpiprocs': '1',
        'ompthreads': '128',
        'mage_test_root': os.environ['MAGE_TEST_ROOT'],
        'cdf_setup_script': (
            f"{os.environ['MAGE_TEST_ROOT']}/local/cdf/3.9.0/bin/definitions.B"
        ),
        'condarc': os.environ['CONDARC'],
        'conda_envs_path': os.environ['CONDA_ENVS_PATH'],
        'conda_environment': 'kaiju-3.8-testing',
        'kaijuhome': os.environ['KAIJUHOME'],
        'tmpdir': os.environ['TMPDIR'],
        'slack_bot_token': os.environ['SLACK_BOT_TOKEN'],
        'pytest_output_file': 'kaiju-pyunit.txt',
    }

    # Render the template and write it to a file.
    pbs_content = pbs_template.render(pbs_options)
    with open(PBS_FILE, 'w', encoding='utf-8') as f:
        f.write(pbs_content)

    # ------------------------------------------------------------------------

    # Copy the python unit test files from the source tree.
    for filename in ['test_satcomp_cdasws.py']:
        from_path = os.path.join(KAIJU_PYTESTS_DIRECTORY, filename)
        to_path = os.path.join('.', filename)
        shutil.copyfile(from_path, to_path)

    # ------------------------------------------------------------------------

    # Run the python unit tests as a PBS job.

    # Submit the unit test script for python. Use an argument list (no
    # shell) so the file name is passed to qsub without shell parsing.
    cmd = ['qsub', PBS_FILE]
    if debug:
        print(f"cmd = {cmd}")
    cproc = subprocess.run(cmd, check=True, text=True, capture_output=True)
    read_string = cproc.stdout.rstrip()
    if debug:
        print(f"read_string = {read_string}")
    # qsub prints the job ID as "<number>.<server>"; keep only the number.
    job_name_1 = read_string.split('.')[0]
    if debug:
        print(f"job_name_1 = {job_name_1}")
    if verbose:
        print(
            f"Python unit test PBS script {PBS_FILE} submitted as "
            f"job {job_name_1}."
        )

    # ------------------------------------------------------------------------

    # Set up for communication with Slack.
    slack_client = common.slack_create_client()
    if debug:
        print(f"slack_client = {slack_client}")

    # ------------------------------------------------------------------------

    # Detail the test results. Report the actual PBS file name (the old
    # message hard-coded `pyunit.pbs`, which did not match PBS_FILE).
    test_report_details_string = ''
    test_report_details_string += (
        f"Test results are in {os.getcwd()}.\n"
    )
    test_report_details_string += (
        f"Python unit test PBS job script `{PBS_FILE}` submitted as job "
        f"{job_name_1}.\n"
    )

    # Summarize the test results.
    test_summary_message = (
        'Python unit tests submitted by `pyunitTest.py`'
        f" for branch or commit or tag {os.environ['BRANCH_OR_COMMIT']}: "
    )

    # Print the test results summary and details.
    print(test_summary_message)
    print(test_report_details_string)

    # If loud mode is on, post report to Slack.
    if be_loud:
        test_summary_message += 'Details in thread for this message.\n'
        slack_response_summary = common.slack_send_message(
            slack_client, test_summary_message, is_test=is_test
        )
        if slack_response_summary['ok']:
            # Post the details as a threaded reply to the summary message.
            thread_ts = slack_response_summary['ts']
            _ = common.slack_send_message(
                slack_client, test_report_details_string, thread_ts=thread_ts,
                is_test=is_test
            )
        else:
            print('*ERROR* Unable to post test result summary to Slack.')

    # ------------------------------------------------------------------------

    if debug:
        print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
if __name__ == '__main__':
    # Entry point when executed as a script.
    main()
|
||||
@@ -34,7 +34,7 @@
|
||||
</CHIMP>
|
||||
<REMIX>
|
||||
<conductance doStarlight="T" doRamp="F"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
</REMIX>
|
||||
<RCM>
|
||||
<rcmdomain domType="ELLIPSE"/>
|
||||
|
||||
@@ -31,7 +31,7 @@ import sys
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -49,9 +49,6 @@ BUILD_TEST_DIRECTORY = os.path.join(MAGE_TEST_SET_ROOT, 'buildTest')
|
||||
EXECUTABLE_LIST_BUILD_DIRECTORY = os.path.join(BUILD_TEST_DIRECTORY,
|
||||
'build_executable_list')
|
||||
|
||||
# Prefix for naming build test directories
|
||||
BUILD_TEST_DIRECTORY_PREFIX = 'buildTest_'
|
||||
|
||||
# Home directory of kaiju installation
|
||||
KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
@@ -63,11 +60,15 @@ MODULE_LIST_DIRECTORY = os.path.join(TEST_SCRIPTS_DIRECTORY,
|
||||
'mage_build_test_modules')
|
||||
|
||||
# Path to module list file to use when generating the list of executables
|
||||
EXECUTABLE_LIST_MODULE_LIST = os.path.join(MODULE_LIST_DIRECTORY, '01.lst')
|
||||
# Use a module set without MKL.
|
||||
EXECUTABLE_LIST_MODULE_LIST = os.path.join(MODULE_LIST_DIRECTORY, '04.lst')
|
||||
|
||||
# Path to file containing list of module sets to use for build tests
|
||||
BUILD_TEST_LIST_FILE = os.path.join(MODULE_LIST_DIRECTORY, 'build_test.lst')
|
||||
|
||||
# Prefix for naming build test directories
|
||||
BUILD_TEST_DIRECTORY_PREFIX = 'buildTest_'
|
||||
|
||||
# Name of subdirectory of current build subdirectory containing binaries
|
||||
BUILD_BIN_DIR = 'bin'
|
||||
|
||||
@@ -103,6 +104,7 @@ def main():
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
is_test = args.test
|
||||
slack_on_fail = args.slack_on_fail
|
||||
verbose = args.verbose
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
@@ -226,7 +228,7 @@ def main():
|
||||
# Do a build with each set of modules.
|
||||
for (i_test, module_list_file) in enumerate(module_list_files):
|
||||
if verbose:
|
||||
print('Performing build test with module set '
|
||||
print('Performing build test with module list file '
|
||||
f"{module_list_file}")
|
||||
|
||||
# Extract the name of the list.
|
||||
@@ -331,17 +333,10 @@ def main():
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
f"Test results are on `derecho` in `{BUILD_TEST_DIRECTORY}`.\n"
|
||||
)
|
||||
for (i_test, module_list_file) in enumerate(module_list_files):
|
||||
module_set_name = module_list_file.rstrip('.lst')
|
||||
@@ -350,40 +345,40 @@ def main():
|
||||
test_report_details_string += '*PASSED*\n'
|
||||
else:
|
||||
test_report_details_string += '*FAILED*\n'
|
||||
test_report_details_string += 'This module set used:\n'
|
||||
path = os.path.join(MODULE_LIST_DIRECTORY, module_list_file)
|
||||
lines = open(path).readlines()
|
||||
for line in lines:
|
||||
test_report_details_string += f"{line}\n"
|
||||
|
||||
# Summarize the test results.
|
||||
test_report_summary_string = (
|
||||
'Summary of build test results from `buildTest.py`'
|
||||
f" for branch or commit or tag {BRANCH_OR_COMMIT}: "
|
||||
f" for branch or commit or tag `{BRANCH_OR_COMMIT}`: "
|
||||
)
|
||||
if 'FAILED' in test_report_details_string:
|
||||
test_report_summary_string += '*FAILED*\n'
|
||||
test_report_summary_string += 'Details in thread.\n'
|
||||
test_report_summary_string += '*FAILED*'
|
||||
else:
|
||||
test_report_summary_string += '*ALL PASSED*\n'
|
||||
test_report_summary_string += '*PASSED*'
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if 'FAILED' in test_report_summary_string:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
_ = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
|
||||
@@ -104,22 +104,18 @@ def create_command_line_parser(description):
|
||||
None
|
||||
"""
|
||||
parser = argparse.ArgumentParser(description=description)
|
||||
parser.add_argument(
|
||||
'--account', default=os.environ['DERECHO_TESTING_ACCOUNT'],
|
||||
help='PBS account to use for testing (default: %(default)s)'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--debug', '-d', action='store_true',
|
||||
help='Print debugging output (default: %(default)s).'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--force', '-f', action='store_true',
|
||||
help='Force all tests to run (default: %(default)s).'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--loud', '-l', action='store_true',
|
||||
help='Enable loud mode (post results to Slack) (default: %(default)s).'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--slack_on_fail', '-s', action='store_true', default=False,
|
||||
help='Only post to Slack on test failure (default: %(default)s).'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--test', '-t', action='store_true',
|
||||
help='Enable testing mode (default: %(default)s).'
|
||||
@@ -1,19 +1,24 @@
|
||||
# NOTE: Embedding the command sent to derecho via ssh in double-quotes
|
||||
# ensures proper remote argument parsing.
|
||||
|
||||
# Run quick tests every night on development and master.
|
||||
05 23 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'buildTest.py -lv,ICtest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-development.out 2>&1
|
||||
19 23 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'buildTest.py -lv,ICtest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-master.out 2>&1
|
||||
# Run build, initial condition, and Intel Inspector tests every morning on the
|
||||
# development and master branches. These tests do not require PBS jobs and do
|
||||
# not modify the source tree.
|
||||
05 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-1-development.out 2>&1
|
||||
10 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-1-master.out 2>&1
|
||||
|
||||
# Run tests requiring PBS jobs on Sunday nights for development and master.
|
||||
# Non-dash tests
|
||||
15 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'pyunitTest.py -lv,intelChecks.py -lv,unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weekly-tests-development.out 2>&1
|
||||
20 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'pyunitTest.py -lv,intelChecks.py -lv,unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weekly-tests-master.out 2>&1
|
||||
# Run Fortran unit tests every morning, separately (since they change the
|
||||
# source tree).
|
||||
15 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'unitTest.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-2-development.out 2>&1
|
||||
20 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'unitTest.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-2-master.out 2>&1
|
||||
|
||||
# Run weekly dashes every Monday morning for the development and master
|
||||
# branches.
|
||||
|
||||
# Duplicate dashes for development
|
||||
25 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-development-1.out 2>&1
|
||||
30 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-development-2.out 2>&1
|
||||
25 00 * * 1 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'weeklyDash.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-1-development.out 2>&1
|
||||
30 00 * * 1 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'weeklyDash.py -sv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-2-development.out 2>&1
|
||||
|
||||
# Duplicate dashes for master
|
||||
35 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-master-1.out 2>&1
|
||||
40 23 * * 0 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing_fixes/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-master-2.out 2>&1
|
||||
35 00 * * 1 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-1-master.out 2>&1
|
||||
40 00 * * 1 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-2-master.out 2>&1
|
||||
|
||||
@@ -19,6 +19,7 @@ module load {{ module }}
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
@@ -41,15 +42,12 @@ source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
export DERECHO_TESTING_ACCOUNT={{ account }}
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
# Create the report.
|
||||
python ${KAIJUHOME}/testingScripts/intelChecksReport.py -dltv
|
||||
python ${KAIJUHOME}/testingScripts/intelChecksReport.py {{ report_options }}
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
@@ -21,15 +21,15 @@ Eric Winter
|
||||
# Import standard modules.
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# Import 3rd-party modules.
|
||||
from jinja2 import Template
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -56,15 +56,6 @@ MODULE_LIST_DIRECTORY = os.path.join(TEST_SCRIPTS_DIRECTORY,
|
||||
# Name of file containing names of modules lists to use for Intel checks
|
||||
INTEL_CHECKS_LIST_FILE = os.path.join(MODULE_LIST_DIRECTORY, 'intelChecks.lst')
|
||||
|
||||
# Prefix for naming Intel Inspector checks directories
|
||||
INTEL_CHECKS_DIRECTORY_PREFIX = 'intelChecks_'
|
||||
|
||||
# Name of build subdirectory containing binaries
|
||||
BUILD_BIN_DIR = 'bin'
|
||||
|
||||
# Name of PBS account to use for testing jobs.
|
||||
DERECHO_TESTING_ACCOUNT = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
|
||||
# Path to jinja2 template file for PBS script for the memory tests.
|
||||
MEM_CHECK_PBS_TEMPLATE = os.path.join(
|
||||
TEST_SCRIPTS_DIRECTORY, 'intelCheckSubmitMem-template.pbs'
|
||||
@@ -80,15 +71,31 @@ REPORT_PBS_TEMPLATE = os.path.join(
|
||||
TEST_SCRIPTS_DIRECTORY, 'intelCheckSubmitReport-template.pbs'
|
||||
)
|
||||
|
||||
# Prefix for naming Intel Inspector checks directories
|
||||
INTEL_CHECKS_DIRECTORY_PREFIX = 'intelChecks_'
|
||||
|
||||
# Name of build subdirectory containing binaries
|
||||
BUILD_BIN_DIR = 'bin'
|
||||
|
||||
# Name of PBS file for memory checks.
|
||||
MEM_CHECK_PBS_FILENAME = 'intelCheckSubmitMem.pbs'
|
||||
|
||||
# Name of PBS file for thread checks.
|
||||
THREAD_CHECK_PBS_FILENAME = 'intelCheckSubmitThread.pbs'
|
||||
|
||||
# Name of PBS file for report.
|
||||
# Name of PBS file for report generation.
|
||||
REPORT_PBS_FILENAME = 'intelCheckSubmitReport.pbs'
|
||||
|
||||
# Data and configuration files used by the Intel Inspector tests.
|
||||
TEST_INPUT_FILES = [
|
||||
'tinyCase.xml',
|
||||
'bcwind.h5',
|
||||
'lfmD.h5',
|
||||
'rcmconfig.h5',
|
||||
'memSuppress.sup',
|
||||
'threadSuppress.sup',
|
||||
]
|
||||
|
||||
|
||||
def main():
|
||||
"""Begin main program.
|
||||
@@ -115,9 +122,9 @@ def main():
|
||||
args = parser.parse_args()
|
||||
if args.debug:
|
||||
print(f"args = {args}")
|
||||
# account = args.account
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
@@ -177,8 +184,20 @@ def main():
|
||||
if debug:
|
||||
print(f"make_cmd = {make_cmd}")
|
||||
|
||||
# Create the list for submit results. Only set to True if all qsub commands
|
||||
# for a set are OK.
|
||||
submit_ok = [False]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"submit_ok = {submit_ok}")
|
||||
|
||||
# Create a list of lists for job IDs. There are 3 job IDs per set - one for
|
||||
# memory check, one for thread check, and one for the test report.
|
||||
job_ids = [[None, None, None]]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"job_ids = {job_ids}")
|
||||
|
||||
# Run Intel checks with each set of modules.
|
||||
for module_list_file in module_list_files:
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if verbose:
|
||||
print('Performing Intel Inspector checks with module set '
|
||||
f"{module_list_file}.")
|
||||
@@ -201,6 +220,12 @@ def main():
|
||||
print(f"cmake_environment = {cmake_environment}")
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
|
||||
# Add the additional flags needed for Intel Inspector checks.
|
||||
cmake_options += ' -DDISABLE_DEBUG_BOUNDS_CHECKS=ON'
|
||||
cmake_options += ' -DCMAKE_BUILD_TYPE=DEBUG'
|
||||
if debug:
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
|
||||
# Assemble the commands to load the listed modules.
|
||||
module_cmd = (
|
||||
f"module --force purge; module load {' '.join(module_names)}"
|
||||
@@ -208,12 +233,6 @@ def main():
|
||||
if debug:
|
||||
print(f"module_cmd = {module_cmd}")
|
||||
|
||||
# Add the additional flags needed for Intel Inspector checks.
|
||||
cmake_options += ' -DDISABLE_DEBUG_BOUNDS_CHECKS=ON'
|
||||
cmake_options += ' -DCMAKE_BUILD_TYPE=DEBUG'
|
||||
if debug:
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
|
||||
# Make a directory for this test, and go there.
|
||||
dir_name = f"{INTEL_CHECKS_DIRECTORY_PREFIX}{module_set_name}"
|
||||
build_directory = os.path.join(INTEL_CHECKS_DIRECTORY, dir_name)
|
||||
@@ -224,37 +243,30 @@ def main():
|
||||
|
||||
# Run cmake to build the Makefile.
|
||||
if verbose:
|
||||
print(
|
||||
'Running cmake to create Makefile for module set'
|
||||
f" {module_set_name}."
|
||||
)
|
||||
cmd = (
|
||||
f"{module_cmd}; {cmake_environment} cmake {cmake_options}"
|
||||
f" {KAIJUHOME} >& cmake.out"
|
||||
)
|
||||
print('Running cmake to create Makefile for module set'
|
||||
f" {module_set_name}.")
|
||||
cmd = (f"{module_cmd}; {cmake_environment} cmake {cmake_options}"
|
||||
f" {KAIJUHOME} >& cmake.out")
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
# NOTE: stdout and stderr goes cmake.out.
|
||||
_ = subprocess.run(cmd, shell=True, check=True)
|
||||
cproc = subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
f"ERROR: cmake for module set {module_set_name} failed.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f"See {os.path.join(build_directory, 'cmake.out')}"
|
||||
' for output from cmake.\n'
|
||||
f"Skipping remaining steps for module set {module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
print(f"ERROR: cmake for module set {module_set_name} failed.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f"See {os.path.join(build_directory, 'cmake.out')}"
|
||||
' for output from cmake.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
|
||||
# Run the build.
|
||||
if verbose:
|
||||
print(
|
||||
'Running make to build kaiju for module set'
|
||||
f" {module_set_name}."
|
||||
)
|
||||
print('Running make to build kaiju for module set'
|
||||
f" {module_set_name}.")
|
||||
cmd = f"{module_cmd}; {make_cmd} >& make.out"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
@@ -262,37 +274,28 @@ def main():
|
||||
# NOTE: stdout and stderr go into make.out.
|
||||
cproc = subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
f"ERROR: make for module set {module_set_name} failed.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f"See {os.path.join(build_directory, 'make.out')}"
|
||||
' for output from make.\n'
|
||||
f"Skipping remaining steps for module set {module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
print(f"ERROR: make for module set {module_set_name} failed.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f"See {os.path.join(build_directory, 'make.out')}"
|
||||
' for output from make.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
|
||||
# Go to the bin directory for testing.
|
||||
os.chdir(BUILD_BIN_DIR)
|
||||
|
||||
# Copy in the files used by the tests.
|
||||
test_files = [
|
||||
'tinyCase.xml',
|
||||
'bcwind.h5',
|
||||
'lfmD.h5',
|
||||
'rcmconfig.h5',
|
||||
'memSuppress.sup',
|
||||
'threadSuppress.sup',
|
||||
]
|
||||
for filename in test_files:
|
||||
for filename in TEST_INPUT_FILES:
|
||||
from_path = os.path.join(TEST_SCRIPTS_DIRECTORY, filename)
|
||||
to_path = os.path.join('.', filename)
|
||||
shutil.copyfile(from_path, to_path)
|
||||
|
||||
# Assemble common data to fill in the PBS templates.
|
||||
pbs_options = {}
|
||||
pbs_options['account'] = DERECHO_TESTING_ACCOUNT
|
||||
pbs_options['account'] = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
pbs_options['queue'] = os.environ['DERECHO_TESTING_QUEUE']
|
||||
pbs_options['job_priority'] = os.environ['DERECHO_TESTING_PRIORITY']
|
||||
pbs_options['modules'] = module_names
|
||||
@@ -300,7 +303,6 @@ def main():
|
||||
pbs_options['tmpdir'] = os.environ['TMPDIR']
|
||||
pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
|
||||
pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
|
||||
pbs_options['derecho_testing_account'] = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
|
||||
# Set options specific to the memory check, then render the template.
|
||||
pbs_options['job_name'] = 'mage_intelCheckSubmitMem'
|
||||
@@ -320,13 +322,21 @@ def main():
|
||||
# template.
|
||||
pbs_options['job_name'] = 'mage_intelCheckReportSubmit'
|
||||
pbs_options['walltime'] = '02:00:00'
|
||||
pbs_options['report_options'] = ''
|
||||
if debug:
|
||||
pbs_options['report_options'] += ' -d'
|
||||
# if be_loud:
|
||||
pbs_options['report_options'] += ' -l' # Always post report
|
||||
if slack_on_fail:
|
||||
pbs_options['report_options'] += ' -s'
|
||||
if is_test:
|
||||
pbs_options['report_options'] += ' -t'
|
||||
if verbose:
|
||||
pbs_options['report_options'] += ' -v'
|
||||
pbs_content = report_pbs_template.render(pbs_options)
|
||||
with open(REPORT_PBS_FILENAME, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Initialize the job ID list.
|
||||
job_ids = []
|
||||
|
||||
# Run the memory check job.
|
||||
cmd = f"qsub {MEM_CHECK_PBS_FILENAME}"
|
||||
if debug:
|
||||
@@ -335,19 +345,18 @@ def main():
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
f"Skipping remaining steps for module set {module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids.append(job_id)
|
||||
job_ids[i_module_set][0] = job_id
|
||||
|
||||
# Run the thread check job.
|
||||
cmd = f"qsub {THREAD_CHECK_PBS_FILENAME}"
|
||||
@@ -357,68 +366,78 @@ def main():
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
f"Skipping remaining steps for module set {module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids.append(job_id)
|
||||
job_ids[i_module_set][1] = job_id
|
||||
|
||||
# Run the report job when the other two jobs are complete.
|
||||
cmd = f"qsub -W depend=afterok:{':'.join(job_ids)} {REPORT_PBS_FILENAME}"
|
||||
cmd = (
|
||||
f"qsub -W depend=afterok:{':'.join(job_ids[i_module_set][:-1])} "
|
||||
f"{REPORT_PBS_FILENAME}"
|
||||
)
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
f"Skipping remaining steps for module set {module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids.append(job_id)
|
||||
job_ids[i_module_set][2] = job_id
|
||||
|
||||
# Record the job IDs.
|
||||
# Record the job IDs for this module set in a file.
|
||||
with open('jobs.txt', 'w', encoding='utf-8') as f:
|
||||
for job_id in job_ids:
|
||||
for job_id in job_ids[i_module_set]:
|
||||
f.write(f"{job_id}\n")
|
||||
|
||||
# This module set worked.
|
||||
submit_ok[i_module_set] = True
|
||||
|
||||
# End of loop over module sets.
|
||||
|
||||
# -----------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
f"Test results are on `derecho` in `{INTEL_CHECKS_DIRECTORY}`.\n"
|
||||
)
|
||||
pbs_files = [
|
||||
MEM_CHECK_PBS_FILENAME, THREAD_CHECK_PBS_FILENAME,
|
||||
REPORT_PBS_FILENAME
|
||||
]
|
||||
for (pbs_file, job_id) in zip(pbs_files, job_ids):
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if not submit_ok[i_module_set]:
|
||||
test_report_details_string += (
|
||||
f"Module set `{module_list_file}`: *FAILED*"
|
||||
)
|
||||
continue
|
||||
test_report_details_string += (
|
||||
f"{pbs_file} submitted as job {job_id}.\n"
|
||||
f"`{MEM_CHECK_PBS_FILENAME}` for module set `{module_list_file}` "
|
||||
f"submitted as PBS job {job_ids[i_module_set][0]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{THREAD_CHECK_PBS_FILENAME}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][1]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{REPORT_PBS_FILENAME}` for module set `{module_list_file}` "
|
||||
f"submitted as PBS job {job_ids[i_module_set][2]}.\n"
|
||||
)
|
||||
|
||||
# Summarize the test results
|
||||
@@ -430,22 +449,23 @@ def main():
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
test_report_summary_string += 'Details in thread for this messsage.\n'
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if 'ok' not in slack_response_details:
|
||||
print('*ERROR* Unable to post test details to Slack.')
|
||||
else:
|
||||
print('*ERROR* Unable to post test summary to Slack.')
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ import sys
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -60,7 +60,9 @@ def main():
|
||||
print(f"args = {args}")
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
@@ -72,7 +74,7 @@ def main():
|
||||
|
||||
# Check for for the job list file.
|
||||
if not os.path.exists('jobs.txt'):
|
||||
print('Nothing to report on.')
|
||||
print('jobs.txt not found. Nothing to report on.')
|
||||
sys.exit(0)
|
||||
|
||||
# Read in the jobs.txt file to get the job numbers
|
||||
@@ -110,15 +112,23 @@ def main():
|
||||
for _, dirs, _ in os.walk('.'):
|
||||
for d in dirs:
|
||||
if 'memResults' in d:
|
||||
if debug:
|
||||
print(f"Examining {d}.")
|
||||
try:
|
||||
memory_check_output = subprocess.check_output(
|
||||
['inspxe-cl', '-report summary', '-result-dir ' + d,
|
||||
'-s-f memSuppress.sup'],
|
||||
stderr=subprocess.STDOUT, universal_newlines=True)
|
||||
if debug:
|
||||
print(f"memory_check_output = {memory_check_output}")
|
||||
except subprocess.CalledProcessError as e:
|
||||
# we need to handle non-zero error code
|
||||
memory_check_output = e.output
|
||||
if debug:
|
||||
print(f"memory_check_output = {memory_check_output}")
|
||||
problem_match = re.search(problem_pattern, memory_check_output)
|
||||
if debug:
|
||||
print(f"problem_match = {problem_match}")
|
||||
if not problem_match or int(problem_match.group(1)) > 0:
|
||||
try:
|
||||
memory_check_output = subprocess.check_output(
|
||||
@@ -126,30 +136,46 @@ def main():
|
||||
'-result-dir ' + d, '-s-f memSuppress.sup',
|
||||
'-report-all'],
|
||||
stderr=subprocess.STDOUT, universal_newlines=True)
|
||||
if debug:
|
||||
print(
|
||||
f"memory_check_output = {memory_check_output}"
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
# we need to handle non-zero error code
|
||||
memory_check_output = e.output
|
||||
with open(
|
||||
memory_errors_file, 'a', encoding='utf-8'
|
||||
) as f:
|
||||
f.write(memory_check_output)
|
||||
f.write('\n')
|
||||
if debug:
|
||||
print(
|
||||
f"memory_check_output = {memory_check_output}"
|
||||
)
|
||||
with open(
|
||||
memory_errors_file, 'a', encoding='utf-8'
|
||||
) as f:
|
||||
f.write(memory_check_output)
|
||||
f.write('\n')
|
||||
|
||||
# Thread
|
||||
thread_errors_file = 'combinedThreadErrs.txt'
|
||||
for _, dirs, _ in os.walk('.'):
|
||||
for d in dirs:
|
||||
if 'threadResults' in d:
|
||||
if debug:
|
||||
print(f"Examining {d}.")
|
||||
try:
|
||||
thread_check_output = subprocess.check_output(
|
||||
['inspxe-cl', '-report summary', '-result-dir ' + d,
|
||||
'-s-f threadSuppress.sup'],
|
||||
stderr=subprocess.STDOUT, universal_newlines=True
|
||||
)
|
||||
if debug:
|
||||
print(f"thread_check_output = {thread_check_output}")
|
||||
except subprocess.CalledProcessError as e:
|
||||
# we need to handle non-zero error code
|
||||
thread_check_output = e.output
|
||||
if debug:
|
||||
print(f"thread_check_output = {thread_check_output}")
|
||||
problem_match = re.search(problem_pattern, thread_check_output)
|
||||
if debug:
|
||||
print(f"problem_match = {problem_match}")
|
||||
if not problem_match or int(problem_match.group(1)) > 0:
|
||||
try:
|
||||
thread_check_output = subprocess.check_output([
|
||||
@@ -157,28 +183,29 @@ def main():
|
||||
'-result-dir ' + d, '-s-f threadSuppress.sup',
|
||||
'-report-all'],
|
||||
stderr=subprocess.STDOUT, universal_newlines=True)
|
||||
if debug:
|
||||
print(
|
||||
f"thread_check_output = {thread_check_output}"
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
# we need to handle non-zero error code
|
||||
thread_check_output = e.output
|
||||
with open(
|
||||
thread_errors_file, 'a', encoding='utf-8'
|
||||
) as f:
|
||||
f.write(thread_check_output)
|
||||
f.write('\n')
|
||||
|
||||
# -----------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
if debug:
|
||||
print(
|
||||
f"thread_check_output = {thread_check_output}"
|
||||
)
|
||||
with open(
|
||||
thread_errors_file, 'a', encoding='utf-8'
|
||||
) as f:
|
||||
f.write(thread_check_output)
|
||||
f.write('\n')
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Intel check results are in {os.getcwd()}.\n"
|
||||
f"Test results are in `{os.getcwd()}`.\n"
|
||||
)
|
||||
test_report_details_string += 'Results of memory tests:\n'
|
||||
with open(memory_errors_file, 'r', encoding='utf-8') as f:
|
||||
@@ -197,22 +224,23 @@ def main():
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
test_report_summary_string += 'Details in thread for this messsage.\n'
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if 'ok' not in slack_response_details:
|
||||
print('*ERROR* Unable to post test details to Slack.')
|
||||
else:
|
||||
print('*ERROR* Unable to post test summary to Slack.')
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
|
||||
151
testingScripts/null_test.py
Normal file
151
testingScripts/null_test.py
Normal file
@@ -0,0 +1,151 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""Null MAGE test.
|
||||
|
||||
This script does not run any tests, and only sends a message too Slacak if
|
||||
requested.
|
||||
|
||||
NOTE: These tests are performed on a load-balance-assigned login node on
|
||||
derecho. No PBS job is submitted.
|
||||
|
||||
Authors
|
||||
-------
|
||||
Jeff Garretson
|
||||
Eric Winter
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# Import standard modules.
|
||||
import datetime
|
||||
import os
|
||||
# import subprocess
|
||||
import sys
|
||||
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
|
||||
# Program description.
|
||||
DESCRIPTION = 'Script for null MAGE test'
|
||||
|
||||
# # Root of directory tree for this set of tests.
|
||||
# MAGE_TEST_SET_ROOT = os.environ['MAGE_TEST_SET_ROOT']
|
||||
|
||||
# # Directory for build tests
|
||||
# BUILD_TEST_DIRECTORY = os.path.join(MAGE_TEST_SET_ROOT, 'buildTest')
|
||||
|
||||
# # Path to directory to use for building executable list
|
||||
# EXECUTABLE_LIST_BUILD_DIRECTORY = os.path.join(BUILD_TEST_DIRECTORY,
|
||||
# 'build_executable_list')
|
||||
|
||||
# # Home directory of kaiju installation
|
||||
# KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
# # Path to directory containing the test scripts
|
||||
# TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')
|
||||
|
||||
# # Path to directory containing module lists
|
||||
# MODULE_LIST_DIRECTORY = os.path.join(TEST_SCRIPTS_DIRECTORY,
|
||||
# 'mage_build_test_modules')
|
||||
|
||||
# # Path to module list file to use when generating the list of executables
|
||||
# # Use a module set without MKL.
|
||||
# EXECUTABLE_LIST_MODULE_LIST = os.path.join(MODULE_LIST_DIRECTORY, '04.lst')
|
||||
|
||||
# # Path to file containing list of module sets to use for build tests
|
||||
# BUILD_TEST_LIST_FILE = os.path.join(MODULE_LIST_DIRECTORY, 'build_test.lst')
|
||||
|
||||
# # Prefix for naming build test directories
|
||||
# BUILD_TEST_DIRECTORY_PREFIX = 'buildTest_'
|
||||
|
||||
# # Name of subdirectory of current build subdirectory containing binaries
|
||||
# BUILD_BIN_DIR = 'bin'
|
||||
|
||||
# # Branch or commit (or tag) used for testing.
|
||||
# BRANCH_OR_COMMIT = os.environ['BRANCH_OR_COMMIT']
|
||||
|
||||
|
||||
def main():
|
||||
"""Begin main program.
|
||||
|
||||
This is the main program code.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
None
|
||||
|
||||
Returns
|
||||
-------
|
||||
None
|
||||
|
||||
Raises
|
||||
------
|
||||
subprocess.CalledProcessError
|
||||
If an exception occurs in subprocess.run()
|
||||
"""
|
||||
# Set up the command-line parser.
|
||||
parser = common.create_command_line_parser(DESCRIPTION)
|
||||
|
||||
# Parse the command-line arguments.
|
||||
args = parser.parse_args()
|
||||
if args.debug:
|
||||
print(f"args = {args}")
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
is_test = args.test
|
||||
slack_on_fail = args.slack_on_fail
|
||||
verbose = args.verbose
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
print(f"Current directory is {os.getcwd()}")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# # Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
'Test results are on `derecho` in '
|
||||
f"`{os.environ['MAGE_TEST_SET_ROOT']}`.\n"
|
||||
)
|
||||
|
||||
# Summarize the test results.
|
||||
test_report_summary_string = 'This was a null test.'
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -4,8 +4,31 @@
|
||||
|
||||
This script runs a series of unit tests of the MAGE Fortran software. These
|
||||
tests are run as PBS jobs on derecho. There will be one job which generates
|
||||
the data for testing, then 1 or more dependent jobs that use the newly-
|
||||
generated data for unit testing, then a job for the test report.
|
||||
the data for testing, then 3 dependent jobs that use the newly-generated data
|
||||
for unit testing, then a job for the test report.
|
||||
|
||||
There are 5 PBS job scripts used per module set. Each is generated from a
|
||||
jinja2 template.
|
||||
|
||||
1. genTestData.pbs - Data generation. Runs in about 17 minutes on 5 derecho
|
||||
nodes. Output in PBS job file genTestData.o*, and geo_mpi.out.
|
||||
|
||||
2. runCaseTests.pbs - Runs in about 17 minutes on 1 derecho node. Only runs if
|
||||
genTestData.pbs completes successfully. Output in PBS log file
|
||||
runCaseTests.o*, caseTests.out, and caseMpiTests.out.
|
||||
|
||||
3. runNonCaseTests1.pbs - Runs in about 2 minutes on 1 derecho node. Only runs
|
||||
if genTestData.pbs completes successfully. Output in PBS log file
|
||||
runNonCaseTests1.o*, gamTests.out, mixTests.out, voltTests.out,
|
||||
baseMpiTests.out, gamMpiTests.out.
|
||||
|
||||
4. runNonCaseTests2.pbs - Runs in about XX minutes on 2 derecho nodes. Only
|
||||
runs if genTestData.pbs completes successfully. Output in PBS log file
|
||||
runNonCaseTests2.o*, and voltMpiTests.out.
|
||||
|
||||
5. unitTestReport.pbs - Report generation. Runs in about XX minutes on 1
|
||||
derecho node. Only runs if jobs 2-4 complete successfully. Output in PBS
|
||||
log file unitTestReport.o*, and unitTestReport.out.
|
||||
|
||||
NOTE: If this script is run as part of a set of tests for run_mage_tests.sh,
|
||||
this script must be listed *last*, since it makes changes to the kaiju source
|
||||
@@ -21,16 +44,16 @@ Eric Winter
|
||||
|
||||
# Import standard modules.
|
||||
import datetime
|
||||
# import glob
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# Import 3rd-party modules.
|
||||
from jinja2 import Template
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -38,6 +61,9 @@ from kaipy.testing import common
|
||||
# Program description.
|
||||
DESCRIPTION = 'Script for MAGE Fortran unit testing'
|
||||
|
||||
# Home directory of kaiju installation
|
||||
KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
# Root of directory tree for this set of tests.
|
||||
MAGE_TEST_SET_ROOT = os.environ['MAGE_TEST_SET_ROOT']
|
||||
|
||||
@@ -52,7 +78,8 @@ PFUNIT_HOME = os.path.join(
|
||||
MAGE_TEST_ROOT, 'pfunit', 'pFUnit-4.2.0', 'ifort-23-mpich-derecho'
|
||||
)
|
||||
|
||||
# List of pFUnit directories to copy from PFUNIT_HOME into kaiju/external
|
||||
# List of pFUnit directories to copy from PFUNIT_HOME into
|
||||
# kaiju_private/external
|
||||
PFUNIT_BINARY_DIRECTORIES = [
|
||||
'FARGPARSE-1.1',
|
||||
'GFTL-1.3',
|
||||
@@ -60,15 +87,9 @@ PFUNIT_BINARY_DIRECTORIES = [
|
||||
'PFUNIT-4.2',
|
||||
]
|
||||
|
||||
# Home directory of kaiju installation
|
||||
KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
# Path to kaiju subdirectory for external code
|
||||
KAIJU_EXTERNAL_DIRECTORY = os.path.join(KAIJUHOME, 'external')
|
||||
|
||||
# Prefix for naming unit test directories
|
||||
UNIT_TEST_DIRECTORY_PREFIX = 'unitTest_'
|
||||
|
||||
# Path to directory containing the test scripts
|
||||
TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')
|
||||
|
||||
@@ -79,17 +100,31 @@ MODULE_LIST_DIRECTORY = os.path.join(TEST_SCRIPTS_DIRECTORY,
|
||||
# Name of file containing names of modules lists to use for unit tests
|
||||
UNIT_TEST_LIST_FILE = os.path.join(MODULE_LIST_DIRECTORY, 'unit_test.lst')
|
||||
|
||||
# Path to directory containing unit test files.
|
||||
TESTS_DIRECTORY = os.path.join(KAIJUHOME, 'tests')
|
||||
# Path to directory containing the unit test scripts
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'tests')
|
||||
|
||||
# PBS scripts for unit test jobs.
|
||||
UNIT_TEST_PBS_SCRIPTS = [
|
||||
'genTestData.pbs',
|
||||
'runCaseTests.pbs',
|
||||
'runNonCaseTests1.pbs',
|
||||
# 'runNonCaseTests2.pbs', # Hangs for 12 hours
|
||||
'unitTestReport.pbs',
|
||||
]
|
||||
# Paths to jinja2 template files for PBS scripts.
|
||||
DATA_GENERATION_PBS_TEMPLATE = os.path.join(
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY, 'genTestData-template.pbs'
|
||||
)
|
||||
RUN_CASE_TESTS_PBS_TEMPLATE = os.path.join(
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY, 'runCaseTests-template.pbs'
|
||||
)
|
||||
RUN_NON_CASE_TESTS_1_PBS_TEMPLATE = os.path.join(
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY, 'runNonCaseTests1-template.pbs'
|
||||
)
|
||||
RUN_NON_CASE_TESTS_2_PBS_TEMPLATE = os.path.join(
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY, 'runNonCaseTests2-template.pbs'
|
||||
)
|
||||
UNIT_TEST_REPORT_PBS_TEMPLATE = os.path.join(
|
||||
UNIT_TEST_SCRIPTS_DIRECTORY, 'unitTestReport-template.pbs'
|
||||
)
|
||||
|
||||
# Prefix for naming unit test directories
|
||||
UNIT_TEST_DIRECTORY_PREFIX = 'unitTest_'
|
||||
|
||||
# Name of build subdirectory containing binaries
|
||||
BUILD_BIN_DIR = 'bin'
|
||||
|
||||
# Input files for unit tests
|
||||
UNIT_TEST_DATA_INPUT_DIRECTORY = os.path.join(
|
||||
@@ -102,17 +137,15 @@ UNIT_TEST_DATA_INPUT_FILES = [
|
||||
'rcmconfig.h5',
|
||||
]
|
||||
|
||||
# Name of build subdirectory containing binaries
|
||||
BUILD_BIN_DIR = 'bin'
|
||||
# Names of PBS scripts to create from templates.
|
||||
DATA_GENERATION_PBS_SCRIPT = 'genTestData.pbs'
|
||||
RUN_CASE_TESTS_PBS_SCRIPT = 'runCaseTests.pbs'
|
||||
RUN_NON_CASE_TESTS_1_PBS_SCRIPT = 'runNonCaseTests1.pbs'
|
||||
RUN_NON_CASE_TESTS_2_PBS_SCRIPT = 'runNonCaseTests2.pbs'
|
||||
UNIT_TEST_REPORT_PBS_SCRIPT = 'unitTestReport.pbs'
|
||||
|
||||
# Name of PBS account to use for testing jobs.
|
||||
DERECHO_TESTING_ACCOUNT = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
|
||||
# Token string for access to Slack.
|
||||
SLACK_BOT_TOKEN = os.environ['SLACK_BOT_TOKEN']
|
||||
|
||||
# Branch or commit (or tag) used for testing.
|
||||
BRANCH_OR_COMMIT = os.environ['BRANCH_OR_COMMIT']
|
||||
# Name of file to hold job list.
|
||||
JOB_LIST_FILE = 'jobs.txt'
|
||||
|
||||
|
||||
def main():
|
||||
@@ -140,9 +173,9 @@ def main():
|
||||
args = parser.parse_args()
|
||||
if args.debug:
|
||||
print(f"args = {args}")
|
||||
account = args.account
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
@@ -154,7 +187,7 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make a directory to hold all of the unit tests.
|
||||
# Make a directory to hold all of the Fortran unit tests.
|
||||
if verbose:
|
||||
print(f"Creating ${UNIT_TEST_DIRECTORY}.")
|
||||
os.mkdir(UNIT_TEST_DIRECTORY)
|
||||
@@ -167,11 +200,15 @@ def main():
|
||||
for directory in PFUNIT_BINARY_DIRECTORIES:
|
||||
from_path = os.path.join(PFUNIT_HOME, directory)
|
||||
to_path = os.path.join(KAIJU_EXTERNAL_DIRECTORY, directory)
|
||||
if debug:
|
||||
print(f"Copying {from_path} to {to_path}.")
|
||||
shutil.copytree(from_path, to_path)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make a list of module sets to build with.
|
||||
if verbose:
|
||||
print(f"Reading module set list from {UNIT_TEST_LIST_FILE}.")
|
||||
|
||||
# Read the list of module sets to use for unit tests.
|
||||
with open(UNIT_TEST_LIST_FILE, encoding='utf-8') as f:
|
||||
@@ -182,20 +219,71 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Initalize job ID to None for all module set/PBS script combinations.
|
||||
job_ids = []
|
||||
for _ in module_list_files:
|
||||
job_ids.append([None]*len(UNIT_TEST_PBS_SCRIPTS))
|
||||
if verbose:
|
||||
print('Reading templates for PBS scripts.')
|
||||
|
||||
# Read the template for the PBS script used for the test data generation.
|
||||
with open(DATA_GENERATION_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
data_generation_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print(f"data_generation_pbs_template = {data_generation_pbs_template}")
|
||||
|
||||
# Read the template for the PBS script used for the case tests.
|
||||
with open(RUN_CASE_TESTS_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
run_case_tests_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print(f"run_case_tests_pbs_template = {run_case_tests_pbs_template}")
|
||||
|
||||
# Read the template for the PBS script used for the 1st non-case tests.
|
||||
with open(RUN_NON_CASE_TESTS_1_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
run_non_case_tests_1_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print('run_non_case_tests_1_pbs_template = '
|
||||
f"{run_non_case_tests_1_pbs_template}")
|
||||
|
||||
# Read the template for the PBS script used for the 2nd non-case tests.
|
||||
with open(RUN_NON_CASE_TESTS_2_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
run_non_case_tests_2_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print('run_non_case_tests_2_pbs_template = '
|
||||
f"{run_non_case_tests_2_pbs_template}")
|
||||
|
||||
# Read the template for the PBS script used for report generation.
|
||||
with open(UNIT_TEST_REPORT_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
unit_test_report_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print('unit_test_report_pbs_template = '
|
||||
f"{unit_test_report_pbs_template}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Create the common make command for all module sets.
|
||||
make_cmd = 'make gamera_mpi voltron_mpi allTests'
|
||||
if debug:
|
||||
print(f"make_cmd = {make_cmd}")
|
||||
|
||||
# Create the list for submit results. Only set to True if all qsub commands
|
||||
# for a set are OK.
|
||||
submit_ok = [False]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"submit_ok = {submit_ok}")
|
||||
|
||||
# Create a list of lists for job IDs. There are 5 job IDs per set - one for
|
||||
# data generration, case tests, non-case tests 1, non-case tests 2, and the
|
||||
# test report.
|
||||
job_ids = [[None, None, None, None, None]]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"job_ids = {job_ids}")
|
||||
|
||||
# Run the unit tests with each set of modules.
|
||||
for (i_set, module_list_file) in enumerate(module_list_files):
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if verbose:
|
||||
print('Performing initial condition build tests with module set '
|
||||
print('Performing unit tests tests with module set '
|
||||
f"{module_list_file}.")
|
||||
|
||||
# Extract the name of the list.
|
||||
@@ -203,6 +291,8 @@ def main():
|
||||
if debug:
|
||||
print(f"module_set_name = {module_set_name}.")
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Read this module list file, extracting cmake environment and
|
||||
# options, if any.
|
||||
path = os.path.join(MODULE_LIST_DIRECTORY, module_list_file)
|
||||
@@ -216,18 +306,20 @@ def main():
|
||||
print(f"cmake_environment = {cmake_environment}")
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
|
||||
# Assemble the commands to load the listed modules.
|
||||
# <HACK>
|
||||
# Extra argument needed for unit test build.
|
||||
cmake_options += ' -DCMAKE_BUILD_TYPE=RELWITHDEBINFO'
|
||||
if debug:
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
# </HACK>
|
||||
|
||||
# Assemble the command to load the listed modules.
|
||||
module_cmd = (
|
||||
f"module --force purge; module load {' '.join(module_names)}"
|
||||
)
|
||||
if debug:
|
||||
print(f"module_cmd = {module_cmd}")
|
||||
|
||||
# <HACK>
|
||||
# Extra argument needed for unit test build.
|
||||
cmake_options += ' -DCMAKE_BUILD_TYPE=RELWITHDEBINFO'
|
||||
# </HACK>
|
||||
|
||||
# Make a directory for this test, and go there.
|
||||
dir_name = f"{UNIT_TEST_DIRECTORY_PREFIX}{module_set_name}"
|
||||
build_directory = os.path.join(UNIT_TEST_DIRECTORY, dir_name)
|
||||
@@ -250,7 +342,7 @@ def main():
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
# NOTE: stdout and stderr goes cmake.out.
|
||||
_ = subprocess.run(cmd, shell=True, check=True)
|
||||
cproc = subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
f"ERROR: cmake for module set {module_set_name} failed.\n"
|
||||
@@ -287,14 +379,20 @@ def main():
|
||||
)
|
||||
continue
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Assemble common data to fill in the PBS templates.
|
||||
pbs_options = {}
|
||||
pbs_options['account'] = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
pbs_options['queue'] = os.environ['DERECHO_TESTING_QUEUE']
|
||||
pbs_options['job_priority'] = os.environ['DERECHO_TESTING_PRIORITY']
|
||||
pbs_options['modules'] = module_names
|
||||
pbs_options['kaijuhome'] = KAIJUHOME
|
||||
|
||||
# Go to the bin directory for testing.
|
||||
os.chdir(BUILD_BIN_DIR)
|
||||
|
||||
# Copy in the PBS scripts for unit testing.
|
||||
for filename in UNIT_TEST_PBS_SCRIPTS:
|
||||
from_path = os.path.join(TESTS_DIRECTORY, filename)
|
||||
to_path = os.path.join('.', filename)
|
||||
shutil.copyfile(from_path, to_path)
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Copy in inputs for unit test data generation.
|
||||
for filename in UNIT_TEST_DATA_INPUT_FILES:
|
||||
@@ -302,132 +400,283 @@ def main():
|
||||
UNIT_TEST_DATA_INPUT_DIRECTORY, filename
|
||||
)
|
||||
to_path = os.path.join('.', filename)
|
||||
if debug:
|
||||
print(f"Copying {from_path} to {to_path}.")
|
||||
shutil.copyfile(from_path, to_path)
|
||||
|
||||
# Submit the jobs to create the test data and run the unit
|
||||
# tests. Note that the unit test jobs will only run if the
|
||||
# data generation job completes successfully.
|
||||
for (j_pbs, pbs_file) in enumerate(UNIT_TEST_PBS_SCRIPTS):
|
||||
job_id = None
|
||||
cmd = (
|
||||
f"qsub -A {DERECHO_TESTING_ACCOUNT} "
|
||||
f"-v MODULE_LIST='{' '.join(module_names)}',"
|
||||
f"KAIJUROOTDIR={KAIJUHOME},"
|
||||
f"MAGE_TEST_SET_ROOT={MAGE_TEST_SET_ROOT},"
|
||||
f"DERECHO_TESTING_ACCOUNT={DERECHO_TESTING_ACCOUNT},"
|
||||
f"SLACK_BOT_TOKEN={SLACK_BOT_TOKEN}"
|
||||
)
|
||||
# <HACK>
|
||||
# Assumes data generation job is first.
|
||||
if j_pbs > 0:
|
||||
cmd += f" -W depend=afterok:{job_ids[i_set][0]}"
|
||||
# </HACK>
|
||||
# <HACK>
|
||||
# Assumes report generation job is last.
|
||||
if pbs_file == 'unitTestReport.pbs':
|
||||
cmd += f" -W depend=afterok:{':'.join(job_ids[i_set][1:-1])}"
|
||||
# </HACK>
|
||||
cmd += f" {pbs_file}"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
except subprocess.CalledProcessError as e:
|
||||
path = os.path.join(
|
||||
build_directory, BUILD_BIN_DIR, f"qsub_{j_pbs}.out"
|
||||
)
|
||||
with open(path, 'w', encoding='utf-8') as f:
|
||||
f.write(e.stdout)
|
||||
print(
|
||||
'ERROR: Job submission failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f"See {path} for output from qsub.\n"
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr
|
||||
)
|
||||
continue
|
||||
# Set options specific to the data generation job, then render the
|
||||
# template.
|
||||
pbs_options['job_name'] = 'genTestData'
|
||||
pbs_options['walltime'] = '00:30:00'
|
||||
pbs_content = data_generation_pbs_template.render(pbs_options)
|
||||
if verbose:
|
||||
print(f"Creating {DATA_GENERATION_PBS_SCRIPT}.")
|
||||
with open(DATA_GENERATION_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Save the job ID.
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids[i_set][j_pbs] = job_id
|
||||
# End of loop over PBS scripts
|
||||
# Run the data generation job.
|
||||
cmd = f"qsub {DATA_GENERATION_PBS_SCRIPT}"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
job_ids[i_module_set][0] = job_id
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
print(f"job_ids = {job_ids}")
|
||||
|
||||
# Record the job IDs in a text file.
|
||||
with open('jobs.txt', 'w', encoding='utf-8') as f:
|
||||
for job_id in job_ids[i_set]:
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Set options specific to the case tests job, then render the
|
||||
# template.
|
||||
pbs_options['job_name'] = 'runCaseTests'
|
||||
pbs_options['walltime'] = '00:40:00'
|
||||
pbs_content = run_case_tests_pbs_template.render(pbs_options)
|
||||
if verbose:
|
||||
print(f"Creating {RUN_CASE_TESTS_PBS_SCRIPT}.")
|
||||
with open(RUN_CASE_TESTS_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Run the case tests job if data was generated.
|
||||
cmd = (
|
||||
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
|
||||
f"{RUN_CASE_TESTS_PBS_SCRIPT}"
|
||||
)
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids[i_module_set][1] = job_id
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Set options specific to the 1st non-case tests job, then render the
|
||||
# template.
|
||||
pbs_options['job_name'] = 'runNonCaseTests1'
|
||||
pbs_options['walltime'] = '00:05:00'
|
||||
if verbose:
|
||||
print(f"Creating {RUN_NON_CASE_TESTS_1_PBS_SCRIPT}.")
|
||||
pbs_content = run_non_case_tests_1_pbs_template.render(pbs_options)
|
||||
with open(RUN_NON_CASE_TESTS_1_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Run the 1st non-case tests job if data was generated.
|
||||
cmd = (
|
||||
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
|
||||
f"{RUN_NON_CASE_TESTS_1_PBS_SCRIPT}"
|
||||
)
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids[i_module_set][2] = job_id
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Set options specific to the 2nd non-case tests job, then render the
|
||||
# template.
|
||||
pbs_options['job_name'] = 'runNonCaseTests2'
|
||||
pbs_options['walltime'] = '12:00:00'
|
||||
pbs_content = run_non_case_tests_2_pbs_template.render(pbs_options)
|
||||
with open(RUN_NON_CASE_TESTS_2_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Run the 2nd non-case tests job if data was generated.
|
||||
cmd = (
|
||||
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
|
||||
f"{RUN_NON_CASE_TESTS_2_PBS_SCRIPT}"
|
||||
)
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids[i_module_set][3] = job_id
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Set options specific to the report generation job, then render the
|
||||
# template.
|
||||
pbs_options['job_name'] = 'unitTestReport'
|
||||
pbs_options['walltime'] = '00:10:00'
|
||||
pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
|
||||
pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
|
||||
pbs_options['mage_test_set_root'] = os.environ['MAGE_TEST_SET_ROOT']
|
||||
pbs_options['report_options'] = ''
|
||||
if debug:
|
||||
pbs_options['report_options'] += ' -d'
|
||||
pbs_options['report_options'] += ' -l' # Always post report.
|
||||
if slack_on_fail:
|
||||
pbs_options['report_options'] += ' -s'
|
||||
if is_test:
|
||||
pbs_options['report_options'] += ' -t'
|
||||
if verbose:
|
||||
pbs_options['report_options'] += ' -v'
|
||||
pbs_content = unit_test_report_pbs_template.render(pbs_options)
|
||||
with open(UNIT_TEST_REPORT_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Run the report generation job if all others ran OK.
|
||||
cmd = (
|
||||
f"qsub -W depend=afterok:{':'.join(job_ids[i_module_set][1:4])} "
|
||||
f"{UNIT_TEST_REPORT_PBS_SCRIPT}"
|
||||
)
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
job_ids[i_module_set][4] = job_id
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Record the job IDs for this module set in a file.
|
||||
if verbose:
|
||||
print(f"Saving job IDs for module set {module_set_name} "
|
||||
f"in {JOB_LIST_FILE}.")
|
||||
with open(JOB_LIST_FILE, 'w', encoding='utf-8') as f:
|
||||
for job_id in job_ids[i_module_set]:
|
||||
f.write(f"{job_id}\n")
|
||||
|
||||
# This module set worked.
|
||||
submit_ok[i_module_set] = True
|
||||
|
||||
# End of loop over module sets
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
print(f"submit_ok = {submit_ok}")
|
||||
print(f"job_ids = {job_ids}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# NOTE: Assumes only 1 module set was used.
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
'Fortran unit test PBS job script `genTestData.pbs` submitted as job '
|
||||
f"{job_ids[0][0]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
'Fortran unit test PBS job script `runCaseTests.pbs` submitted as job '
|
||||
f"{job_ids[0][1]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
'Fortran unit test PBS job script `runNonCaseTests1.pbs` submitted as'
|
||||
f" job {job_ids[0][2]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
'Fortran unit test PBS job script `runNonCaseTests2.pbs` skipped'
|
||||
' since it currently hangs on `derecho`.\n'
|
||||
)
|
||||
test_report_details_string += (
|
||||
'Fortran unit test report PBS job script `unitTestReport.pbs` submitted as'
|
||||
f" job {job_ids[0][3]}.\n"
|
||||
f"Test results are in `{UNIT_TEST_DIRECTORY}`.\n"
|
||||
)
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if not submit_ok[i_module_set]:
|
||||
test_report_details_string += (
|
||||
f"Module set `{module_list_file}`: *FAILED*"
|
||||
)
|
||||
continue
|
||||
test_report_details_string += (
|
||||
f"`{DATA_GENERATION_PBS_SCRIPT}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][0]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{RUN_CASE_TESTS_PBS_SCRIPT}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][1]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{RUN_NON_CASE_TESTS_1_PBS_SCRIPT}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][2]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{RUN_NON_CASE_TESTS_2_PBS_SCRIPT}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][3]}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"`{UNIT_TEST_REPORT_PBS_SCRIPT}` for module set "
|
||||
f"`{module_list_file}` submitted as PBS job "
|
||||
f"{job_ids[i_module_set][4]}.\n"
|
||||
)
|
||||
|
||||
# Summarize the test results
|
||||
test_report_summary_string = (
|
||||
'Fortran unit tests submitted by `unitTest.py`'
|
||||
f" for branch or commit or tag {BRANCH_OR_COMMIT}\n"
|
||||
)
|
||||
if 'FAILED' in test_report_details_string:
|
||||
test_report_summary_string = (
|
||||
'Fortran unit test submission: *FAILED*'
|
||||
)
|
||||
else:
|
||||
test_report_summary_string = (
|
||||
'Fortran unit test submission: *PASSED*'
|
||||
)
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
test_report_summary_string += 'Details in thread for this messsage.\n'
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if 'ok' not in slack_response_details:
|
||||
print('*ERROR* Unable to post test details to Slack.')
|
||||
else:
|
||||
print('*ERROR* Unable to post test summary to Slack.')
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ import sys
|
||||
# Import 3rd-party modules.
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -75,6 +75,7 @@ def main():
|
||||
print(f"args = {args}")
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
@@ -135,20 +136,19 @@ def main():
|
||||
# NOTE: This needs to be reorganized.
|
||||
|
||||
# Compute the names of the job log files.
|
||||
job_file_0 = f"testResGen.o{job_ids[0]}"
|
||||
job_file_1 = f"caseTests.o{job_ids[1]}"
|
||||
job_file_2 = f"nonCaseTests1.o{job_ids[2]}"
|
||||
# job_file_3 = f"nonCaseTests2.o{job_ids[3]}" # SKIP FOR NOW
|
||||
job_file_0 = f"genTestData.o{job_ids[0]}"
|
||||
job_file_1 = f"runCaseTests.o{job_ids[1]}"
|
||||
job_file_2 = f"runNonCaseTests1.o{job_ids[2]}"
|
||||
job_file_3 = f"runNonCaseTests2.o{job_ids[3]}" # SKIP FOR NOW
|
||||
if debug:
|
||||
print(f"job_file_o = {job_file_0}")
|
||||
print(f"job_file_0 = {job_file_0}")
|
||||
print(f"job_file_1 = {job_file_1}")
|
||||
print(f"job_file_2 = {job_file_2}")
|
||||
# print(f"job_file_3 = {job_file_3}")
|
||||
print(f"job_file_3 = {job_file_3}")
|
||||
|
||||
# Combine the results of each test log file.
|
||||
bigFile = []
|
||||
# job_files = [job_file_0, job_file_1, job_file_2, job_file_3]
|
||||
job_files = [job_file_0, job_file_1, job_file_2]
|
||||
job_files = [job_file_0, job_file_1, job_file_2, job_file_3]
|
||||
for job_file in job_files:
|
||||
with open(job_file, 'r', encoding='utf-8') as f:
|
||||
bigFile += f.readlines()
|
||||
@@ -164,7 +164,7 @@ def main():
|
||||
elif 'job killed' in line:
|
||||
jobKilled = True
|
||||
|
||||
# There should be exactly 6 OKs (8 if job_file_3 is used).
|
||||
# There should be exactly 8 OKs.
|
||||
if okCount != 6:
|
||||
okFailure = True
|
||||
else:
|
||||
@@ -187,15 +187,6 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# NOTE: Assumes only 1 module set was used.
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
@@ -217,28 +208,29 @@ def main():
|
||||
if myError or jobKilled or okFailure:
|
||||
test_report_summary_string += '*FAILED*\n'
|
||||
else:
|
||||
test_report_summary_string += '*ALL PASSED*\n'
|
||||
test_report_summary_string += '*PASSED*\n'
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
test_report_summary_string += 'Details in thread for this messsage.\n'
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if 'ok' not in slack_response_details:
|
||||
print('*ERROR* Unable to post test details to Slack.')
|
||||
else:
|
||||
print('*ERROR* Unable to post test summary to Slack.')
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
|
||||
@@ -1,46 +1,28 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A P28100045
|
||||
#PBS -q main
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l select=8:ncpus=128:mpiprocs=2:ompthreads=64+3:ncpus=128:mpiprocs=1:ompthreads=128
|
||||
#PBS -l walltime=06:00:00
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
{{ module_cmd }}
|
||||
echo 'The following modules are loaded:'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
KAIJUROOTDIR={{ KAIJUROOTDIR }}
|
||||
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
export TMPDIR=/glade/work/ewinter/mage_testing/derecho/tmp
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
export MAGE_TEST_ROOT='/glade/work/ewinter/mage_testing/derecho'
|
||||
export MAGE_TEST_SET_ROOT={{ MAGE_TEST_SET_ROOT }}
|
||||
export BRANCH_OR_COMMIT={{ BRANCH_OR_COMMIT }}
|
||||
export SLACK_BOT_TOKEN='xoxb-1065817665921-1413594823303-gUePq3obrqlPmlCHC5E7rKVP'
|
||||
export DERECHO_TESTING_ACCOUNT=P28100045
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
# Run the model.
|
||||
$MPICOMMAND ./voltron_mpi.x weeklyDashGo.xml > weeklyDashGo.out
|
||||
|
||||
# Generate the report.
|
||||
export CONDARC="${MAGE_TEST_ROOT}/condarc"
|
||||
export CONDA_ENVS_PATH="${MAGE_TEST_ROOT}/conda"
|
||||
mage_miniconda3="${MAGE_TEST_ROOT}/miniconda3"
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/.condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/.conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
@@ -54,6 +36,27 @@ else
|
||||
fi
|
||||
unset __conda_setup
|
||||
conda activate kaiju-3.8-testing
|
||||
python $KAIJUHOME/testingScripts/weeklyDashReport.py -dtlv >& weeklyDashReport.out
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
export MAGE_TEST_ROOT=$mage_test_root
|
||||
export MAGE_TEST_SET_ROOT={{ mage_test_set_root }}
|
||||
export BRANCH_OR_COMMIT={{ branch_or_commit }}
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
# Run the model.
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
$MPICOMMAND ./voltron_mpi.x weeklyDashGo.xml >& weeklyDashGo.out
|
||||
|
||||
# Generate the report.
|
||||
python $KAIJUHOME/testingScripts/weeklyDashReport.py {{ report_options }} >& weeklyDashReport.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
@@ -15,16 +15,15 @@ Eric Winter
|
||||
# Import standard modules.
|
||||
import datetime
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# Import 3rd-party modules.
|
||||
import jinja2
|
||||
from jinja2 import Template
|
||||
|
||||
# Import project modules.
|
||||
from kaipy.testing import common
|
||||
import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -60,17 +59,15 @@ BIN_DIR = 'bin'
|
||||
# List of weekly dash test files to copy
|
||||
WEEKLY_DASH_TEST_FILES = [
|
||||
'weeklyDashGo.xml',
|
||||
'weeklyDashGo.pbs',
|
||||
]
|
||||
|
||||
# Name of PBS account to use for testing jobs.
|
||||
DERECHO_TESTING_ACCOUNT = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
# Paths to jinja2 template file for PBS script.
|
||||
WEEKLY_DASH_PBS_TEMPLATE = os.path.join(
|
||||
TEST_SCRIPTS_DIRECTORY, 'weeklyDash-template.pbs'
|
||||
)
|
||||
|
||||
# Token string for access to Slack.
|
||||
SLACK_BOT_TOKEN = os.environ['SLACK_BOT_TOKEN']
|
||||
|
||||
# Branch or commit string for code used in this test.
|
||||
BRANCH_OR_COMMIT = os.environ['BRANCH_OR_COMMIT']
|
||||
# Name of rendered PBS script.
|
||||
WEEKLY_DASH_PBS_SCRIPT = 'weeklyDash.pbs'
|
||||
|
||||
|
||||
def main():
|
||||
@@ -100,19 +97,21 @@ def main():
|
||||
print(f"args = {args}")
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}"
|
||||
f" on {platform.node()}")
|
||||
print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
|
||||
print(f"Current directory is {os.getcwd()}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
if verbose:
|
||||
print('Creating Slack client.')
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
@@ -120,7 +119,8 @@ def main():
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make a directory to hold all of the weekly dash tests.
|
||||
print(f"Creating {WEEKLY_DASH_DIRECTORY}.")
|
||||
if verbose:
|
||||
print(f"Creating {WEEKLY_DASH_DIRECTORY}.")
|
||||
os.mkdir(WEEKLY_DASH_DIRECTORY)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
@@ -136,6 +136,15 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Read the template for the PBS script used for the test data generation.
|
||||
with open(WEEKLY_DASH_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
|
||||
template_content = f.read()
|
||||
weekly_dash_pbs_template = Template(template_content)
|
||||
if debug:
|
||||
print(f"weekly_dash_pbs_template = {weekly_dash_pbs_template}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Create the make command to build the code.
|
||||
make_cmd = 'make voltron_mpi.x'
|
||||
if debug:
|
||||
@@ -143,8 +152,19 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Create the list for submit results. Only set to True if all build and
|
||||
# qsub commands for a set are OK.
|
||||
submit_ok = [False]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"submit_ok = {submit_ok}")
|
||||
|
||||
# Create the list of job IDs.
|
||||
job_ids = [None]*len(module_list_files)
|
||||
if debug:
|
||||
print(f"jobs_ids = {job_ids}")
|
||||
|
||||
# Run the weekly dash with each set of modules.
|
||||
for (i_test, module_list_file) in enumerate(module_list_files):
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if verbose:
|
||||
print('Performing weekly dash with module set '
|
||||
f"{module_list_file}")
|
||||
@@ -157,6 +177,10 @@ def main():
|
||||
# Read this module list file, extracting cmake environment and
|
||||
# options, if any.
|
||||
path = os.path.join(MODULE_LIST_DIRECTORY, module_list_file)
|
||||
if debug:
|
||||
print(f"path = {path}")
|
||||
if verbose:
|
||||
print(f"Reading module list file {path}.")
|
||||
module_names, cmake_environment, cmake_options = (
|
||||
common.read_build_module_list_file(path)
|
||||
)
|
||||
@@ -177,16 +201,11 @@ def main():
|
||||
if debug:
|
||||
print(f"cmake_options = {cmake_options}")
|
||||
|
||||
# Assemble the commands to load the listed modules.
|
||||
module_cmd = (
|
||||
f"module --force purge; module load {' '.join(module_names)}"
|
||||
)
|
||||
if debug:
|
||||
print(f"module_cmd = {module_cmd}")
|
||||
|
||||
# Make a directory for this build, and go there.
|
||||
dir_name = f"{WEEKLY_DASH_DIRECTORY_PREFIX}{module_set_name}"
|
||||
build_directory = os.path.join(WEEKLY_DASH_DIRECTORY, dir_name)
|
||||
if verbose:
|
||||
print(f"Creating and moving to build directory {build_directory}.")
|
||||
if debug:
|
||||
print(f"build_directory = {build_directory}")
|
||||
os.mkdir(build_directory)
|
||||
@@ -267,8 +286,7 @@ def main():
|
||||
# Generate the solar wind boundary condition file.
|
||||
if verbose:
|
||||
print('Creating solar wind initial conditions file.')
|
||||
cmd = (
|
||||
'cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00')
|
||||
cmd = 'cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00'
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -309,82 +327,117 @@ def main():
|
||||
to_file = os.path.join('.', filename)
|
||||
shutil.copyfile(from_file, to_file)
|
||||
|
||||
# Assemble data to fill in the PBS template.
|
||||
pbs_options = {}
|
||||
pbs_options['job_name'] = dir_name
|
||||
pbs_options['account'] = os.environ['DERECHO_TESTING_ACCOUNT']
|
||||
pbs_options['queue'] = os.environ['DERECHO_TESTING_QUEUE']
|
||||
pbs_options['job_priority'] = os.environ['DERECHO_TESTING_PRIORITY']
|
||||
pbs_options['walltime'] = '08:00:00'
|
||||
pbs_options['modules'] = module_names
|
||||
pbs_options['kaijuhome'] = KAIJUHOME
|
||||
pbs_options['tmpdir'] = os.environ['TMPDIR']
|
||||
pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
|
||||
pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
|
||||
pbs_options['mage_test_set_root'] = os.environ['MAGE_TEST_SET_ROOT']
|
||||
pbs_options['branch_or_commit'] = os.environ['BRANCH_OR_COMMIT']
|
||||
pbs_options['report_options'] = ''
|
||||
if debug:
|
||||
pbs_options['report_options'] += ' -d'
|
||||
pbs_options['report_options'] += ' -l' # Always report.
|
||||
if slack_on_fail:
|
||||
pbs_options['report_options'] += ' -s'
|
||||
if is_test:
|
||||
pbs_options['report_options'] += ' -t'
|
||||
if verbose:
|
||||
pbs_options['report_options'] += ' -v'
|
||||
|
||||
# Render the job template.
|
||||
pbs_content = weekly_dash_pbs_template.render(pbs_options)
|
||||
with open(WEEKLY_DASH_PBS_SCRIPT, 'w', encoding='utf-8') as f:
|
||||
f.write(pbs_content)
|
||||
|
||||
# Submit the weekly dash job.
|
||||
if verbose:
|
||||
print('Preparing to submit weekly dash model run.')
|
||||
cmd = (
|
||||
f"qsub -A {DERECHO_TESTING_ACCOUNT} "
|
||||
f"-v MODULE_LIST='{' '.join(module_names)}',"
|
||||
f"KAIJUROOTDIR={KAIJUHOME},"
|
||||
f"MAGE_TEST_SET_ROOT={MAGE_TEST_SET_ROOT},"
|
||||
f"DERECHO_TESTING_ACCOUNT={DERECHO_TESTING_ACCOUNT},"
|
||||
f"SLACK_BOT_TOKEN={SLACK_BOT_TOKEN},"
|
||||
f"BRANCH_OR_COMMIT={BRANCH_OR_COMMIT}"
|
||||
' weeklyDashGo.pbs'
|
||||
)
|
||||
print('Submitting weekly dash model run.')
|
||||
cmd = f"qsub {WEEKLY_DASH_PBS_SCRIPT}"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT)
|
||||
text=True, capture_output=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: Unable to submit job request for module set '
|
||||
f"{module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See testing log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}\n"
|
||||
)
|
||||
print('ERROR: qsub failed.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See test log for output.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}.",
|
||||
file=sys.stderr)
|
||||
continue
|
||||
job_id = cproc.stdout.split('.')[0]
|
||||
if debug:
|
||||
print(f"job_id = {job_id}")
|
||||
|
||||
# Record the job ID.
|
||||
job_ids[i_module_set] = job_id
|
||||
|
||||
# Record successful submission.
|
||||
submit_ok[i_module_set] = True
|
||||
|
||||
# Save the job number in a file.
|
||||
with open('jobs.txt', 'w', encoding='utf-8') as f:
|
||||
f.write(f"{job_id}\n")
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
)
|
||||
test_report_details_string += (
|
||||
f"Weekly dash submitted as job {job_id}."
|
||||
)
|
||||
|
||||
# Summarize the test results.
|
||||
test_report_summary_string = (
|
||||
'Weekly dash submitted by `weeklyDash.py`'
|
||||
f" for branch or commit or tag {BRANCH_OR_COMMIT}\n"
|
||||
)
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If loud mode is on, post report to Slack.
|
||||
if be_loud:
|
||||
test_report_summary_string += 'Details in thread for this messsage.\n'
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if slack_response_summary['ok']:
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_details = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if 'ok' not in slack_response_details:
|
||||
print('*ERROR* Unable to post test details to Slack.')
|
||||
else:
|
||||
print('*ERROR* Unable to post test summary to Slack.')
|
||||
|
||||
# End of loop over module sets.
|
||||
|
||||
if debug:
|
||||
print(f"submit_ok = {submit_ok}")
|
||||
print(f"job_ids = {job_ids}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Detail the test results
|
||||
test_report_details_string = ''
|
||||
test_report_details_string += (
|
||||
f"Test results are in {WEEKLY_DASH_DIRECTORY}.\n"
|
||||
)
|
||||
for (i_module_set, module_list_file) in enumerate(module_list_files):
|
||||
if not submit_ok[i_module_set]:
|
||||
test_report_details_string += (
|
||||
f"Module set `{module_list_file}` submission: *FAILED*\n"
|
||||
)
|
||||
continue
|
||||
test_report_details_string += (
|
||||
f"Weekly dash for module set `{module_list_file}` submitted as "
|
||||
f"job {job_ids[i_module_set]}.\n"
|
||||
)
|
||||
|
||||
# Summarize the test results.
|
||||
if 'FAILED' in test_report_details_string:
|
||||
test_report_summary_string = 'Weekly dash submission: *FAILED*\n'
|
||||
else:
|
||||
test_report_summary_string = 'Weekly dash submission: *PASSED*\n'
|
||||
|
||||
# Print the test results summary and details.
|
||||
print(test_report_summary_string)
|
||||
print(test_report_details_string)
|
||||
|
||||
# If a test failed, or loud mode is on, post report to Slack.
|
||||
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_summary_string, is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
thread_ts = slack_response_summary['ts']
|
||||
slack_response_summary = common.slack_send_message(
|
||||
slack_client, test_report_details_string, thread_ts=thread_ts,
|
||||
is_test=is_test
|
||||
)
|
||||
if debug:
|
||||
print(f"slack_response_summary = {slack_response_summary}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
if debug:
|
||||
|
||||
@@ -28,9 +28,9 @@ import matplotlib.dates as mdates
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
# Import project modules.
|
||||
import common
|
||||
import kaipy.kaiH5 as kh5
|
||||
import kaipy.kaiViz as kv
|
||||
from kaipy.testing import common
|
||||
|
||||
|
||||
# Program constants
|
||||
@@ -99,19 +99,6 @@ VOLTRON_OUTPUT_FILE_DEVELOPMENT = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_DEVELOPMENT, VOLTRON_OUTPUT_FILE
|
||||
)
|
||||
|
||||
# # Home directory of kaiju installation
|
||||
# KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
# # Top-level directory for testing
|
||||
# KAIJU_TESTING_HOME = '/glade/work/ewinter/mage_testing/derecho'
|
||||
|
||||
# # Path to directory containing weekly dash results.
|
||||
# WEEKLY_DASH_DIRECTORY = os.path.join(KAIJUHOME, 'weeklyDash_01')
|
||||
|
||||
# # Name of weekly dash log file.
|
||||
# weekly_dash_log_latest = 'weeklyDashGo.out'
|
||||
|
||||
|
||||
# Compute the paths to the quicklook plots for the master branch.
|
||||
MAGNETOSPHERE_QUICKLOOK_MASTER = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_MASTER, 'qkmsphpic.png'
|
||||
@@ -168,6 +155,7 @@ def main():
|
||||
print(f"args = {args}")
|
||||
debug = args.debug
|
||||
be_loud = args.loud
|
||||
# slack_on_fail = args.slack_on_fail
|
||||
is_test = args.test
|
||||
verbose = args.verbose
|
||||
|
||||
@@ -1032,13 +1020,6 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Set up for communication with Slack.
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# List the files to post and their comments.
|
||||
images_to_post = [
|
||||
'perfPlots.png',
|
||||
@@ -1069,9 +1050,14 @@ def main():
|
||||
|
||||
# If loud mode is on, post results to Slack.
|
||||
if be_loud:
|
||||
slack_client = common.slack_create_client()
|
||||
if debug:
|
||||
print(f"slack_client = {slack_client}")
|
||||
message = (
|
||||
f"Weekly dash result plots complete on branch {BRANCH_OR_COMMIT}.\n"
|
||||
' Latest comparative results attached as replies to this message.\n'
|
||||
'Weekly dash result plots complete on branch '
|
||||
f"{BRANCH_OR_COMMIT}.\n"
|
||||
' Latest comparative results attached as replies to this '
|
||||
'message.\n'
|
||||
)
|
||||
message += (
|
||||
f"Test results are in {os.getcwd()}.\n"
|
||||
|
||||
35
tests/genTestData-template.pbs
Normal file
35
tests/genTestData-template.pbs
Normal file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=4:ncpus=128:mpiprocs=2:ompthreads=64+1:ncpus=128:mpiprocs=1:ompthreads=128
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Generating data for testing.'
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
$MPICOMMAND ./voltron_mpi.x geo_mpi.xml >& geo_mpi.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
42
tests/runCaseTests-template.pbs
Normal file
42
tests/runCaseTests-template.pbs
Normal file
@@ -0,0 +1,42 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=1:ncpus=128:mpiprocs=8:ompthreads=16
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Running non-MPI test cases.'
|
||||
./caseTests >& caseTests.out
|
||||
echo 'Non-MPI test cases complete.'
|
||||
echo | tail -n 3 ./caseTests.out
|
||||
|
||||
echo 'Running MPI test cases.'
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
${MPICOMMAND} ./caseMpiTests >& caseMpiTests.out
|
||||
echo 'MPI test cases complete.'
|
||||
echo | tail -n 3 ./caseMpiTests.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
67
tests/runNonCaseTests1-template.pbs
Normal file
67
tests/runNonCaseTests1-template.pbs
Normal file
@@ -0,0 +1,67 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=1:ncpus=128:mpiprocs=64:ompthreads=128
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Running GAMERA tests.'
|
||||
date
|
||||
./gamTests >& gamTests.out
|
||||
date
|
||||
echo 'GAMERA tests complete.'
|
||||
echo | tail -n 3 ./gamTests.out
|
||||
|
||||
echo 'Running REMIX tests.'
|
||||
date
|
||||
./mixTests >& mixTests.out
|
||||
date
|
||||
echo 'REMIX tests complete.'
|
||||
echo | tail -n 3 ./mixTests.out
|
||||
|
||||
echo 'Running VOLTRON tests.'
|
||||
date
|
||||
./voltTests >& voltTests.out
|
||||
date
|
||||
echo 'VOLTRON tests complete.'
|
||||
echo | tail -n 3 ./voltTests.out
|
||||
|
||||
echo 'Running base MPI tests.'
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
date
|
||||
${MPICOMMAND} ./baseMpiTests >& baseMpiTests.out
|
||||
date
|
||||
echo 'Base MPI tests complete.'
|
||||
echo | tail -n 3 ./baseMpiTests.out
|
||||
|
||||
echo 'Running GAMERA MPI tests.'
|
||||
date
|
||||
${MPICOMMAND} ./gamMpiTests >& gamMpiTests.out
|
||||
date
|
||||
echo 'GAMERA MPI tests complete.'
|
||||
echo | tail -n 3 ./gamMpiTests.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
39
tests/runNonCaseTests2-template.pbs
Normal file
39
tests/runNonCaseTests2-template.pbs
Normal file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=1:ncpus=128:mpiprocs=9:ompthreads=14
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
# export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Running VOLTRON MPI tests.'
|
||||
date
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
${MPICOMMAND} ./voltMpiTests >& voltMpiTests.out
|
||||
date
|
||||
echo 'VOLTRON MPI tests complete.'
|
||||
echo | tail -n 3 ./voltMpiTests.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
@@ -1,32 +1,27 @@
|
||||
#!/bin/bash
|
||||
#PBS -N nonCaseTests2
|
||||
#PBS -A P28100045
|
||||
#PBS -l walltime=12:00:00
|
||||
#PBS -q main
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=2:ncpus=128:mpiprocs=9:ompthreads=128
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
# NOTE: The user account must be specified on the qsub command line with the
|
||||
# -A option,
|
||||
# KAIJUROOTDIR and MODULE_LIST must be set as transferred environment
|
||||
# variables on the qsub command line.
|
||||
# Example qsub command:
|
||||
# qsub -A P28100045 -v $HOME/kaiju,MODULE_LIST='module1 module2 ...'
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
module load $MODULE_LIST
|
||||
echo 'The following modules are loaded:'
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
export OMP_NUM_THREADS=128
|
||||
export MPI_TYPE_DEPTH=32
|
||||
export KMP_STACKSIZE=128M
|
||||
@@ -34,9 +29,8 @@ echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Running VOLTRON MPI tests.'
|
||||
date
|
||||
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
|
||||
${MPICOMMAND} ./voltMpiTests > voltMpiTests.out
|
||||
date
|
||||
echo 'VOLTRON MPI tests complete.'
|
||||
echo | tail -n 3 ./voltMpiTests.out
|
||||
|
||||
|
||||
52
tests/unitTestReport-template.pbs
Normal file
52
tests/unitTestReport-template.pbs
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/bin/bash
|
||||
|
||||
#PBS -N {{ job_name }}
|
||||
#PBS -A {{ account }}
|
||||
#PBS -q {{ queue }}
|
||||
#PBS -l job_priority={{ job_priority }}
|
||||
#PBS -l walltime={{ walltime }}
|
||||
#PBS -l select=1:ncpus=128
|
||||
#PBS -j oe
|
||||
#PBS -m abe
|
||||
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading modules.'
|
||||
module --force purge
|
||||
{%- for module in modules %}
|
||||
module load {{ module }}
|
||||
{%- endfor %}
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
eval "$__conda_setup"
|
||||
else
|
||||
if [ -f "$mage_miniconda3/etc/profile.d/conda.sh" ]; then
|
||||
. "$mage_miniconda3/etc/profile.d/conda.sh"
|
||||
else
|
||||
export PATH="$mage_miniconda3/bin:$PATH"
|
||||
fi
|
||||
fi
|
||||
unset __conda_setup
|
||||
conda activate kaiju-3.8-testing
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export MAGE_TEST_SET_ROOT={{ mage_test_set_root }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
echo 'Generating unit test report.'
|
||||
python $KAIJUHOME/testingScripts/unitTestReport.py {{ report_options }} >& unitTestReport.out
|
||||
|
||||
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."
|
||||
@@ -31,7 +31,7 @@
|
||||
</CHIMP>
|
||||
<REMIX>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="F"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
</REMIX>
|
||||
<RCM>
|
||||
<ellipse xSun="12.5" yDD="15.0" xTail="-15.0" isDynamic="T"/>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
</CHIMP>
|
||||
<REMIX>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="F"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
</REMIX>
|
||||
<RCM>
|
||||
<ellipse xSun="12.5" yDD="15.0" xTail="-15.0" isDynamic="T"/>
|
||||
|
||||
@@ -31,7 +31,7 @@
|
||||
</CHIMP>
|
||||
<REMIX>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="F"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="F"/>
|
||||
</REMIX>
|
||||
<RCM>
|
||||
<ellipse xSun="12.5" yDD="15.0" xTail="-15.0" isDynamic="T"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
<REMIX>
|
||||
<grid Np="360" Nt="55" LowLatBoundary="55.0"/>
|
||||
<conductance doStarlight="T" doRamp="F" doMR="T"/>
|
||||
<precipitation aurora_model_type="RCMONO" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
<precipitation aurora_model_type="LINMRG" alpha="0.2" beta="0.4" doAuroralSmooth="T"/>
|
||||
</REMIX>
|
||||
<CHIMP>
|
||||
<units uid="EARTHCODE"/>
|
||||
|
||||
Reference in New Issue
Block a user