I think I got unit tests working again.

This commit is contained in:
Eric Winter
2024-07-15 15:08:55 -06:00
parent 658254e479
commit 2575676b95
15 changed files with 873 additions and 302 deletions

View File

@@ -0,0 +1,224 @@
#!/usr/bin/env python
"""Run MAGE python unit tests.
This script runs a series of unit tests of the MAGE python software.
Authors
-------
Jeff Garretson
Eric Winter
"""
# Import standard modules.
import datetime
import os
import shutil
import subprocess
import sys
# Import 3rd-party modules.
from jinja2 import Template
# Import project modules.
import common
# Program constants

# Program description (shown in the command-line parser help).
DESCRIPTION = 'Script for MAGE python unit testing'

# Paths under this test session directory.
# Path to directory for this set of python unit tests.
# NOTE: requires the MAGE_TEST_SET_ROOT environment variable to be set;
# a missing variable raises KeyError at import time.
PYUNIT_TEST_DIRECTORY = os.path.join(os.environ['MAGE_TEST_SET_ROOT'],
                                     'pyunitTest')
# Name of PBS file to create from the jinja2 template for the tests.
PBS_FILE = 'pyunitTest.pbs'

# Paths under the kaiju installation to test.
# Top of installation tree for kaiju installation to test (from environment).
KAIJUHOME = os.environ['KAIJUHOME']
# Path to testing script directory.
KAIJU_TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')
# Path to pytests directory.
KAIJU_PYTESTS_DIRECTORY = os.path.join(KAIJUHOME, 'pytests')
# Path to jinja2 template file for the PBS script for the python unit tests.
PBS_TEMPLATE = os.path.join(
    KAIJU_TEST_SCRIPTS_DIRECTORY, 'pyunitTest-template.pbs'
)
def main():
    """Run the MAGE python unit tests as a PBS job.

    Create a directory for this test session, render the PBS job script
    from its jinja2 template, copy the python unit test files from the
    kaiju source tree, submit the job with ``qsub``, and report the
    submission to stdout (and, in loud mode, to Slack).

    Parameters
    ----------
    None

    Returns
    -------
    None

    Raises
    ------
    None
    """
    # Set up the command-line parser.
    parser = common.create_command_line_parser(DESCRIPTION)

    # Parse the command-line arguments.
    args = parser.parse_args()
    if args.debug:
        print(f"args = {args}")
    debug = args.debug
    be_loud = args.loud
    is_test = args.test
    verbose = args.verbose

    # ------------------------------------------------------------------------

    if debug:
        print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
        print(f"Current directory is {os.getcwd()}")

    # ------------------------------------------------------------------------

    # Make a directory to hold the python unit tests, and go there.
    # NOTE: os.mkdir raises FileExistsError if the directory already exists,
    # which aborts the test session rather than clobbering old results.
    if verbose:
        print(f"Creating {PYUNIT_TEST_DIRECTORY}.")
    os.mkdir(PYUNIT_TEST_DIRECTORY)
    os.chdir(PYUNIT_TEST_DIRECTORY)

    # ------------------------------------------------------------------------

    # Create the PBS script for this test session from the template.

    # Read the template for the PBS script used for the tests.
    with open(PBS_TEMPLATE, 'r', encoding='utf-8') as f:
        template_content = f.read()
    pbs_template = Template(template_content)
    if debug:
        print(f"pbs_template = {pbs_template}")

    # Set the values for the template fields.
    pbs_options = {}
    pbs_options['job_name'] = 'pyunitTest'
    pbs_options['account'] = os.environ['DERECHO_TESTING_ACCOUNT']
    pbs_options['queue'] = 'main'
    pbs_options['job_priority'] = 'economy'
    pbs_options['walltime'] = '01:00:00'
    pbs_options['select'] = '1'
    pbs_options['ncpus'] = '128'
    pbs_options['mpiprocs'] = '1'
    pbs_options['ompthreads'] = '128'
    pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
    pbs_options['cdf_setup_script'] = (
        f"{os.environ['MAGE_TEST_ROOT']}/local/cdf/3.9.0/bin/definitions.B"
    )
    pbs_options['condarc'] = os.environ['CONDARC']
    pbs_options['conda_envs_path'] = os.environ['CONDA_ENVS_PATH']
    pbs_options['conda_environment'] = 'kaiju-3.8-testing'
    pbs_options['kaijuhome'] = os.environ['KAIJUHOME']
    pbs_options['tmpdir'] = os.environ['TMPDIR']
    pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
    pbs_options['pytest_output_file'] = 'kaiju-pyunit.txt'

    # Render the template and write it to a file.
    pbs_content = pbs_template.render(pbs_options)
    with open(PBS_FILE, 'w', encoding='utf-8') as f:
        f.write(pbs_content)

    # ------------------------------------------------------------------------

    # Copy the python unit test files from the source tree.
    for filename in ['test_satcomp_cdasws.py']:
        from_path = os.path.join(KAIJU_PYTESTS_DIRECTORY, filename)
        to_path = os.path.join('.', filename)
        shutil.copyfile(from_path, to_path)

    # ------------------------------------------------------------------------

    # Run the python unit tests as a PBS job.

    # Submit the unit test script for python.
    cmd = f"qsub {PBS_FILE}"
    if debug:
        print(f"cmd = {cmd}")
    cproc = subprocess.run(cmd, shell=True, check=True, text=True,
                           capture_output=True)
    readString = cproc.stdout.rstrip()
    if debug:
        print(f"readString = {readString}")
    # qsub prints the job ID as "<number>.<server>"; keep the number part.
    job_name_1 = readString.split('.')[0]
    if debug:
        print(f"job_name_1 = {job_name_1}")
    if verbose:
        print(
            f"Python unit test PBS script {PBS_FILE} submitted as "
            f"job {job_name_1}."
        )

    # ------------------------------------------------------------------------

    # Detail the test results.
    test_report_details_string = ''
    test_report_details_string += (
        f"Test results are in {os.getcwd()}.\n"
    )
    # BUGFIX: report the actual script name (PBS_FILE = 'pyunitTest.pbs')
    # instead of the stale hard-coded name 'pyunit.pbs'.
    test_report_details_string += (
        f"Python unit test PBS job script `{PBS_FILE}` submitted as job "
        f"{job_name_1}.\n"
    )

    # Summarize the test results.
    test_summary_message = (
        'Python unit tests submitted by `pyunitTest.py`'
        f" for branch or commit or tag {os.environ['BRANCH_OR_COMMIT']}: "
    )

    # Print the test results summary and details.
    print(test_summary_message)
    print(test_report_details_string)

    # If loud mode is on, post report to Slack.
    if be_loud:
        # Set up for communication with Slack only when it is needed,
        # matching the other testing scripts.
        slack_client = common.slack_create_client()
        if debug:
            print(f"slack_client = {slack_client}")
        test_summary_message += 'Details in thread for this message.\n'
        slack_response_summary = common.slack_send_message(
            slack_client, test_summary_message, is_test=is_test
        )
        if slack_response_summary['ok']:
            # Post the details as a threaded reply to the summary.
            thread_ts = slack_response_summary['ts']
            slack_response_details = common.slack_send_message(
                slack_client, test_report_details_string, thread_ts=thread_ts,
                is_test=is_test
            )
            if not slack_response_details['ok']:
                print('*ERROR* Unable to post test result details to Slack.')
        else:
            print('*ERROR* Unable to post test result summary to Slack.')

    # ------------------------------------------------------------------------

    if debug:
        print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")


if __name__ == '__main__':
    main()

View File

@@ -1,12 +1,18 @@
# NOTE: Embedding the command sent to derecho via ssh in double-quotes
# ensures proper remote argument parsing.
# Run tests every morning on the development and master branches.
05 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -lv,unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-development.out 2>&1
10 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -lv,unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-master.out 2>&1
# Run build, initial condition, and Intel Inspector tests every morning on the
# development and master branches.
05 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-development.out 2>&1
10 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'buildTest.py -lv,ICtest.py -lv,intelChecks.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-master.out 2>&1
# Run weekly dashes every Monday morning for the development and
# master branches.
# Run Fortran unit tests every morning, separately (since they change the
# source tree).
15 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-development.out 2>&1
20 00 * * * ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'unitTest.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/nightly-tests-master.out 2>&1
# Run weekly dashes every Monday morning for the development and master
# branches.
# Duplicate dashes for development
25 00 * * 1 ssh derecho "/glade/u/home/ewinter/scratch/mage_testing/kaiju-private/ewinter-derecho_testing/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'weeklyDash.py -lv'" >> /glade/u/home/ewinter/scratch/mage_testing/logs/weeklyDash-development-1.out 2>&1

View File

@@ -21,9 +21,9 @@ Eric Winter
# Import standard modules.
import datetime
import os
import sys
import shutil
import subprocess
import sys
# Import 3rd-party modules.
from jinja2 import Template

View File

@@ -7,6 +7,18 @@ tests are run as PBS jobs on derecho. There will be one job which generates
the data for testing, then 1 or more dependent jobs that use the newly-
generated data for unit testing, then a job for the test report.
There are 5 PBS jobs used per module set:
1. Data generation - Runs in about 17 minutes on 4 derecho nodes.
2. Case tests -
3. Non-case tests #1 -
4. Non-case tests #2 -
5. Report generation -
NOTE: If this script is run as part of a set of tests for run_mage_tests.sh,
this script must be listed *last*, since it makes changes to the kaiju source
code tree that are incompatible with the other tests.
@@ -21,13 +33,13 @@ Eric Winter
# Import standard modules.
import datetime
# import glob
import os
import shutil
import subprocess
import sys
# Import 3rd-party modules.
from jinja2 import Template
# Import project modules.
import common
@@ -38,6 +50,9 @@ import common
# Program description.
DESCRIPTION = 'Script for MAGE Fortran unit testing'
# Home directory of kaiju installation
KAIJUHOME = os.environ['KAIJUHOME']
# Root of directory tree for this set of tests.
MAGE_TEST_SET_ROOT = os.environ['MAGE_TEST_SET_ROOT']
@@ -60,15 +75,9 @@ PFUNIT_BINARY_DIRECTORIES = [
'PFUNIT-4.2',
]
# Home directory of kaiju installation
KAIJUHOME = os.environ['KAIJUHOME']
# Path to kaiju subdirectory for external code
KAIJU_EXTERNAL_DIRECTORY = os.path.join(KAIJUHOME, 'external')
# Prefix for naming unit test directories
UNIT_TEST_DIRECTORY_PREFIX = 'unitTest_'
# Path to directory containing the test scripts
TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')
@@ -79,17 +88,31 @@ MODULE_LIST_DIRECTORY = os.path.join(TEST_SCRIPTS_DIRECTORY,
# Name of file containing names of modules lists to use for unit tests
UNIT_TEST_LIST_FILE = os.path.join(MODULE_LIST_DIRECTORY, 'unit_test.lst')
# Path to directory containing unit test files.
TESTS_DIRECTORY = os.path.join(KAIJUHOME, 'tests')
# Path to directory containing the unit test scripts
UNIT_TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'tests')
# PBS scripts for unit test jobs.
UNIT_TEST_PBS_SCRIPTS = [
'genTestData.pbs',
'runCaseTests.pbs',
'runNonCaseTests1.pbs',
# 'runNonCaseTests2.pbs', # Hangs for 12 hours
'unitTestReport.pbs',
]
# Paths to jinja2 template files for PBS scripts.
DATA_GENERATION_PBS_TEMPLATE = os.path.join(
UNIT_TEST_SCRIPTS_DIRECTORY, 'genTestData-template.pbs'
)
RUN_CASE_TESTS_PBS_TEMPLATE = os.path.join(
UNIT_TEST_SCRIPTS_DIRECTORY, 'runCaseTests-template.pbs'
)
RUN_NON_CASE_TESTS_1_PBS_TEMPLATE = os.path.join(
UNIT_TEST_SCRIPTS_DIRECTORY, 'runNonCaseTests1-template.pbs'
)
RUN_NON_CASE_TESTS_2_PBS_TEMPLATE = os.path.join(
UNIT_TEST_SCRIPTS_DIRECTORY, 'runNonCaseTests2-template.pbs'
)
UNIT_TEST_REPORT_PBS_TEMPLATE = os.path.join(
UNIT_TEST_SCRIPTS_DIRECTORY, 'unitTestReport-template.pbs'
)
# Prefix for naming unit test directories
UNIT_TEST_DIRECTORY_PREFIX = 'unitTest_'
# Name of build subdirectory containing binaries
BUILD_BIN_DIR = 'bin'
# Input files for unit tests
UNIT_TEST_DATA_INPUT_DIRECTORY = os.path.join(
@@ -102,17 +125,12 @@ UNIT_TEST_DATA_INPUT_FILES = [
'rcmconfig.h5',
]
# Name of build subdirectory containing binaries
BUILD_BIN_DIR = 'bin'
# Name of PBS account to use for testing jobs.
DERECHO_TESTING_ACCOUNT = os.environ['DERECHO_TESTING_ACCOUNT']
# Token string for access to Slack.
SLACK_BOT_TOKEN = os.environ['SLACK_BOT_TOKEN']
# Branch or commit (or tag) used for testing.
BRANCH_OR_COMMIT = os.environ['BRANCH_OR_COMMIT']
# Names of PBS scripts to create from templates.
DATA_GENERATION_PBS_SCRIPT = 'genTestData.pbs'
RUN_CASE_TESTS_PBS_SCRIPT = 'runCaseTests.pbs'
RUN_NON_CASE_TESTS_1_PBS_SCRIPT = 'runNonCaseTests1.pbs'
RUN_NON_CASE_TESTS_2_PBS_SCRIPT = 'runNonCaseTests2.pbs'
UNIT_TEST_REPORT_PBS_SCRIPT = 'unitTestReport.pbs'
def main():
@@ -142,6 +160,7 @@ def main():
print(f"args = {args}")
debug = args.debug
be_loud = args.loud
slack_on_fail = args.slack_on_fail
is_test = args.test
verbose = args.verbose
@@ -153,7 +172,7 @@ def main():
# ------------------------------------------------------------------------
# Make a directory to hold all of the unit tests.
# Make a directory to hold all of the Fortran unit tests.
if verbose:
print(f"Creating ${UNIT_TEST_DIRECTORY}.")
os.mkdir(UNIT_TEST_DIRECTORY)
@@ -181,20 +200,68 @@ def main():
# ------------------------------------------------------------------------
# Initialize job ID to None for all module set/PBS script combinations.
job_ids = []
for _ in module_list_files:
job_ids.append([None]*len(UNIT_TEST_PBS_SCRIPTS))
# Read the template for the PBS script used for the test data generation.
with open(DATA_GENERATION_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
template_content = f.read()
data_generation_pbs_template = Template(template_content)
if debug:
print(f"data_generation_pbs_template = {data_generation_pbs_template}")
# Read the template for the PBS script used for the case tests.
with open(RUN_CASE_TESTS_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
template_content = f.read()
run_case_tests_pbs_template = Template(template_content)
if debug:
print(f"run_case_tests_pbs_template = {run_case_tests_pbs_template}")
# Read the template for the PBS script used for the 1st non-case tests.
with open(RUN_NON_CASE_TESTS_1_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
template_content = f.read()
run_non_case_tests_1_pbs_template = Template(template_content)
if debug:
print('run_non_case_tests_1_pbs_template = '
f"{run_non_case_tests_1_pbs_template}")
# Read the template for the PBS script used for the 2nd non-case tests.
with open(RUN_NON_CASE_TESTS_2_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
template_content = f.read()
run_non_case_tests_2_pbs_template = Template(template_content)
if debug:
print('run_non_case_tests_2_pbs_template = '
f"{run_non_case_tests_2_pbs_template}")
# Read the template for the PBS script used for report generation.
with open(UNIT_TEST_REPORT_PBS_TEMPLATE, 'r', encoding='utf-8') as f:
template_content = f.read()
unit_test_report_pbs_template = Template(template_content)
if debug:
print('unit_test_report_pbs_template = '
f"{unit_test_report_pbs_template}")
# ------------------------------------------------------------------------
# Create the common make command for all module sets.
make_cmd = 'make gamera_mpi voltron_mpi allTests'
if debug:
print(f"make_cmd = {make_cmd}")
# Create the list for submit results. Only set to True if all qsub commands
# for a set are OK.
submit_ok = [False]*len(module_list_files)
if debug:
print(f"submit_ok = {submit_ok}")
# Create a list of lists for job IDs. There are 5 job IDs per set - one for
# data generation, case tests, non-case tests 1, non-case tests 2, and the
# test report.
job_ids = [[None, None, None, None, None]]*len(module_list_files)
if debug:
print(f"job_ids = {job_ids}")
# Run the unit tests with each set of modules.
for (i_set, module_list_file) in enumerate(module_list_files):
for (i_module_set, module_list_file) in enumerate(module_list_files):
if verbose:
print('Performing initial condition build tests with module set '
print('Performing unit tests with module set '
f"{module_list_file}.")
# Extract the name of the list.
@@ -202,8 +269,8 @@ def main():
if debug:
print(f"module_set_name = {module_set_name}.")
# Read this module list file, extracting cmake environment and
# options, if any.
# # Read this module list file, extracting cmake environment and
# # options, if any.
path = os.path.join(MODULE_LIST_DIRECTORY, module_list_file)
if debug:
print(f"path = {path}")
@@ -215,18 +282,20 @@ def main():
print(f"cmake_environment = {cmake_environment}")
print(f"cmake_options = {cmake_options}")
# Assemble the commands to load the listed modules.
# <HACK>
# Extra argument needed for unit test build.
cmake_options += ' -DCMAKE_BUILD_TYPE=RELWITHDEBINFO'
if debug:
print(f"cmake_options = {cmake_options}")
# </HACK>
# Assemble the command to load the listed modules.
module_cmd = (
f"module --force purge; module load {' '.join(module_names)}"
)
if debug:
print(f"module_cmd = {module_cmd}")
# <HACK>
# Extra argument needed for unit test build.
cmake_options += ' -DCMAKE_BUILD_TYPE=RELWITHDEBINFO'
# </HACK>
# Make a directory for this test, and go there.
dir_name = f"{UNIT_TEST_DIRECTORY_PREFIX}{module_set_name}"
build_directory = os.path.join(UNIT_TEST_DIRECTORY, dir_name)
@@ -249,7 +318,7 @@ def main():
print(f"cmd = {cmd}")
try:
# NOTE: stdout and stderr go to cmake.out.
_ = subprocess.run(cmd, shell=True, check=True)
cproc = subprocess.run(cmd, shell=True, check=True)
except subprocess.CalledProcessError as e:
print(
f"ERROR: cmake for module set {module_set_name} failed.\n"
@@ -289,12 +358,6 @@ def main():
# Go to the bin directory for testing.
os.chdir(BUILD_BIN_DIR)
# Copy in the PBS scripts for unit testing.
for filename in UNIT_TEST_PBS_SCRIPTS:
from_path = os.path.join(TESTS_DIRECTORY, filename)
to_path = os.path.join('.', filename)
shutil.copyfile(from_path, to_path)
# Copy in inputs for unit test data generation.
for filename in UNIT_TEST_DATA_INPUT_FILES:
from_path = os.path.join(
@@ -303,130 +366,262 @@ def main():
to_path = os.path.join('.', filename)
shutil.copyfile(from_path, to_path)
# Submit the jobs to create the test data and run the unit
# tests. Note that the unit test jobs will only run if the
# data generation job completes successfully.
for (j_pbs, pbs_file) in enumerate(UNIT_TEST_PBS_SCRIPTS):
job_id = None
cmd = (
f"qsub -A {DERECHO_TESTING_ACCOUNT} "
f"-v MODULE_LIST='{' '.join(module_names)}',"
f"KAIJUROOTDIR={KAIJUHOME},"
f"MAGE_TEST_SET_ROOT={MAGE_TEST_SET_ROOT},"
f"DERECHO_TESTING_ACCOUNT={DERECHO_TESTING_ACCOUNT},"
f"SLACK_BOT_TOKEN={SLACK_BOT_TOKEN}"
)
# <HACK>
# Assumes data generation job is first.
if j_pbs > 0:
cmd += f" -W depend=afterok:{job_ids[i_set][0]}"
# </HACK>
# <HACK>
# Assumes report generation job is last.
if pbs_file == 'unitTestReport.pbs':
cmd += f" -W depend=afterok:{':'.join(job_ids[i_set][1:-1])}"
# </HACK>
cmd += f" {pbs_file}"
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
path = os.path.join(
build_directory, BUILD_BIN_DIR, f"qsub_{j_pbs}.out"
)
with open(path, 'w', encoding='utf-8') as f:
f.write(e.stdout)
print(
'ERROR: Job submission failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
f"See {path} for output from qsub.\n"
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr
)
continue
# Assemble common data to fill in the PBS templates.
pbs_options = {}
pbs_options['account'] = os.environ['DERECHO_TESTING_ACCOUNT']
pbs_options['queue'] = os.environ['DERECHO_TESTING_QUEUE']
pbs_options['job_priority'] = os.environ['DERECHO_TESTING_PRIORITY']
pbs_options['modules'] = module_names
pbs_options['kaijuhome'] = KAIJUHOME
pbs_options['tmpdir'] = os.environ['TMPDIR']
pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
pbs_options['mage_test_set_root'] = os.environ['MAGE_TEST_SET_ROOT']
# Save the job ID.
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_set][j_pbs] = job_id
# End of loop over PBS scripts
# Set options specific to the data generation job, then render the
# template.
pbs_options['job_name'] = 'mage_genTestData'
pbs_options['walltime'] = '00:30:00'
pbs_content = data_generation_pbs_template.render(pbs_options)
with open(DATA_GENERATION_PBS_SCRIPT, 'w', encoding='utf-8') as f:
f.write(pbs_content)
# Record the job IDs in a text file.
# Set options specific to the case tests job, then render the
# template.
pbs_options['job_name'] = 'mage_runCaseTests'
pbs_options['walltime'] = '12:00:00'
pbs_content = run_case_tests_pbs_template.render(pbs_options)
with open(RUN_CASE_TESTS_PBS_SCRIPT, 'w', encoding='utf-8') as f:
f.write(pbs_content)
# Set options specific to the 1st non-case tests job, then render the
# template.
pbs_options['job_name'] = 'mage_runNonCaseTests1'
pbs_options['walltime'] = '12:00:00'
pbs_content = run_non_case_tests_1_pbs_template.render(pbs_options)
with open(RUN_NON_CASE_TESTS_1_PBS_SCRIPT, 'w', encoding='utf-8') as f:
f.write(pbs_content)
# Set options specific to the 2nd non-case tests job, then render the
# template.
pbs_options['job_name'] = 'mage_runNonCaseTests2'
pbs_options['walltime'] = '12:00:00'
pbs_content = run_non_case_tests_2_pbs_template.render(pbs_options)
with open(RUN_NON_CASE_TESTS_2_PBS_SCRIPT, 'w', encoding='utf-8') as f:
f.write(pbs_content)
# Set options specific to the report generation job, then render the
# template.
pbs_options['job_name'] = 'mage_unitTestReport'
pbs_options['walltime'] = '00:00:10'
pbs_options['report_options'] = ''
if debug:
pbs_options['report_options'] += ' -d'
if be_loud:
pbs_options['report_options'] += ' -l'
if slack_on_fail:
pbs_options['report_options'] += ' -s'
if is_test:
pbs_options['report_options'] += ' -t'
if verbose:
pbs_options['report_options'] += ' -v'
pbs_content = unit_test_report_pbs_template.render(pbs_options)
with open(UNIT_TEST_REPORT_PBS_SCRIPT, 'w', encoding='utf-8') as f:
f.write(pbs_content)
# Run the data generation job.
cmd = f"qsub {DATA_GENERATION_PBS_SCRIPT}"
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, capture_output=True)
except subprocess.CalledProcessError as e:
print('ERROR: qsub failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
'See test log for output.\n'
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr)
continue
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_module_set][0] = job_id
# Run the case tests job if data was generated.
cmd = (
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
f"{RUN_CASE_TESTS_PBS_SCRIPT}"
)
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, capture_output=True)
except subprocess.CalledProcessError as e:
print('ERROR: qsub failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
'See test log for output.\n'
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr)
continue
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_module_set][1] = job_id
# Run the 1st non-case tests job if data was generated.
cmd = (
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
f"{RUN_NON_CASE_TESTS_1_PBS_SCRIPT}"
)
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, capture_output=True)
except subprocess.CalledProcessError as e:
print('ERROR: qsub failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
'See test log for output.\n'
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr)
continue
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_module_set][2] = job_id
# Run the 2nd non-case tests job if data was generated.
cmd = (
f"qsub -W depend=afterok:{job_ids[i_module_set][0]} "
f"{RUN_NON_CASE_TESTS_2_PBS_SCRIPT}"
)
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, capture_output=True)
except subprocess.CalledProcessError as e:
print('ERROR: qsub failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
'See test log for output.\n'
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr)
continue
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_module_set][3] = job_id
# Run the report generation job if all others ran OK.
cmd = (
f"qsub -W depend=afterok:{':'.join(job_ids[i_module_set][1:])} "
f"{UNIT_TEST_REPORT_PBS_SCRIPT}"
)
if debug:
print(f"cmd = {cmd}")
try:
cproc = subprocess.run(cmd, shell=True, check=True,
text=True, capture_output=True)
except subprocess.CalledProcessError as e:
print('ERROR: qsub failed.\n'
f"e.cmd = {e.cmd}\n"
f"e.returncode = {e.returncode}\n"
'See test log for output.\n'
'Skipping remaining steps for module set '
f"{module_set_name}.",
file=sys.stderr)
continue
job_id = cproc.stdout.split('.')[0]
if debug:
print(f"job_id = {job_id}")
job_ids[i_module_set][3] = job_id
# Record the job IDs for this module set in a file.
with open('jobs.txt', 'w', encoding='utf-8') as f:
for job_id in job_ids[i_set]:
for job_id in job_ids[i_module_set]:
f.write(f"{job_id}\n")
# This module set worked.
submit_ok[i_module_set] = True
# End of loop over module sets
# ------------------------------------------------------------------------
# Set up for communication with Slack.
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
# ------------------------------------------------------------------------
# NOTE: Assumes only 1 module set was used.
# Detail the test results
test_report_details_string = ''
test_report_details_string += (
f"Test results are in {os.getcwd()}.\n"
)
test_report_details_string += (
'Fortran unit test PBS job script `genTestData.pbs` submitted as job '
f"{job_ids[0][0]}.\n"
)
test_report_details_string += (
'Fortran unit test PBS job script `runCaseTests.pbs` submitted as job '
f"{job_ids[0][1]}.\n"
)
test_report_details_string += (
'Fortran unit test PBS job script `runNonCaseTests1.pbs` submitted as'
f" job {job_ids[0][2]}.\n"
)
test_report_details_string += (
'Fortran unit test PBS job script `runNonCaseTests2.pbs` skipped'
' since it currently hangs on `derecho`.\n'
)
test_report_details_string += (
'Fortran unit test report PBS job script `unitTestReport.pbs` '
f"submitted as job {job_ids[0][3]}.\n"
f"Test results are in `{UNIT_TEST_DIRECTORY}`.\n"
)
for (i_module_set, module_list_file) in enumerate(module_list_files):
if not submit_ok[i_module_set]:
test_report_details_string += (
f"Module set `{module_list_file}`: *FAILED*"
)
continue
test_report_details_string += (
f"`{DATA_GENERATION_PBS_SCRIPT}` for module set "
f"`{module_list_file}` submitted as PBS job "
f"{job_ids[i_module_set][0]}.\n"
)
test_report_details_string += (
f"`{RUN_CASE_TESTS_PBS_SCRIPT}` for module set "
f"`{module_list_file}` submitted as PBS job "
f"{job_ids[i_module_set][1]}.\n"
)
test_report_details_string += (
f"`{RUN_NON_CASE_TESTS_1_PBS_SCRIPT}` for module set "
f"`{module_list_file}` submitted as PBS job "
f"{job_ids[i_module_set][2]}.\n"
)
test_report_details_string += (
f"`{RUN_NON_CASE_TESTS_2_PBS_SCRIPT}` for module set "
f"`{module_list_file}` submitted as PBS job "
f"{job_ids[i_module_set][3]}.\n"
)
test_report_details_string += (
f"`{UNIT_TEST_REPORT_PBS_SCRIPT}` for module set "
f"`{module_list_file}` submitted as PBS job "
f"{job_ids[i_module_set][4]}.\n"
)
# Summarize the test results
test_report_summary_string = (
'Fortran unit tests submitted by `unitTest.py`'
f" for branch or commit or tag {BRANCH_OR_COMMIT}\n"
f" for branch or commit or tag {os.environ['BRANCH_OR_COMMIT']}\n"
)
# Print the test results summary and details.
print(test_report_summary_string)
print(test_report_details_string)
# If loud mode is on, post report to Slack.
if be_loud:
test_report_summary_string += 'Details in thread for this messsage.\n'
# If a test failed, or loud mode is on, post report to Slack.
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
slack_response_summary = common.slack_send_message(
slack_client, test_report_summary_string, is_test=is_test
)
if slack_response_summary['ok']:
thread_ts = slack_response_summary['ts']
slack_response_details = common.slack_send_message(
slack_client, test_report_details_string, thread_ts=thread_ts,
is_test=is_test
)
if 'ok' not in slack_response_details:
print('*ERROR* Unable to post test details to Slack.')
else:
print('*ERROR* Unable to post test summary to Slack.')
if debug:
print(f"slack_response_summary = {slack_response_summary}")
thread_ts = slack_response_summary['ts']
slack_response_summary = common.slack_send_message(
slack_client, test_report_details_string, thread_ts=thread_ts,
is_test=is_test
)
if debug:
print(f"slack_response_summary = {slack_response_summary}")
# ------------------------------------------------------------------------

View File

@@ -75,6 +75,7 @@ def main():
print(f"args = {args}")
debug = args.debug
be_loud = args.loud
slack_on_fail = args.slack_on_fail
is_test = args.test
verbose = args.verbose
@@ -187,15 +188,6 @@ def main():
# ------------------------------------------------------------------------
# Set up for communication with Slack.
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
# ------------------------------------------------------------------------
# NOTE: Assumes only 1 module set was used.
# Detail the test results
test_report_details_string = ''
test_report_details_string += (
@@ -217,28 +209,29 @@ def main():
if myError or jobKilled or okFailure:
test_report_summary_string += '*FAILED*\n'
else:
test_report_summary_string += '*ALL PASSED*\n'
test_report_summary_string += '*PASSED*\n'
# Print the test results summary and details.
print(test_report_summary_string)
print(test_report_details_string)
# If loud mode is on, post report to Slack.
if be_loud:
test_report_summary_string += 'Details in thread for this messsage.\n'
# If a test failed, or loud mode is on, post report to Slack.
if (slack_on_fail and 'FAILED' in test_report_details_string) or be_loud:
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
slack_response_summary = common.slack_send_message(
slack_client, test_report_summary_string, is_test=is_test
)
if slack_response_summary['ok']:
thread_ts = slack_response_summary['ts']
slack_response_details = common.slack_send_message(
slack_client, test_report_details_string, thread_ts=thread_ts,
is_test=is_test
)
if 'ok' not in slack_response_details:
print('*ERROR* Unable to post test details to Slack.')
else:
print('*ERROR* Unable to post test summary to Slack.')
if debug:
print(f"slack_response_summary = {slack_response_summary}")
thread_ts = slack_response_summary['ts']
slack_response_summary = common.slack_send_message(
slack_client, test_report_details_string, thread_ts=thread_ts,
is_test=is_test
)
if debug:
print(f"slack_response_summary = {slack_response_summary}")
# ------------------------------------------------------------------------

View File

@@ -1,46 +1,28 @@
#!/bin/bash
#PBS -N {{ job_name }}
#PBS -A P28100045
#PBS -q main
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l select=8:ncpus=128:mpiprocs=2:ompthreads=64+3:ncpus=128:mpiprocs=1:ompthreads=128
#PBS -l walltime=06:00:00
#PBS -l walltime={{ walltime }}
#PBS -j oe
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
{{ module_cmd }}
echo 'The following modules are loaded:'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
echo 'Setting up MAGE environment.'
KAIJUROOTDIR={{ KAIJUROOTDIR }}
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
export TMPDIR=/glade/work/ewinter/mage_testing/derecho/tmp
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
export MAGE_TEST_ROOT='/glade/work/ewinter/mage_testing/derecho'
export MAGE_TEST_SET_ROOT={{ MAGE_TEST_SET_ROOT }}
export BRANCH_OR_COMMIT={{ BRANCH_OR_COMMIT }}
export SLACK_BOT_TOKEN='xoxb-1065817665921-1413594823303-gUePq3obrqlPmlCHC5E7rKVP'
export DERECHO_TESTING_ACCOUNT=P28100045
echo 'The active environment variables are:'
printenv
# Run the model.
$MPICOMMAND ./voltron_mpi.x weeklyDashGo.xml > weeklyDashGo.out
# Generate the report.
export CONDARC="${MAGE_TEST_ROOT}/condarc"
export CONDA_ENVS_PATH="${MAGE_TEST_ROOT}/conda"
mage_miniconda3="${MAGE_TEST_ROOT}/miniconda3"
echo 'Loading python environment.'
mage_test_root='{{ mage_test_root }}'
export CONDARC="${mage_test_root}/condarc"
export CONDA_ENVS_PATH="${mage_test_root}/conda"
mage_miniconda3="${mage_test_root}/miniconda3"
mage_conda="${mage_miniconda3}/bin/conda"
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
@@ -54,6 +36,26 @@ else
fi
unset __conda_setup
conda activate kaiju-3.8-testing
python $KAIJUHOME/testingScripts/weeklyDashReport.py -dtlv >& weeklyDashReport.out
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
export TMPDIR={{ tmpdir }}
export SLACK_BOT_TOKEN={{ slack_bot_token }}
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
export MAGE_TEST_ROOT=$mage_test_root
export MAGE_TEST_SET_ROOT={{ mage_test_set_root }}
export BRANCH_OR_COMMIT={{ branch_or_commit }}
echo 'The active environment variables are:'
printenv
# Run the model.
$MPICOMMAND ./voltron_mpi.x weeklyDashGo.xml >& weeklyDashGo.out
# Generate the report.
python $KAIJUHOME/testingScripts/weeklyDashReport.py {{ report_options }} >& weeklyDashReport.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -1,54 +0,0 @@
#!/bin/bash
#PBS -N wDashGo
#PBS -A P28100045
#PBS -l walltime=12:00:00
#PBS -l job_priority=economy
#PBS -q main
#PBS -j oe
#PBS -l select=8:ncpus=128:mpiprocs=2:ompthreads=64+3:ncpus=128:mpiprocs=1:ompthreads=128
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Setting up MAGE environment.'
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
echo 'Loading modules.'
module --force purge
module load $MODULE_LIST
echo 'The following modules are loaded:'
module list
echo 'Setting environment variables.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
export TMPDIR=/glade/work/ewinter/mage_testing/derecho/tmp
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Run the model.
$MPICOMMAND ./voltron_mpi.x weeklyDashGo.xml > weeklyDashGo.out
# Generate the report.
export MAGE_TEST_ROOT='/glade/work/ewinter/mage_testing/derecho'
export CONDARC="${MAGE_TEST_ROOT}/condarc"
export CONDA_ENVS_PATH="${MAGE_TEST_ROOT}/conda"
mage_miniconda3="${MAGE_TEST_ROOT}/miniconda3"
mage_conda="${mage_miniconda3}/bin/conda"
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "$mage_miniconda3/etc/profile.d/conda.sh" ]; then
. "$mage_miniconda3/etc/profile.d/conda.sh"
else
export PATH="$mage_miniconda3/bin:$PATH"
fi
fi
unset __conda_setup
conda activate kaiju-3.8-testing
python $KAIJUHOME/testingScripts/weeklyDashReport.py -dlv >& weeklyDashReport.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -28,9 +28,9 @@ import matplotlib.dates as mdates
import matplotlib.pyplot as plt
# Import project modules.
import common
import kaipy.kaiH5 as kh5
import kaipy.kaiViz as kv
import common
# Program constants
@@ -99,19 +99,6 @@ VOLTRON_OUTPUT_FILE_DEVELOPMENT = os.path.join(
REFERENCE_RESULTS_DIRECTORY_DEVELOPMENT, VOLTRON_OUTPUT_FILE
)
# # Home directory of kaiju installation
# KAIJUHOME = os.environ['KAIJUHOME']
# # Top-level directory for testing
# KAIJU_TESTING_HOME = '/glade/work/ewinter/mage_testing/derecho'
# # Path to directory containing weekly dash results.
# WEEKLY_DASH_DIRECTORY = os.path.join(KAIJUHOME, 'weeklyDash_01')
# # Name of weekly dash log file.
# weekly_dash_log_latest = 'weeklyDashGo.out'
# Compute the paths to the quicklook plots for the master branch.
MAGNETOSPHERE_QUICKLOOK_MASTER = os.path.join(
REFERENCE_RESULTS_DIRECTORY_MASTER, 'qkmsphpic.png'
@@ -168,6 +155,7 @@ def main():
print(f"args = {args}")
debug = args.debug
be_loud = args.loud
# slack_on_fail = args.slack_on_fail
is_test = args.test
verbose = args.verbose
@@ -1032,13 +1020,6 @@ def main():
# ------------------------------------------------------------------------
# Set up for communication with Slack.
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
# ------------------------------------------------------------------------
# List the files to post and their comments.
images_to_post = [
'perfPlots.png',
@@ -1069,6 +1050,9 @@ def main():
# If loud mode is on, post results to Slack.
if be_loud:
slack_client = common.slack_create_client()
if debug:
print(f"slack_client = {slack_client}")
message = (
'Weekly dash result plots complete on branch '
f"{BRANCH_OR_COMMIT}.\n"

View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Jinja2-templated PBS job script that generates MAGE test data by running
# voltron_mpi.x on geo_mpi.xml. The {{ ... }} placeholders are filled in by
# the submitting test script before qsub.
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l walltime={{ walltime }}
# 4 nodes with 2 MPI ranks each plus 1 node with 1 rank (128 cores/node).
#PBS -l select=4:ncpus=128:mpiprocs=2:ompthreads=64+1:ncpus=128:mpiprocs=1:ompthreads=128
#PBS -j oe
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
# Load exactly the modules the test harness was configured with.
echo 'Loading modules.'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
# NOTE(review): the select line requests ompthreads=64/128 but
# OMP_NUM_THREADS is forced to 128 here — confirm the mismatch is intended.
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
echo 'Generating data for testing.'
# Run the model under the core-pinning wrapper; '>&' captures both stdout
# and stderr (bash extension).
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
$MPICOMMAND ./voltron_mpi.x geo_mpi.xml >& geo_mpi.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -0,0 +1,42 @@
#!/bin/bash
# Jinja2-templated PBS job script that runs the non-MPI and MPI case-based
# unit-test executables. The {{ ... }} placeholders are filled in by the
# submitting test script before qsub.
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l walltime={{ walltime }}
#PBS -l select=1:ncpus=128:mpiprocs=8:ompthreads=16
#PBS -j oe
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
# NOTE(review): the select line requests ompthreads=16 but OMP_NUM_THREADS
# is forced to 128 here — confirm which thread count is intended.
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
echo 'Running non-MPI test cases.'
./caseTests >& caseTests.out
echo 'Non-MPI test cases complete.'
# NOTE(review): tail ignores piped stdin when given a file operand, so the
# leading 'echo |' is a no-op — presumably 'echo; tail -n 3 ...' (a blank
# separator line) was intended.
echo | tail -n 3 ./caseTests.out
echo 'Running MPI test cases.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
${MPICOMMAND} ./caseMpiTests >& caseMpiTests.out
echo 'MPI test cases complete.'
echo | tail -n 3 ./caseMpiTests.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -0,0 +1,57 @@
#!/bin/bash
# Jinja2-templated PBS job script that runs the serial GAMERA, REMIX, and
# VOLTRON unit-test executables, then the base and GAMERA MPI tests. The
# {{ ... }} placeholders are filled in by the submitting test script.
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l walltime={{ walltime }}
# NOTE(review): 64 MPI ranks x 128 OpenMP threads on one 128-core node
# heavily oversubscribes — confirm this resource line is intended.
#PBS -l select=1:ncpus=128:mpiprocs=64:ompthreads=128
#PBS -j oe
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Each test binary logs to its own .out file; the last 3 lines are echoed
# as a quick pass/fail summary.
# NOTE(review): in 'echo | tail -n 3 FILE', tail ignores the piped stdin
# when given a file operand, so the leading 'echo |' is a no-op —
# presumably 'echo; tail -n 3 FILE' was intended.
echo 'Running GAMERA tests.'
./gamTests >& gamTests.out
echo 'GAMERA tests complete.'
echo | tail -n 3 ./gamTests.out
echo 'Running REMIX tests.'
./mixTests >& mixTests.out
echo 'REMIX tests complete.'
echo | tail -n 3 ./mixTests.out
echo 'Running VOLTRON tests.'
./voltTests >& voltTests.out
echo 'VOLTRON tests complete.'
echo | tail -n 3 ./voltTests.out
echo 'Running base MPI tests.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
${MPICOMMAND} ./baseMpiTests >& baseMpiTests.out
echo 'Base MPI tests complete.'
echo | tail -n 3 ./baseMpiTests.out
echo 'Running GAMERA MPI tests.'
${MPICOMMAND} ./gamMpiTests >& gamMpiTests.out
echo 'GAMERA MPI tests complete.'
echo | tail -n 3 ./gamMpiTests.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -0,0 +1,43 @@
#!/bin/bash
# PBS job script for the VOLTRON MPI tests (nonCaseTests2). Unlike the
# jinja2 templates elsewhere in this commit, this script is submitted
# as-is and takes its configuration from qsub -v environment variables.
#PBS -N nonCaseTests2
# NOTE(review): the account is hardcoded here even though the NOTE below
# says it must be supplied with qsub -A — confirm which should win.
#PBS -A P28100045
#PBS -l walltime=12:00:00
#PBS -q main
#PBS -l select=2:ncpus=128:mpiprocs=9:ompthreads=128
#PBS -j oe
#PBS -m abe
# NOTE: The user account must be specified on the qsub command line with the
# -A option,
# KAIJUROOTDIR and MODULE_LIST must be set as transferred environment
# variables on the qsub command line.
# Example qsub command:
# NOTE(review): the example below is missing the variable name for the
# first -v entry — presumably 'KAIJUROOTDIR=$HOME/kaiju' was intended.
# qsub -A P28100045 -v $HOME/kaiju,MODULE_LIST='module1 module2 ...'
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Setting up MAGE environment.'
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
echo 'Loading modules.'
module --force purge
module load $MODULE_LIST
echo 'The following modules are loaded:'
module list
echo 'Setting environment variables.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
echo 'Running VOLTRON MPI tests.'
date
# NOTE(review): '>' captures stdout only; sibling scripts use '>&' to also
# capture stderr — confirm the difference is intentional.
${MPICOMMAND} ./voltMpiTests > voltMpiTests.out
date
echo 'VOLTRON MPI tests complete.'
# NOTE(review): the leading 'echo |' is a no-op here (tail ignores piped
# stdin when given a file operand).
echo | tail -n 3 ./voltMpiTests.out
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

View File

@@ -1,32 +1,27 @@
#!/bin/bash
#PBS -N nonCaseTests2
#PBS -A P28100045
#PBS -l walltime=12:00:00
#PBS -q main
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l walltime={{ walltime }}
#PBS -l select=2:ncpus=128:mpiprocs=9:ompthreads=128
#PBS -j oe
#PBS -m abe
# NOTE: The user account must be specified on the qsub command line with the
# -A option,
# KAIJUROOTDIR and MODULE_LIST must be set as transferred environment
# variables on the qsub command line.
# Example qsub command:
# qsub -A P28100045 -v $HOME/kaiju,MODULE_LIST='module1 module2 ...'
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Setting up MAGE environment.'
source $KAIJUROOTDIR/scripts/setupEnvironment.sh
echo 'Loading modules.'
module --force purge
module load $MODULE_LIST
echo 'The following modules are loaded:'
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
export OMP_NUM_THREADS=128
export MPI_TYPE_DEPTH=32
export KMP_STACKSIZE=128M
@@ -34,9 +29,8 @@ echo 'The active environment variables are:'
printenv
echo 'Running VOLTRON MPI tests.'
date
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
${MPICOMMAND} ./voltMpiTests > voltMpiTests.out
date
echo 'VOLTRON MPI tests complete.'
echo | tail -n 3 ./voltMpiTests.out

View File

@@ -0,0 +1,50 @@
#!/bin/bash
# Jinja2-templated PBS job script that activates the project-managed conda
# environment and runs unitTestReport.py to summarize unit-test results.
# NOTE(review): the job name, account, walltime, and queue are hardcoded
# here while sibling templates parameterize them as {{ job_name }},
# {{ account }}, etc. — confirm this inconsistency is intentional.
#PBS -N unitTestReport
#PBS -A P28100045
#PBS -l walltime=00:10:00
#PBS -q main
#PBS -l select=1:ncpus=128
#PBS -j oe
#PBS -m abe
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
module list
# Bootstrap the project miniconda without relying on the user's shell init
# (the standard 'conda shell.bash hook' pattern, with fallbacks to the
# profile.d script or a plain PATH prepend).
echo 'Loading python environment.'
mage_test_root='{{ mage_test_root }}'
export CONDARC="${mage_test_root}/condarc"
export CONDA_ENVS_PATH="${mage_test_root}/conda"
mage_miniconda3="${mage_test_root}/miniconda3"
mage_conda="${mage_miniconda3}/bin/conda"
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "$mage_miniconda3/etc/profile.d/conda.sh" ]; then
. "$mage_miniconda3/etc/profile.d/conda.sh"
else
export PATH="$mage_miniconda3/bin:$PATH"
fi
fi
unset __conda_setup
conda activate kaiju-3.8-testing
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
echo 'Setting environment variables.'
export MAGE_TEST_SET_ROOT={{ mage_test_set_root }}
# Token is injected at render time by the submitting script — an
# improvement over the hardcoded token in the replaced template.
export SLACK_BOT_TOKEN={{ slack_bot_token }}
echo 'The active environment variables are:'
printenv
echo 'Generating unit test report.'
python $KAIJUHOME/testingScripts/unitTestReport.py {{ report_options }}
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."