Overhaul unit test architecture to better handle script failures.

This commit is contained in:
Eric Winter
2025-10-21 11:05:42 -06:00
parent 3cfe636d57
commit 91b51dd8f5
10 changed files with 918 additions and 637 deletions

View File

@@ -9,8 +9,6 @@
#PBS -j oe
#PBS -m abe
# This script just builds the MAGE software.
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'

View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python
"""Send a message to Slack.
Send a message to Slack.
Authors
-------
Eric Winter
"""
# Import standard modules.
import datetime
# import glob
import os
import sys
# # Import 3rd-party modules.
# Import project modules.
import common
# Program constants
# Program description.
DESCRIPTION = "Send a message to Slack."
# # Root of directory tree for this set of tests.
# MAGE_TEST_SET_ROOT = os.environ['MAGE_TEST_SET_ROOT']
# # Directory for unit tests
# UNIT_TEST_DIRECTORY = os.path.join(MAGE_TEST_SET_ROOT, 'unitTest')
# # glob pattern for naming unit test directories
# UNIT_TEST_DIRECTORY_GLOB_PATTERN = 'unitTest_*'
# # Name of build subdirectory containing binaries
# BUILD_BIN_DIR = 'bin'
# # Name of file containing job IDs for each unit test directory.
# JOB_ID_LIST_FILE = 'jobs.txt'
def create_command_line_parser():
    """Create the command-line argument parser.

    Create the parser for command-line arguments.

    Parameters
    ----------
    None

    Returns
    -------
    parser : argparse.ArgumentParser
        Command-line argument parser for this script.

    Raises
    ------
    None
    """
    parser = common.create_command_line_parser(DESCRIPTION)
    # nargs="?" makes the positional argument optional so that `default`
    # actually takes effect; argparse ignores `default` on a required
    # positional, which would make the "(default: ...)" help text wrong.
    parser.add_argument(
        "message",
        nargs="?",
        default="",
        help="Message to send to Slack (default: %(default)s)"
    )
    return parser
def send_slack_message(args: dict = None):
    """Send a message to Slack.

    Send a message to Slack using the common Slack helper functions.

    Parameters
    ----------
    args : dict
        Dictionary of program options. Must contain the keys "debug",
        "test", and "message".

    Returns
    -------
    None

    Raises
    ------
    None
    """
    # Local convenience variables. Only the options this function
    # actually uses are unpacked.
    debug = args["debug"]
    is_test = args["test"]
    message = args["message"]
    # ------------------------------------------------------------------------
    if debug:
        print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
        print(f"Current directory is {os.getcwd()}")
    # ------------------------------------------------------------------------
    # Create the Slack client and send the message.
    slack_client = common.slack_create_client()
    slack_response_summary = common.slack_send_message(
        slack_client, message, is_test=is_test
    )
    # Surface the Slack response in debug mode (matches the debug output
    # convention used by the unit-test report script).
    if debug:
        print(f"slack_response_summary = {slack_response_summary}")
    # ------------------------------------------------------------------------
    if debug:
        print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
def main():
    """Begin main program.

    This is the main program code.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Raises
    ------
    None
    """
    # Build the command-line parser and read the program options.
    cli_parser = create_command_line_parser()
    cli_args = cli_parser.parse_args()
    if cli_args.debug:
        print(f"args = {cli_args}")
    # ------------------------------------------------------------------------
    # Hand off to the main program logic. vars() converts the argparse
    # Namespace into the plain dict expected by send_slack_message().
    send_slack_message(vars(cli_args))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,44 @@
#!/bin/bash
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l select=1:ncpus=128
#PBS -l walltime={{ walltime }}
#PBS -j oe
#PBS -m abe
# PBS batch script template (rendered by a template engine; the
# double-brace placeholders above and below are filled in at render
# time). It loads the required modules, copies pre-built pFUnit
# binaries into the kaiju external tree, then runs cmake and make.
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
module --force purge
# Load each module named in the template context.
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
echo 'The currently loaded modules are:'
module list
# Record the full environment in the job log for post-mortem debugging.
echo 'The active environment variables are:'
printenv
echo 'Copying pFUnit binaries.'
# Pre-built pFUnit libraries for this compiler/MPI stack are copied into
# the kaiju external directory so the unit-test build can link them.
pfunit_dir="{{ mage_test_root }}/pfunit/pFUnit-4.2.0/ifort-23-mpich-derecho"
kaiju_external_dir="{{ kaijuhome }}/external"
cp -rp "${pfunit_dir}/FARGPARSE-1.1" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/GFTL-1.3" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/GFTL_SHARED-1.2" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/PFUNIT-4.2" "${kaiju_external_dir}/"
# Build the code.
# Output is captured to cmake.out/make.out; `set -e` above aborts the
# job if either command fails.
cmd="{{ cmake_cmd }} >& cmake.out"
echo $cmd
eval $cmd
cmd="{{ make_cmd }} >& make.out"
echo $cmd
eval $cmd
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

File diff suppressed because it is too large Load Diff

View File

@@ -111,8 +111,7 @@ def main():
print(f"Checking unit test results in {unit_test_directory}.")
# Move to the directory containing the unit test results.
path = os.path.join(UNIT_TEST_DIRECTORY, unit_test_directory,
BUILD_BIN_DIR)
path = os.path.join(UNIT_TEST_DIRECTORY, unit_test_directory)
if debug:
print(f"path = {path}")
os.chdir(path)
@@ -136,19 +135,27 @@ def main():
# NOTE: This needs to be reorganized.
# Compute the names of the job log files.
job_file_0 = f"genTestData.o{job_ids[0]}" # 0 OKs
job_file_1 = f"runCaseTests.o{job_ids[1]}" # 2 OKs
job_file_2 = f"runNonCaseTests1.o{job_ids[2]}" # 6 OKs
job_file_3 = f"runNonCaseTests2.o{job_ids[3]}" # 1 OK
if debug:
print(f"job_file_0 = {job_file_0}")
print(f"job_file_1 = {job_file_1}")
print(f"job_file_2 = {job_file_2}")
print(f"job_file_3 = {job_file_3}")
# 0 OKs
job_file_build = f"../unitTest-build.o{job_ids[0]}"
# 0 OKs
job_file_genTestData = f"../unitTest-genTestData.o{job_ids[1]}"
# 2 OKs
job_file_caseTests = f"../unitTest-caseTests.o{job_ids[2]}"
# 6 OKs
job_file_noncaseTests1 = f"../unitTest-noncaseTests1.o{job_ids[3]}"
# 1 OK
job_file_noncaseTests2 = f"../unitTest-noncaseTests2.o{job_ids[4]}"
# Combine the results of each test log file.
os.chdir("bin")
bigFile = []
job_files = [job_file_0, job_file_1, job_file_2, job_file_3]
job_files = [
job_file_build,
job_file_genTestData,
job_file_caseTests,
job_file_noncaseTests1,
job_file_noncaseTests2,
]
for job_file in job_files:
with open(job_file, 'r', encoding='utf-8') as f:
bigFile += f.readlines()
@@ -234,12 +241,14 @@ def main():
)
if debug:
print(f"slack_response_summary = {slack_response_summary}")
# Also write a summary file to the root folder of this test
with open(os.path.join(MAGE_TEST_SET_ROOT,'testSummary.out'), 'w', encoding='utf-8') as f:
with open(os.path.join(
MAGE_TEST_SET_ROOT, 'testSummary.out'), 'w', encoding='utf-8'
) as f:
f.write(test_report_details_string)
f.write('\n')
# ------------------------------------------------------------------------
if debug:

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,16 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Copying input files.'
test_inputs_dir="{{ mage_test_root }}/unit_test_inputs"
cp "${test_inputs_dir}/bcwind.h5" .
cp "${test_inputs_dir}/geo_mpi.xml" .
cp "${test_inputs_dir}/lfmD.h5" .
cp "${test_inputs_dir}/raijuconfig.h5" .
echo 'Generating data for testing.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
$MPICOMMAND ./voltron_mpi.x cmiD_deep_8_genRes.xml >& cmiD_deep_8_genRes.out

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running non-MPI test cases.'
./caseTests >& caseTests.out
echo 'Non-MPI test cases complete.'

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running GAMERA tests.'
date
./gamTests >& gamTests.out

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running VOLTRON MPI tests.'
date
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -40,6 +43,7 @@ else
module load conda
fi
conda activate {{ conda_environment }}
echo "The active conda environment is ${CONDA_DEFAULT_ENV}."
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
@@ -51,6 +55,13 @@ export BRANCH_OR_COMMIT={{ branch_or_commit }}
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
if [[ $? -eq 1 ]]; then
python $KAIJUHOME/testingScripts/send_slack_message.py "Unit test build failed in `pwd`!"
exit 1
fi
echo 'Generating unit test report.'
python $KAIJUHOME/testingScripts/unitTestReport.py {{ report_options }} >& unitTestReport.out