Merge branch 'development' into localdashfix

This commit is contained in:
Jeff Garretson
2025-11-04 14:28:24 -06:00
17 changed files with 988 additions and 685 deletions

View File

@@ -1,5 +1,6 @@
cmake_minimum_required(VERSION 3.20.2)
project(Kaiju Fortran)
project(Kaiju Fortran C)
#K: Adding C to the project languages to work around issues with the most recent HDF library (10/13/25)
# add and search for pfunit (fingers crossed)
list(APPEND CMAKE_PREFIX_PATH "./external")

View File

@@ -83,7 +83,12 @@ if(CMAKE_Fortran_COMPILER_ID MATCHES Intel)
#Base
string(APPEND CMAKE_Fortran_FLAGS " -fPIC -fpconstant")
#Production
set(PROD "-align array64byte -align rec32byte -no-prec-div -fast-transcendentals")
set(PROD "-align array64byte -align rec32byte -no-prec-div")
if (CMAKE_Fortran_COMPILER_VERSION VERSION_LESS 23.1)
#-fast-transcendentals was removed in newer ifx releases, so only append it for compilers older than 23.1
string(APPEND PROD " -fast-transcendentals")
endif()
#Production with Debug Info
set(PRODWITHDEBUGINFO "-traceback -debug all -align array64byte -align rec32byte -no-prec-div -fast-transcendentals")
#Debug
@@ -123,11 +128,24 @@ if(CMAKE_Fortran_COMPILER_ID MATCHES Intel)
endif()
string(APPEND PROD " -march=core-avx2")
string(APPEND PRODWITHDEBUGINFO " -march=core-avx2")
elseif (HOST MATCHES stampede3)
message("You're on Stampede3!")
if (ENABLE_MKL)
string(APPEND CMAKE_Fortran_FLAGS " -qmkl")
endif()
string(APPEND PROD " -xCORE-AVX512")
string(APPEND PRODWITHDEBUGINFO " -xCORE-AVX512")
endif()
#Check Intel Fortran version
if(NOT ALLOW_INVALID_COMPILERS AND CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER "2021.9")
message(FATAL_ERROR "Intel Fortran compilers newer than 2023 (version 2021.8) are not supported. Set the ALLOW_INVALID_COMPILERS variable to ON to force compilation at your own risk.")
if(NOT ALLOW_INVALID_COMPILERS AND CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER "2021.9" AND CMAKE_Fortran_COMPILER_VERSION VERSION_LESS "2025.1")
message(FATAL_ERROR "Intel OneAPI compilers between 2022-2024 have compiler bugs which cause weird numerical errors in our code. You can set the ALLOW_INVALID_COMPILERS variable to ON to force compilation at your own risk. You'll probably get what you deserve.")
endif()
#Check for MKL + intel25
if (ENABLE_MKL AND (CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER "2024"))
message(WARNING "Intel OneAPI MKL has been found to fail in weird ways and should probably be avoided. But hey, do what you want. I'm a warning message, not a cop.")
endif()
elseif(CMAKE_Fortran_COMPILER_ID MATCHES GNU)

View File

@@ -491,8 +491,9 @@ module ringutils
!Map i to itself
ip = i
!Next do k, map via periodicity
!NOTE: This is assuming you have all
!Next do k, map via periodicity. Have to do k first otherwise have to deal w/ right hand becoming left
!NOTE: This is assuming you have all k cells (ie, no mpi decomp in KDIR)
if (k < Grid%ks) then
kp = k + Np
elseif (k > Grid%ke) then
@@ -501,18 +502,19 @@ module ringutils
kp = k
endif
!Finally do j
!Now handle ip,j,kp => ip,jp,kp
jp = j ! default value
if ( Model%Ring%doS .and. (j<Grid%js) ) then
!js-1 => js
jp = Grid%js + (Grid%js-j) - 1
kp = WrapK(k,Np)
kp = WrapK(kp,Np)
endif
if ( Model%Ring%doE .and. (j>Grid%je) ) then
!je+1 => je
jp = Grid%je - (j-Grid%je) + 1
kp = WrapK(k,Np)
kp = WrapK(kp,Np)
endif
end subroutine lfmIJKcc

View File

@@ -79,13 +79,11 @@ module imag2mhd_interface
!Below inner boundary, do dipole projection
isGoodCC(i,j,k) = .true.
xyz = Gr%xyzcc(i,j,k,:) !Gamera grid center
call Proj2Rad(xyz,Rion,x1,x2)
Gr%Gas0(i,j,k,PROJLAT) = x1
call NHProj(xyz,x1,x2)
Gr%Gas0(i,j,k,PROJLAT) = x1 !Must project to NH
Gr%Gas0(i,j,k,PROJLON) = x2
else
!Get value from xyzsquish
if ( all(vApp%chmp2mhd%isGood(i:i+1,j:j+1,k:k+1)) ) then
!All values are good, so just do this thing
call SquishCorners(vApp%chmp2mhd%xyzSquish(i:i+1,j:j+1,k:k+1,1),Qs)
@@ -123,7 +121,7 @@ module imag2mhd_interface
if (i < Gr%is) then
!Use dipole projection
xyz = Gr%xyz(i,j,k,:) !Gamera grid corner
call Proj2Rad(xyz,Rion,x1,x2)
call NHProj(xyz,x1,x2)
isG = .true.
else
x1 = vApp%chmp2mhd%xyzSquish(i,j,k,1)
@@ -284,41 +282,43 @@ module imag2mhd_interface
real(rp), intent(inout) :: Q(Gr%isg:Gr%ieg,Gr%jsg:Gr%jeg,Gr%ksg:Gr%keg)
integer :: i,j,k,ip,jp,kp
logical :: isActive
!!$OMP PARALLEL DO default(shared) collapse(2) &
!!$OMP private(i,j,k,isActive,ip,jp,kp)
! this causes a race condition copying values between ghost cells
! probably a false positive since some of the cells are just copying
! values onto themselves, but easier to remove for now
!$OMP PARALLEL DO default(shared) collapse(2) &
!$OMP private(i,j,k,ip,jp,kp)
do k=Gr%ksg,Gr%keg
do j=Gr%jsg,Gr%jeg
do i=Gr%isg,Gr%ieg
isActive = (j >= Gr%js) .and. (j <= Gr%je) .and. &
(k >= Gr%ks) .and. (k <= Gr%ks)
if(.not. isActive) then
!If still here map this ghost to active and set value based on active
if (.not. isPhysical(Model,Gr,i,j,k)) then
!This is a geometric ghost so we can map it to a physical cell
call lfmIJKcc(Model,Gr,i,j,k,ip,jp,kp)
Q(i,j,k) = Q(ip,jp,kp)
endif
endif !isPhys
enddo
enddo !j
enddo !k
end subroutine FillGhostsCC
!Project xyz along dipole to R0 and return lat (x1) and lon (x2)
subroutine Proj2Rad(xyz,R0,x1,x2)
real(rp), intent(in ) :: xyz(NDIM), R0
!Checks if cell is "physical" as opposed to "geometric".
!Ie, i ghosts are physical but j/k ghosts are geometric (they point to other physical cells)
function isPhysical(Model,Gr,i,j,k)
type(Model_T), intent(in) :: Model
type(Grid_T) , intent(in) :: Gr
integer, intent(in) :: i,j,k
logical :: isPhysical
isPhysical = (j >= Gr%js) .and. (j <= Gr%je) .and. &
(k >= Gr%ks) .and. (k <= Gr%ke)
end function isPhysical
!Get azimuth and invariant latitude
subroutine NHProj(xyz,x1,x2)
real(rp), intent(in ) :: xyz(NDIM)
real(rp), intent(out) :: x1,x2
real(rp), dimension(NDIM) :: xyz0
xyz0 = DipoleShift(xyz,R0)
x1 = asin(xyz0(ZDIR)/R0) !Lat
x2 = katan2(xyz0(YDIR),xyz0(XDIR)) !katan => [0,2pi] instead of [-pi,pi]
end subroutine Proj2Rad
x1 = InvLatitude(xyz)
x2 = katan2(xyz(YDIR),xyz(XDIR)) !katan => [0,2pi] instead of [-pi,pi]
end subroutine NHProj
end subroutine

View File

@@ -86,7 +86,7 @@ module gamCouple_mpi_G2V
! initialize F08 MPI objects
App%couplingComm = MPI_COMM_NULL
App%zeroArraytypes = (/ MPI_DATATYPE_NULL /)
App%zeroArraytypes(:) = (/ MPI_INTEGER /) ! = (/ MPI_DATATYPE_NULL /)
! split voltron helpers off of the communicator
! split couplingPoolComm into a communicator with only the non-helper voltron rank

View File

@@ -559,13 +559,13 @@ module gamCouple_mpi_V2G
subroutine sendGameraCplDataMpi(gCplApp, CouplingTargetT)
class(gamCouplerMpi_volt_T), intent(inout) :: gCplApp
real(rp), intent(in) :: CouplingTargetT
real(rp), intent(inout) :: CouplingTargetT
call sendShallowCplDataMpi(gCplApp)
if(gCplApp%doDeep) call sendDeepCplDataMpi(gCplApp)
call sendCplTimeMpi(gCplApp, CouplingTargetT)
end subroutine
end subroutine sendGameraCplDataMpi
subroutine sendShallowCplDataMpi(gCplApp)
class(gamCouplerMpi_volt_T), intent(inout) :: gCplApp
@@ -628,14 +628,14 @@ module gamCouple_mpi_V2G
subroutine sendCplTimeMpi(gCplApp, CouplingTargetT)
class(gamCouplerMpi_volt_T), intent(inout) :: gCplApp
real(rp), intent(in) :: CouplingTargetT
real(rp), intent(inout) :: CouplingTargetT
integer :: ierr
! Send Target Time for next coupling
call mpi_bcast(CouplingTargetT,1,MPI_MYFLOAT, gCplApp%myRank, gCplApp%couplingComm, ierr)
end subroutine
end subroutine sendCplTimeMpi
end module

View File

@@ -107,7 +107,7 @@ module volthelpers_mpi
! chimp data update functions
subroutine sendChimpStateData(ebState, vHelpComm)
type(ebState_T), intent(in) :: ebState
type(ebState_T), intent(inout) :: ebState
type(MPI_Comm), intent(in) :: vHelpComm
integer :: ierr, length
@@ -195,7 +195,7 @@ module volthelpers_mpi
call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
end if
end subroutine
end subroutine sendChimpStateData
subroutine recvChimpStateData(ebState, vHelpComm)
type(ebState_T), intent(inout) :: ebState
@@ -272,7 +272,7 @@ module volthelpers_mpi
end subroutine
subroutine sendChimpUpdate(vApp)
type(voltAppMpi_T), intent(in) :: vApp
type(voltAppMpi_T), intent(inout) :: vApp
integer :: ierr, length
character( len = MPI_MAX_ERROR_STRING) :: message
@@ -317,7 +317,7 @@ module volthelpers_mpi
call mpi_Abort(MPI_COMM_WORLD, 1, ierr)
end if
end subroutine
end subroutine sendChimpUpdate
subroutine recvChimpUpdate(vApp)
type(voltAppMpi_T), intent(inout) :: vApp
@@ -374,14 +374,15 @@ module volthelpers_mpi
type(voltAppMpi_T), intent(in) :: vApp
integer, intent(in) :: rType
integer :: ierr
integer :: ierr,wtf
type(MPI_Request) :: helpReq
wtf = rType
! async to match waiting helper nodes
call mpi_Ibcast(rType, 1, MPI_INTEGER, 0, vApp%vHelpComm, helpReq, ierr)
call mpi_Ibcast(wtf, 1, MPI_INTEGER, 0, vApp%vHelpComm, helpReq, ierr)
call mpi_wait(helpReq, MPI_STATUS_IGNORE, ierr)
end subroutine
end subroutine vhRequestType
subroutine vhReqStep(vApp)
type(voltAppMpi_T), intent(inout) :: vApp

View File

@@ -9,8 +9,6 @@
#PBS -j oe
#PBS -m abe
# This script just builds the MAGE software.
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'

View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python
"""Send a message to Slack.
Send a message to Slack.
Authors
-------
Eric Winter
"""
# Import standard modules.
import datetime
# import glob
import os
import sys
# # Import 3rd-party modules.
# Import project modules.
import common
# Program constants
# Program description.
DESCRIPTION = "Send a message to Slack."
# # Root of directory tree for this set of tests.
# MAGE_TEST_SET_ROOT = os.environ['MAGE_TEST_SET_ROOT']
# # Directory for unit tests
# UNIT_TEST_DIRECTORY = os.path.join(MAGE_TEST_SET_ROOT, 'unitTest')
# # glob pattern for naming unit test directories
# UNIT_TEST_DIRECTORY_GLOB_PATTERN = 'unitTest_*'
# # Name of build subdirectory containing binaries
# BUILD_BIN_DIR = 'bin'
# # Name of file containing job IDs for each unit test directory.
# JOB_ID_LIST_FILE = 'jobs.txt'
def create_command_line_parser():
    """Create the command-line argument parser.

    Create the parser for command-line arguments. The parser is built on
    top of the shared parser from the project `common` module, which
    supplies the standard options (debug, loud, slack_on_fail, test,
    verbose) read by send_slack_message().

    Parameters
    ----------
    None

    Returns
    -------
    parser : argparse.ArgumentParser
        Command-line argument parser for this script.

    Raises
    ------
    None
    """
    parser = common.create_command_line_parser(DESCRIPTION)
    # nargs="?" makes the positional optional; without it argparse
    # ignores `default` for a positional, so the advertised default
    # ("") could never apply.
    parser.add_argument(
        "message",
        nargs="?",
        default="",
        help="Message to send to Slack (default: %(default)s)"
    )
    return parser
def send_slack_message(args: dict = None):
    """Send a message to Slack.

    Send a message to Slack using the shared client helpers in the
    project `common` module.

    Parameters
    ----------
    args : dict
        Dictionary of program options. Must contain the keys "debug",
        "loud", "slack_on_fail", "test", "verbose", and "message".

    Returns
    -------
    None

    Raises
    ------
    KeyError
        If any of the expected option keys is missing from args.
    """
    # Local convenience variables. The currently-unused options are
    # still read so a malformed args dict fails fast with a KeyError.
    debug = args["debug"]
    be_loud = args["loud"]
    slack_on_fail = args["slack_on_fail"]
    is_test = args["test"]
    verbose = args["verbose"]
    message = args["message"]

    # ------------------------------------------------------------------------

    if debug:
        print(f"Starting {sys.argv[0]} at {datetime.datetime.now()}")
        print(f"Current directory is {os.getcwd()}")

    # ------------------------------------------------------------------------

    # Create the Slack client and send the message.
    slack_client = common.slack_create_client()
    slack_response_summary = common.slack_send_message(
        slack_client, message, is_test=is_test
    )
    # Echo the response summary in debug mode, matching the other
    # testing scripts that report slack_response_summary.
    if debug:
        print(f"slack_response_summary = {slack_response_summary}")

    # ------------------------------------------------------------------------

    if debug:
        print(f"Ending {sys.argv[0]} at {datetime.datetime.now()}")
def main():
    """Begin main program.

    Entry point: parse the command line, then hand the resulting
    options to the program logic.

    Parameters
    ----------
    None

    Returns
    -------
    None

    Raises
    ------
    None
    """
    # Build the argument parser and read the command line.
    cli_parser = create_command_line_parser()
    cli_args = cli_parser.parse_args()
    if cli_args.debug:
        print(f"args = {cli_args}")

    # ------------------------------------------------------------------------

    # Invoke the program logic. vars() converts the argparse Namespace
    # into the plain dict expected by send_slack_message().
    send_slack_message(vars(cli_args))


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,44 @@
#!/bin/bash
# PBS batch script (Jinja2 template) that builds the MAGE/kaiju software
# for the unit-test suite. The {{ ... }} placeholders are filled in by
# the test-driver scripts before the job is submitted.
#PBS -N {{ job_name }}
#PBS -A {{ account }}
#PBS -q {{ queue }}
#PBS -l job_priority={{ job_priority }}
#PBS -l select=1:ncpus=128
#PBS -l walltime={{ walltime }}
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
module --force purge
{%- for module in modules %}
module load {{ module }}
{%- endfor %}
echo 'The currently loaded modules are:'
module list
echo 'The active environment variables are:'
printenv
# Reuse prebuilt pFUnit support libraries rather than rebuilding them;
# the path is specific to the ifort-23/mpich Derecho build.
echo 'Copying pFUnit binaries.'
pfunit_dir="{{ mage_test_root }}/pfunit/pFUnit-4.2.0/ifort-23-mpich-derecho"
kaiju_external_dir="{{ kaijuhome }}/external"
cp -rp "${pfunit_dir}/FARGPARSE-1.1" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/GFTL-1.3" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/GFTL_SHARED-1.2" "${kaiju_external_dir}/"
cp -rp "${pfunit_dir}/PFUNIT-4.2" "${kaiju_external_dir}/"
# Build the code.
# Configure then compile; both commands come from the template and are
# eval'd so their output redirections (>& cmake.out / make.out) apply.
cmd="{{ cmake_cmd }} >& cmake.out"
echo $cmd
eval $cmd
cmd="{{ make_cmd }} >& make.out"
echo $cmd
eval $cmd
echo "Job $PBS_JOBID ended at `date` on `hostname` in directory `pwd`."

File diff suppressed because it is too large Load Diff

View File

@@ -111,8 +111,7 @@ def main():
print(f"Checking unit test results in {unit_test_directory}.")
# Move to the directory containing the unit test results.
path = os.path.join(UNIT_TEST_DIRECTORY, unit_test_directory,
BUILD_BIN_DIR)
path = os.path.join(UNIT_TEST_DIRECTORY, unit_test_directory)
if debug:
print(f"path = {path}")
os.chdir(path)
@@ -136,19 +135,27 @@ def main():
# NOTE: This needs to be reorganized.
# Compute the names of the job log files.
job_file_0 = f"genTestData.o{job_ids[0]}" # 0 OKs
job_file_1 = f"runCaseTests.o{job_ids[1]}" # 2 OKs
job_file_2 = f"runNonCaseTests1.o{job_ids[2]}" # 6 OKs
job_file_3 = f"runNonCaseTests2.o{job_ids[3]}" # 1 OK
if debug:
print(f"job_file_0 = {job_file_0}")
print(f"job_file_1 = {job_file_1}")
print(f"job_file_2 = {job_file_2}")
print(f"job_file_3 = {job_file_3}")
# 0 OKs
job_file_build = f"../unitTest-build.o{job_ids[0]}"
# 0 OKs
job_file_genTestData = f"../unitTest-genTestData.o{job_ids[1]}"
# 2 OKs
job_file_caseTests = f"../unitTest-caseTests.o{job_ids[2]}"
# 6 OKs
job_file_noncaseTests1 = f"../unitTest-noncaseTests1.o{job_ids[3]}"
# 1 OK
job_file_noncaseTests2 = f"../unitTest-noncaseTests2.o{job_ids[4]}"
# Combine the results of each test log file.
os.chdir("bin")
bigFile = []
job_files = [job_file_0, job_file_1, job_file_2, job_file_3]
job_files = [
job_file_build,
job_file_genTestData,
job_file_caseTests,
job_file_noncaseTests1,
job_file_noncaseTests2,
]
for job_file in job_files:
with open(job_file, 'r', encoding='utf-8') as f:
bigFile += f.readlines()
@@ -234,12 +241,14 @@ def main():
)
if debug:
print(f"slack_response_summary = {slack_response_summary}")
# Also write a summary file to the root folder of this test
with open(os.path.join(MAGE_TEST_SET_ROOT,'testSummary.out'), 'w', encoding='utf-8') as f:
with open(os.path.join(
MAGE_TEST_SET_ROOT, 'testSummary.out'), 'w', encoding='utf-8'
) as f:
f.write(test_report_details_string)
f.write('\n')
# ------------------------------------------------------------------------
if debug:

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,16 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Copying input files.'
test_inputs_dir="{{ mage_test_root }}/unit_test_inputs"
cp "${test_inputs_dir}/bcwind.h5" .
cp "${test_inputs_dir}/geo_mpi.xml" .
cp "${test_inputs_dir}/lfmD.h5" .
cp "${test_inputs_dir}/raijuconfig.h5" .
echo 'Generating data for testing.'
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"
$MPICOMMAND ./voltron_mpi.x cmiD_deep_8_genRes.xml >& cmiD_deep_8_genRes.out

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running non-MPI test cases.'
./caseTests >& caseTests.out
echo 'Non-MPI test cases complete.'

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running GAMERA tests.'
date
./gamTests >& gamTests.out

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -28,6 +31,9 @@ export KMP_STACKSIZE=128M
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
echo 'Running VOLTRON MPI tests.'
date
MPICOMMAND="mpiexec $KAIJUHOME/scripts/preproc/pinCpuCores.sh"

View File

@@ -9,6 +9,9 @@
#PBS -j oe
#PBS -m abe
# Abort script on any error.
set -e
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
echo 'Loading modules.'
@@ -40,6 +43,7 @@ else
module load conda
fi
conda activate {{ conda_environment }}
echo "The active conda environment is ${CONDA_DEFAULT_ENV}."
echo 'Setting up MAGE environment.'
source {{ kaijuhome }}/scripts/setupEnvironment.sh
@@ -51,6 +55,13 @@ export BRANCH_OR_COMMIT={{ branch_or_commit }}
echo 'The active environment variables are:'
printenv
# Move to the directory containing the compiled code.
cd bin
if [[ $? -eq 1 ]]; then
python $KAIJUHOME/testingScripts/send_slack_message.py "Unit test build failed in `pwd`!"
exit 1
fi
echo 'Generating unit test report.'
python $KAIJUHOME/testingScripts/unitTestReport.py {{ report_options }} >& unitTestReport.out