Mirror of https://github.com/JHUAPL/kaiju.git (synced 2026-01-09 15:17:56 -05:00)
Merged development into dev_citation
.gitignore (vendored): 4 changes
@@ -8,11 +8,15 @@ external/FARGPARSE-*/
external/GFTL-*/
external/GFTL_SHARED-*/
external/PFUNIT-*/

# skip F90 files in the tests folder, except in specific subfolders
tests/*/*.F90
!tests/helperCode/*.F90
!tests/helperCode_mpi/*.F90

# any local automated tests that users have run
test_runs/

# Pre-compile generated files
src/base/git_info.F90
@@ -1,10 +1,6 @@
cmake_minimum_required(VERSION 3.20.2)
project(Kaiju Fortran)

# use the new version of CMP0074, this tells cmake to use <PACKAGE>_ROOT environment
# variables when looking for packages
cmake_policy(SET CMP0074 NEW)

# add and search for pfunit (fingers crossed)
list(APPEND CMAKE_PREFIX_PATH "./external")
find_package(PFUNIT QUIET)
@@ -161,22 +157,6 @@ add_executable(gamera.x src/drivers/gamerax.F90 ${GAMIC})
target_link_libraries(gamera.x gamlib baselib)
add_dependencies(gamera gamera.x)

-#-------------
-#Kaiju: Gamera helio
-message("Adding Gamera Helio module ...")
-
-#Add source
-#add_subdirectory(src/gamera)
-
-#Print gamera helio info
-#message("\tBricksize is ${bricksize}")
-message("\tIC file is ${GAMHELIC}")
-add_custom_target(gamhelio ALL)
-message("\tAdding executable gamhelio.x")
-add_executable(gamhelio.x src/drivers/gamerax.F90 ${GAMHELIC})
-target_link_libraries(gamhelio.x gamlib baselib)
-add_dependencies(gamera gamhelio.x)
-
#-------------
#Kaiju: Dragon King
message("Adding dragonking module ...")
@@ -238,6 +218,18 @@ add_executable(voltron.x src/drivers/voltronx.F90)
target_link_libraries(voltron.x baselib voltlib gamlib dragonkinglib remixlib chimplib raijulib)
add_dependencies(voltron voltron.x)

+#-------------
+#Kaiju: Gamera helio
+message("Adding Gamera Helio module ...")
+#Print gamera helio info
+message("\tBricksize is ${bricksize}")
+message("\tIC file is ${GAMHELIC}")
+add_custom_target(gamhelio ALL)
+message("\tAdding executable gamhelio.x")
+add_executable(gamhelio.x src/drivers/gamerax.F90 ${GAMHELIC})
+target_link_libraries(gamhelio.x gamlib baselib)
+add_dependencies(gamera gamhelio.x)
+
if(ENABLE_MPI)
#-------------
#Kaiju: Base MPI
@@ -272,6 +264,7 @@ if(ENABLE_MPI)
#-------------
#Kaiju: Gamera Helio MPI
message("Adding Gamera Helio MPI module ...")
+message("\tIC file is ${GAMHELIC}")
add_custom_target(gamhelio_mpi ALL)
message("\tAdding executable gamhelio_mpi.x")
add_executable(gamhelio_mpi.x src/drivers/gamera_mpix.F90 ${GAMHELIC})
@@ -9,18 +9,18 @@
  },
  "wsa_file": {
    "LEVEL": "BASIC",
-    "prompt": "Path to WSA solar wind file to use",
+    "prompt": "Path to WSA boundary condition file to use",
    "default": "wsa.fits"
  },
  "start_date": {
    "LEVEL": "BASIC",
    "prompt": "Start date for simulation (yyyy-mm-ddThh:mm:ss)",
-    "default": "2001-06-01T23:00:00"
+    "default": "2017-08-02T19:44:23"
  },
  "stop_date": {
    "LEVEL": "BASIC",
    "prompt": "Stop date for simulation (yyyy-mm-ddThh:mm:ss)",
-    "default": "2001-06-02T01:00:00"
+    "default": "2017-08-02T21:44:23"
  },
  "use_segments": {
    "LEVEL": "BASIC",
@@ -383,11 +383,6 @@
    "prompt": "Cadence for status updates on screen in simulated hours",
    "default": "5.0"
  },
-  "dtCon": {
-    "LEVEL": "EXPERT",
-    "prompt": "Cadence for status updates on screen in simulated hours",
-    "default": "5.0"
-  },
  "doTimer": {
    "LEVEL": "EXPERT",
    "prompt": "Code timing output enabled",
@@ -28,6 +28,7 @@ import json
import os
import sys
import subprocess
+import math

# Import 3rd-party modules.
import netCDF4
@@ -356,7 +357,7 @@ def prompt_user_for_run_options(args):
    for on in ["use_segments"]:
        od[on]["default"] = "Y"
        o[on] = makeitso.get_run_option(on, od[on], mode)
-    if o["use_segments"] == "Y":
+    if o["use_segments"].upper() == "Y":
        for on in ["segment_duration"]:
            o[on] = makeitso.get_run_option(on, od[on], mode)
    else:
@@ -364,7 +365,7 @@ def prompt_user_for_run_options(args):

    # Compute the number of segments based on the simulation duration and
    # segment duration, add 1 if there is a remainder.
-    if o["use_segments"] == "Y":
+    if o["use_segments"].upper() == "Y":
        num_segments = simulation_duration/float(o["segment_duration"])
        if num_segments > int(num_segments):
            num_segments += 1
@@ -564,7 +565,8 @@ def main():

    segment_duration = float(engage_options["simulation"]["segment_duration"])
    makeitso_options["voltron"]["time"]["tFin"] = int((t1-t0).total_seconds())
-    makeitso_options["pbs"]["num_segments"] = str(int((t1-t0).total_seconds()/segment_duration))
+    num_segments = math.ceil((t1-t0).total_seconds()/segment_duration)
+    makeitso_options["pbs"]["num_segments"] = str(num_segments)
    select2 = 1 + int(makeitso_options["pbs"]["num_helpers"])
    makeitso_options["pbs"]["select2"] = str(select2)
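The two hunks above fix the same off-by-one in different ways: plain int() truncation silently drops a partial final segment, while math.ceil gives the remainder its own job. A minimal sketch of the difference, using hypothetical durations rather than values read from the engage options:

import math

total_seconds = 7200.0       # hypothetical 2-hour simulation
segment_duration = 3300.0    # hypothetical 55-minute segments

print(int(total_seconds / segment_duration))        # 2 -- truncates, losing the last 600 s
print(math.ceil(total_seconds / segment_duration))  # 3 -- the partial remainder gets a segment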
@@ -652,7 +654,9 @@ def main():

    # Create the PBS job scripts.
    pbs_scripts, submit_all_jobs_script = create_pbs_scripts(engage_options, makeitso_options, makeitso_pbs_scripts, tiegcm_options, tiegcm_inp_scripts, tiegcm_pbs_scripts)
-    print(f"pbs_scripts = {pbs_scripts}")
+    print(f"GR_pbs_scripts = {makeitso_pbs_scripts}")
+    print(f"Tiegcm_pbs_scripts = {tiegcm_pbs_scripts}")
+    print(f"GTR_pbs_scripts = {pbs_scripts}")
    print(f"submit_all_jobs_script = {submit_all_jobs_script}")
scripts/makeitso/makeitso.py: 60 changes (Executable file → Normal file)
@@ -33,6 +33,7 @@ import datetime
import json
import os
import subprocess
+import math

# Import 3rd-party modules.
import h5py
@@ -419,7 +420,7 @@ def prompt_user_for_run_options(option_descriptions: dict, args: dict):
    # condition file can be generated.
    for on in ["bcwind_available"]:
        o[on] = get_run_option(on, od[on], mode)
-    if o["bcwind_available"] == "Y":
+    if o["bcwind_available"].upper() == "Y":
        for on in ["bcwind_file"]:
            o[on] = get_run_option(on, od[on], mode)
            # Fetch the start and stop date from the bcwind file.
@@ -889,26 +890,28 @@ def create_ini_files(options: dict, args: dict):
    # Set default value for padding to tFin for coupling.
    tfin_padding = 0.0
    # Engage modifications to parameters.
-    # If TIEGCM coupling is specified, warmup segments are calculated
-    # based on gr_warm_up_time and segment duration. If the segment
-    # duration is not evenly divisible by gr_warm_up_time, the
-    # warmup segment duration is set to gr_warm_up_time/4.
-    # The number of warmup segments is set to gr_warm_up_time/
-    # warmup_segment_duration.
+    # If TIEGCM coupling is specified, calculate warmup segments based
+    # on gr_warm_up_time and segment_duration.
+    # If gr_warm_up_time is an exact multiple of segment_duration,
+    # use segment_duration for each warmup segment.
+    # If gr_warm_up_time is less than segment_duration, use
+    # gr_warm_up_time as the duration for a single warmup segment.
+    # If gr_warm_up_time is greater than segment_duration but not an
+    # exact multiple, use segment_duration and round up the number of segments.
    if "coupling" in args:
        coupling = args["coupling"]
        gr_warm_up_time = float(coupling["gr_warm_up_time"])
        segment_duration = float(options["simulation"]["segment_duration"])
-        i_last_warmup_ini = (gr_warm_up_time/segment_duration)
-        if i_last_warmup_ini == int(i_last_warmup_ini):
+        simulation_duration = float(options["voltron"]["time"]["tFin"])
+        if gr_warm_up_time % segment_duration == 0:
            warmup_segment_duration = segment_duration
-        else:
-            warmup_segment_duration = gr_warm_up_time/4
-            if warmup_segment_duration != int(warmup_segment_duration):
-                print("Error: gr_warm_up_time is not evenly divisible by 4.")
-                raise ValueError("Invalid gr_warm_up_time value.")
-            i_last_warmup_ini = (gr_warm_up_time/warmup_segment_duration)
-            i_last_warmup_ini = int(i_last_warmup_ini)
+            i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
+        elif gr_warm_up_time < segment_duration:
+            warmup_segment_duration = gr_warm_up_time
+            i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
+        elif gr_warm_up_time > segment_duration:
+            warmup_segment_duration = segment_duration
+            i_last_warmup_ini = int(math.ceil(gr_warm_up_time/segment_duration))
        # Add padding to tFin for coupling.
        if coupling["tfin_delta"] == "T":
            tfin_coupling_padding = float(options["voltron"]["coupling"]["dtCouple"]) - 1
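A condensed sketch of the new warmup branching as a standalone function (the names mirror the diff; the example inputs are hypothetical):

import math

def warmup_plan(gr_warm_up_time, segment_duration):
    # Exact multiple: every warmup segment reuses segment_duration.
    if gr_warm_up_time % segment_duration == 0:
        return segment_duration, int(gr_warm_up_time / segment_duration)
    # Shorter than one segment: a single warmup segment of the full warmup length.
    if gr_warm_up_time < segment_duration:
        return gr_warm_up_time, 1
    # Longer but not an exact multiple: keep segment_duration, round the count up.
    return segment_duration, math.ceil(gr_warm_up_time / segment_duration)

print(warmup_plan(3600.0, 1800.0))  # (1800.0, 2)
print(warmup_plan(900.0, 1800.0))   # (900.0, 1)
print(warmup_plan(2700.0, 1800.0))  # (1800.0, 2)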
@@ -984,9 +987,11 @@
    num_warmup_segments = i_last_warmup_ini
    # Create an .ini file for each simulation segment. Files for each
    # segment will be numbered starting with 1.
    print(f"Creating {options['pbs']['num_segments']} segments, "
          f"with {num_warmup_segments} warmup segments.")
-    for job in range(1, int(options["pbs"]["num_segments"]) + 1 - num_warmup_segments):
+    if "coupling" in args:
+        num_segments = math.ceil((simulation_duration - num_warmup_segments*warmup_segment_duration)/segment_duration)
+    else:
+        num_segments = int(options["pbs"]["num_segments"])
+    for job in range(1, num_segments + 1):
        opt = copy.deepcopy(options)  # Need a copy of options
        runid = opt["simulation"]["job_name"]
        # NOTE: This naming scheme supports a maximum of 99 segments.
@@ -1009,7 +1014,7 @@ def create_ini_files(options: dict, args: dict):
        if "coupling" in args:
            opt["voltron"]["coupling"]["doGCM"] = doGCM
        # tFin padding different for last segment.
-        if job == int(options["pbs"]["num_segments"]) - num_warmup_segments:
+        if job == num_segments:
            tfin_padding = -1.0
        else:
            # Subtract 1 from tFin padding for coupling to offset the +1.0 for the restart file done above.
@@ -1210,9 +1215,20 @@ def create_pbs_scripts(xml_files: list, options: dict, args: dict):
    coupling = args["coupling"]
    gr_warm_up_time = float(coupling["gr_warm_up_time"])
    segment_duration = float(options["simulation"]["segment_duration"])
-    i_last_warmup_pbs_script = int(gr_warm_up_time/segment_duration)
+    simulation_duration = float(options["voltron"]["time"]["tFin"])
+    if gr_warm_up_time % segment_duration == 0:
+        warmup_segment_duration = segment_duration
+        i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
+    elif gr_warm_up_time < segment_duration:
+        warmup_segment_duration = gr_warm_up_time
+        i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
+    elif gr_warm_up_time > segment_duration:
+        warmup_segment_duration = segment_duration
+        i_last_warmup_ini = int(math.ceil(gr_warm_up_time/segment_duration))
+    num_warmup_segments = i_last_warmup_ini
+    #i_last_warmup_pbs_script = int(gr_warm_up_time/segment_duration)
    spinup_pbs_scripts.append(pbs_scripts[0])  # Spinup script is first
-    warmup_pbs_scripts = pbs_scripts[1:i_last_warmup_pbs_script + 1]  # Warmup scripts
+    warmup_pbs_scripts = pbs_scripts[1:num_warmup_segments + 1]  # Warmup scripts
    # Return the paths to the PBS scripts.
    return pbs_scripts, submit_all_jobs_script, spinup_pbs_scripts, warmup_pbs_scripts
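The slicing at the end of that hunk partitions the ordered script list purely by position: index 0 is the spinup job and the next num_warmup_segments entries are warmup jobs. A small sketch with placeholder file names:

pbs_scripts = ["spinup.pbs", "warm1.pbs", "warm2.pbs", "seg1.pbs", "seg2.pbs"]  # hypothetical
num_warmup_segments = 2

spinup_pbs_scripts = [pbs_scripts[0]]                        # spinup script is first
warmup_pbs_scripts = pbs_scripts[1:num_warmup_segments + 1]  # the next N scripts
remaining_scripts = pbs_scripts[num_warmup_segments + 1:]    # ordinary segments

print(warmup_pbs_scripts)  # ['warm1.pbs', 'warm2.pbs']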
@@ -38,6 +38,7 @@ module clocks
      !Depth/parent, ie array entry of parent of this clock
      integer :: level=-1,parent=-1
      integer :: iTic=0,iToc=0 !Integer ticks
+     integer :: nCalls=0 !Number of tocs, or finished timer loops since cleaning
      real(rp) :: tElap=0.0 !Elapsed time
   end type Clock_T
@@ -50,6 +51,11 @@ module clocks
   interface readClock
      module procedure readClock_str, readClock_int
   end interface
+
+  !interface for reading number of calls to a clock
+  interface readNCalls
+     module procedure readNCalls_str, readNCalls_int
+  end interface

contains
@@ -171,6 +177,8 @@ module clocks
      wclk = real(kClocks(iblk)%iToc-kClocks(iblk)%iTic)/real(clockRate)

      kClocks(iblk)%tElap = kClocks(iblk)%tElap + wclk
+     kClocks(iblk)%nCalls = kClocks(iblk)%nCalls + 1
+
   end subroutine Toc

   !Reset clocks
@@ -179,6 +187,7 @@ module clocks

      do n=1,nclk
         kClocks(n)%tElap = 0
+        kClocks(n)%nCalls = 0
         ! if the clock is active, reset the tic to right now
         if(kClocks(n)%isOn) call Tic(kClocks(n)%cID, .true.)
      enddo
@@ -223,6 +232,39 @@ module clocks
      endif
   end function readClock_int

+  function readNCalls_str(cID) result(nc)
+     character(len=*), intent(in) :: cID
+
+     integer :: n,iblk
+     integer :: nc
+
+     iblk = 0
+     !Find timer
+     do n=1,nclk
+        if (toUpper(kClocks(n)%cID) == toUpper(cID)) then
+           !Found it, save ID
+           iblk = n
+        endif
+     enddo
+
+     nc = readNCalls_int(iblk)
+
+  end function readNCalls_str
+
+  function readNCalls_int(iblk) result(nc)
+     integer, intent(in) :: iblk
+
+     integer :: tmpToc
+     integer :: nc
+
+     if (iblk == 0) then
+        nc = 0
+     else
+        nc = kClocks(iblk)%nCalls
+     endif
+
+  end function readNCalls_int
+
   !Output clock data
   subroutine printClocks()
      integer :: n,l
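readNCalls follows the same two-layer pattern as readClock: the string version resolves a clock name to an index (case-insensitively), and the integer version does the actual read, with index 0 doubling as "not found" and returning 0. A rough Python analogue of that registry shape (not the module's actual API, just the lookup pattern):

class Clock:
    def __init__(self, cid):
        self.cid = cid
        self.n_calls = 0  # finished Tic/Toc cycles since the last clean

clocks = [Clock("Advance"), Clock("BCs")]  # hypothetical registry contents

def read_ncalls_int(iblk):
    # iblk == 0 plays the role of "clock not found" in the Fortran.
    return 0 if iblk == 0 else clocks[iblk - 1].n_calls

def read_ncalls_str(cid):
    iblk = 0
    for n, clk in enumerate(clocks, start=1):
        if clk.cid.upper() == cid.upper():
            iblk = n  # found it, save the index
    return read_ncalls_int(iblk)

print(read_ncalls_str("advance"))  # 0 until Toc() has incremented n_calls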
@@ -30,6 +30,7 @@ program voltron_mpix
   type(XML_Input_T) :: xmlInp
   real(rp) :: nextDT
   integer :: divideSize,i
+  logical :: doResetClocks = .false.

   ! initialize MPI
   !Set up MPI with or without thread support
@@ -206,12 +207,12 @@ program voltron_mpix
      if (vApp%IO%doTimerOut) then
         call printClocks()
      endif
-     call cleanClocks()
+     doResetClocks = .true.
   elseif (vApp%IO%doTimer(vApp%time)) then
      if (vApp%IO%doTimerOut) then
         call printClocks()
      endif
-     call cleanClocks()
+     doResetClocks = .true.
   endif

   !Data output
@@ -223,6 +224,12 @@ program voltron_mpix
   if (vApp%IO%doRestart(vApp%time)) then
      call resOutputV(vApp,vApp%gApp)
   endif
+
+  !Reset clocks last so data is available for all output
+  if (doResetClocks) then
+     call cleanClocks()
+     doResetClocks = .false.
+  endif

   call Toc("IO", .true.)
   call Toc("Omega", .true.)
@@ -257,11 +264,11 @@ program voltron_mpix

      !Timing info
      if (gApp%Model%IO%doTimerOut) call printClocks()
-     call cleanClocks()
+     doResetClocks = .true.

   elseif (gApp%Model%IO%doTimer(gApp%Model%t)) then
      if (gApp%Model%IO%doTimerOut) call printClocks()
-     call cleanClocks()
+     doResetClocks = .true.
   endif

   if (gApp%Model%IO%doOutput(gApp%Model%t)) then
@@ -274,6 +281,11 @@ program voltron_mpix
      call gApp%WriteRestart(gApp%Model%IO%nRes)
   endif

+  if (doResetClocks) then
+     call cleanClocks()
+     doResetClocks = .false.
+  endif
+
   call Toc("IO")
   call Toc("Omega", .true.)
end do
@@ -11,6 +11,7 @@ program voltronx

   type(voltApp_T) :: vApp
   real(rp) :: nextDT
+  logical :: doResetClocks = .false.

   call initClocks()
@@ -37,10 +38,10 @@ program voltronx
      call consoleOutputV(vApp,vApp%gApp)
      !Timing info
      if (vApp%IO%doTimerOut) call printClocks()
-     call cleanClocks()
+     doResetClocks = .true.
   elseif (vApp%IO%doTimer(vApp%time)) then
      if (vApp%IO%doTimerOut) call printClocks()
-     call cleanClocks()
+     doResetClocks = .true.
   endif

   !Data output
@@ -51,6 +52,11 @@ program voltronx
   if (vApp%IO%doRestart(vApp%time)) then
      call resOutputV(vApp,vApp%gApp)
   endif
+  !Reset clocks last
+  if (doResetClocks) then
+     call cleanClocks()
+     doResetClocks = .false.
+  endif

   call Toc("IO", .true.)
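Both drivers replace the immediate cleanClocks() with a doResetClocks flag so the counters survive until after data output: the console timing report and the HDF5 _perf_ variables then see the same interval. A schematic of the ordering, with stand-in function names for the driver's actual calls:

do_reset_clocks = False

def print_clocks():   # stand-in for printClocks()
    print("timing report")

def write_output():   # stand-in for the data/restart output calls
    print("output, including _perf_* variables")

def clean_clocks():   # stand-in for cleanClocks()
    print("counters zeroed")

def io_phase(timer_fired, output_due):
    global do_reset_clocks
    if timer_fired:
        print_clocks()
        do_reset_clocks = True   # defer the reset...
    if output_due:
        write_output()           # ...so output can still read the clocks
    if do_reset_clocks:
        clean_clocks()           # reset clocks last
        do_reset_clocks = False

io_phase(timer_fired=True, output_due=True)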
@@ -73,6 +73,7 @@ module gamapp
   subroutine stepGamera(gameraApp)
      class(gamApp_T), intent(inout) :: gameraApp

+     call Tic("Advance", .true.)
      !update the state variables to the next timestep
      call UpdateStateData(gameraApp)
@@ -82,11 +83,13 @@ module gamapp
      call Toc("DT")

      !Enforce BCs
-     call Tic("BCs")
+     call Tic("BCs", .true.)
      call EnforceBCs(gameraApp%Model,gameraApp%Grid,gameraApp%State)
      !Update Bxyz's
      call bFlux2Fld (gameraApp%Model,gameraApp%Grid,gameraApp%State%magFlux,gameraApp%State%Bxyz)
-     call Toc("BCs")
+     call Toc("BCs", .true.)
+
+     call Toc("Advance", .true.)

   end subroutine stepGamera
@@ -258,7 +258,7 @@ module gioH5
      type(State_T), intent(in) :: State
      character(len=*), intent(in) :: gStr

-     integer :: i,j,k,s
+     integer :: i,j,k,s,nClkSteps
      character(len=strLen) :: dID,VxID,VyID,VzID,PID
      integer iMin,iMax,jMin,jMax,kMin,kMax
@@ -539,6 +539,18 @@ module gioH5
      call AddOutVar(IOVars,"kzcsTOT",Model%kzcsTOT,uStr="kZCs",dStr="Total kZCs" )

+     !---------------------
+     !Performance metrics
+
+     nClkSteps = readNCalls('Advance')
+     call AddOutVar(IOVars,"_perf_stepTime",readClock(1)/nClkSteps)
+     call AddOutVar(IOVars,"_perf_mathTime", readClock('Gamera')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_bcTime", readClock('BCs')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_haloTime", readClock('Halos')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_voltTime", readClock('VoltSync')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_ioTime", readClock('IO')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_advanceTime", readClock('Advance')/nClkSteps)
+     !----------------------

      !Call user routine if defined
      if (associated(Model%HackIO)) then
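Each _perf_* value is an average per advance step: the accumulated time on a named clock divided by readNCalls('Advance'), the number of completed Tic/Toc cycles since the last clean. In sketch form (timer names from the diff, numbers hypothetical):

elapsed = {"Advance": 12.4, "BCs": 1.3, "Halos": 2.1}  # seconds on each clock (hypothetical)
n_clk_steps = 50  # completed Advance cycles, i.e. readNCalls('Advance')

perf = {f"_perf_{name}Time": t / n_clk_steps for name, t in elapsed.items()}
print(perf)  # e.g. {'_perf_AdvanceTime': 0.248, '_perf_BCsTime': 0.026, '_perf_HalosTime': 0.042}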
@@ -584,9 +584,9 @@ module gamapp_mpi
      character(len=strLen) :: BCID

      !Enforce BCs
-     call Tic("BCs")
+     call Tic("BCs", .true.)
      call EnforceBCs(gamAppMpi%Model,gamAppMpi%Grid,State)
-     call Toc("BCs")
+     call Toc("BCs", .true.)

      !Track timing for all gamera ranks to finish physical BCs
      ! Only synchronize when timing
@@ -597,10 +597,10 @@ module gamapp_mpi
      endif

      !Update ghost cells
-     call Tic("Halos")
+     call Tic("Halos", .true.)
      call HaloUpdate(gamAppMpi, State)
      call bFlux2Fld(gamAppMpi%Model, gamappMpi%Grid, State%magFlux, State%Bxyz) !Update Bxyz's
-     call Toc("Halos")
+     call Toc("Halos", .true.)

      !Track timing for all gamera ranks to finish halo comms
      ! Only synchronize when timing
@@ -611,6 +611,7 @@ module gamapp_mpi
      endif

      ! Re-apply periodic BCs last
+     call Tic("BCs", .true.)
      do i=1,gamAppMpi%Grid%NumBC
         if(allocated(gamAppMpi%Grid%externalBCs(i)%p)) then
            SELECT type(bc=>gamAppMpi%Grid%externalBCs(i)%p)
@@ -649,6 +650,7 @@ module gamapp_mpi
            endselect
         endif
      enddo
+     call Toc("BCs", .true.)

      !Track timing for all gamera ranks to finish periodic BCs
      ! Only synchronize when timing
@@ -666,6 +668,7 @@ module gamapp_mpi
      integer :: ierr,i
      real(rp) :: tmp

+     call Tic("Advance", .true.)
      !update the state variables to the next timestep
      call UpdateStateData(gamAppMpi)
|
||||
|
||||
!Update BCs MPI style
|
||||
call updateMpiBCs(gamAppMpi, gamAppmpi%State)
|
||||
call Toc("Advance", .true.)
|
||||
|
||||
end subroutine stepGamera_mpi
|
||||
|
||||
|
||||
@@ -34,21 +34,21 @@ module raijuAdvancer

      State%dt = dtCpl

-     call Tic("Pre-Advance")
+     call Tic("Pre-Advance",.true.)
      call raijuPreAdvance(Model, Grid, State)
-     call Toc("Pre-Advance")
+     call Toc("Pre-Advance",.true.)
      State%isFirstCpl = .false.

      ! Step
-     call Tic("AdvanceState")
+     call Tic("AdvanceState",.true.)
      call AdvanceState(Model, Grid, State)
-     call Toc("AdvanceState")
+     call Toc("AdvanceState",.true.)

      ! etas back to moments
-     call Tic("Moments Eval")
+     call Tic("Moments Eval",.true.)
-     call EvalMoments(Grid, State)
+     call EvalMoments(Grid, State, doAvgO=.true.)
-     call Toc("Moments Eval")
+     call Toc("Moments Eval",.true.)

   end subroutine raijuAdvance
@@ -154,7 +154,7 @@ module raijuIO
      logical, optional, intent(in) :: doGhostsO

      type(IOVAR_T), dimension(MAXIOVAR) :: IOVars
-     integer :: i,j,k,s
+     integer :: i,j,k,s, nClkSteps
      integer :: is, ie, js, je, ks, ke
      integer, dimension(4) :: outBnds2D
      logical :: doGhosts
@@ -424,8 +424,14 @@ module raijuIO
         deallocate(outTmp2D)
      endif

-     call WriteVars(IOVars,.true.,Model%raijuH5, gStr)
+     !Performance Metrics
+     nClkSteps = readNCalls('DeepUpdate')
+     call AddOutVar(IOVars, "_perf_stepTime", readClock(1)/nClkSteps)
+     call AddOutVar(IOVars, "_perf_preAdvance", readClock("Pre-Advance")/nClkSteps)
+     call AddOutVar(IOVars, "_perf_advanceState", readClock("AdvanceState")/nClkSteps)
+     call AddOutVar(IOVars, "_perf_moments", readClock("Moments Eval")/nClkSteps)
+
+     call WriteVars(IOVars,.true.,Model%raijuH5, gStr)

      ! Any extra groups to add
      if (Model%doLosses .and. Model%doOutput_3DLoss) then
@@ -356,9 +356,10 @@ module voltapp_mpi

      if(.not. vApp%doSerialMHD) call vApp%gApp%StartUpdateMhdData(vApp)

-     call Tic("DeepUpdate")
+     call Tic("DeepUpdate",.true.)
      call DeepUpdate_mpi(vApp)
-     call Toc("DeepUpdate")
+     call Toc("DeepUpdate",.true.)
      vApp%ts = vApp%ts + 1

      if(vApp%doSerialMHD) call vApp%gApp%StartUpdateMhdData(vApp)
@@ -404,8 +405,6 @@ module voltapp_mpi

   ! only do imag after spinup
   if(vApp%doDeep .and. vApp%time >= 0) then
-     call Tic("DeepUpdate", .true.)
-
      if(vApp%useHelpers) call vhReqStep(vApp)

      ! instead of PreDeep, use Tube Helpers and replicate other calls
@@ -442,7 +441,6 @@ module voltapp_mpi
      call DoImag(vApp)

      vApp%deepProcessingInProgress = .true.
-     call Toc("DeepUpdate", .true.)
   elseif(vApp%doDeep) then
      vApp%gApp%Grid%Gas0 = 0
      !Load TM03 into Gas0 for ingestion during spinup
@@ -459,7 +457,6 @@ module voltapp_mpi

   ! only do imag after spinup with deep enabled
   if(vApp%doDeep .and. vApp%time >= 0) then
-     call Tic("DeepUpdate", .true.)

      do while(SquishBlocksRemain(vApp))
         call Tic("Squish",.true.)
|
||||
|
||||
call SquishEnd(vApp)
|
||||
call PostDeep(vApp, vApp%gApp)
|
||||
call Toc("DeepUpdate", .true.)
|
||||
endif
|
||||
|
||||
end subroutine endDeep
|
||||
@@ -496,11 +492,9 @@ module voltapp_mpi
   if(.not. vApp%deepProcessingInProgress) return

   if(SquishBlocksRemain(vApp)) then
-     call Tic("DeepUpdate")
      call Tic("Squish",.true.)
      call DoSquishBlock(vApp)
      call Toc("Squish",.true.)
-     call Toc("DeepUpdate")
   endif

   if(.not. SquishBlocksRemain(vApp)) then
@@ -334,9 +334,10 @@ module voltapp
      ! update the next predicted coupling interval
      vApp%DeepT = vApp%DeepT + vApp%DeepDT

-     call Tic("DeepUpdate")
+     call Tic("DeepUpdate",.true.)
      call DeepUpdate(vApp, vApp%gApp)
-     call Toc("DeepUpdate")
+     call Toc("DeepUpdate",.true.)
      vApp%ts = vApp%ts + 1

      call vApp%gApp%StartUpdateMhdData(vApp)
@@ -389,7 +389,7 @@ module voltio
      type(IOVAR_T), dimension(MAXVOLTIOVAR) :: IOVars
      real(rp) :: symh

-     integer :: is,ie,js,je
+     integer :: is,ie,js,je,nClkSteps
      real(rp) :: Csijk,Con(NVAR)
      real(rp) :: BSDst0,AvgBSDst,DPSDst,BSSMRs(4)
      integer, dimension(4) :: outSGVBnds_corner
@@ -444,6 +444,16 @@ module voltio
      call AddOutVar(IOVars,"MJD" ,vApp%MJD)
      call AddOutVar(IOVars,"timestep",vApp%ts)

+     !Performance metrics
+     nClkSteps = readNCalls('DeepUpdate')
+     call AddOutVar(IOVars,"_perf_stepTime",readClock(1)/nClkSteps)
+     call AddOutVar(IOVars,"_perf_deepUpdateTime",readClock(1)/nClkSteps)
+     call AddOutVar(IOVars,"_perf_gamTime", readClock('GameraSync')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_squishTime", (readClock('Squish')+readClock('VoltHelpers'))/nClkSteps)
+     call AddOutVar(IOVars,"_perf_imagTime", readClock('InnerMag')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_mixTime", readClock('ReMIX')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_tubesTime", readClock('VoltTubes')/nClkSteps)
+     call AddOutVar(IOVars,"_perf_ioTime", readClock('IO')/nClkSteps)

      ! voltState stuff
      call AddOutSGV(IOVars, "Potential_total", vApp%State%potential_total, &
@@ -20,11 +20,11 @@ def create_command_line_parser():
    parser = argparse.ArgumentParser(description="Script to help setup automated tests within a kaiju repo")

    parser.add_argument(
-        "-A", required=True,
+        "-A", default="",
        help="Charge code to use when running tests."
    )
    parser.add_argument(
-        "-ce", required=True,
+        "-ce", default="",
        help="Conda environment name to load with conda module"
    )
    parser.add_argument(
@@ -80,23 +80,58 @@ def main():

    # Parse the command-line arguments.
    args = parser.parse_args()

+    # Adjust test options
+    if args.all:
+        args.unitTests = True
+        args.weeklyDash = True
+        args.compTests = True
+        args.compTestsFull = True
+        args.buildTests = True
+        args.icTests = True
+        args.intelChecks = True
+        args.reproTests = True
+
+    if args.compTestsFull:
+        args.compTests = False
+
+    if not (args.unitTests or args.weeklyDash or args.compTests or args.compTestsFull or
+            args.buildTests or args.icTests or args.intelChecks or args.reproTests):
+        parser.print_help()
+        exit()
+
    # find repo home directory
    called_from = os.path.dirname(os.path.abspath(__file__))
    os.chdir(called_from)
    os.chdir('..')
    homeDir = os.getcwd()

    # Check for necessary environment variables
-    if 'KAIJUHOME' not in os.environ:
-        print("The setupEnvironment.sh script must be sourced for the repo this script resides in before calling it.")
+    if len(args.ce) == 0 and 'CONDA_DEFAULT_ENV' not in os.environ:
+        print("A conda environment name was not supplied, and a currently loaded conda environment could not be determined.")
+        print("Please either supply the name of a conda environment with the '-ce <name>' option,")
+        print(" or load an environment before running this script, and it should be automatically found.")
        exit()
+    elif len(args.ce) == 0:
+        args.ce = os.environ['CONDA_DEFAULT_ENV']
+        print(f"Automatically setting conda environment to {args.ce}")
+    if len(args.A) == 0 and (args.unitTests or args.weeklyDash or
+            args.compTests or args.compTestsFull or args.intelChecks or args.reproTests):
+        print("A charge code was not supplied, but the requested tests require one.")
+        print("Please supply a charge code with the -A # option.")
+        exit()
+    if 'KAIJUHOME' not in os.environ:
+        os.environ['KAIJUHOME'] = homeDir
+        print(f"Running tests out of local git repository: {homeDir}")
+    if pathlib.Path(homeDir).resolve() != pathlib.Path(os.environ['KAIJUHOME']).resolve():
+        print("The setupEnvironment.sh script must be sourced for the repo this script resides in before calling it.")
+        exit()
-    if 'KAIPYHOME' not in os.environ:
-        print("The setupEnvironment.sh script for ANY kaipy repo must be sourced before calling this.")
+    if 'KAIPYHOME' not in os.environ and (args.weeklyDash or args.compTests or args.compTestsFull):
+        print("The 'KAIPYHOME' environment variable was not set, but the requested tests require it.")
+        print("The setupEnvironment.sh script for ANY kaipy repo must be sourced before running these tests.")
        exit()
+    elif 'KAIPYHOME' not in os.environ:
+        os.environ['KAIPYHOME'] = ""

    # Set environment variables
    os.environ['MAGE_TEST_ROOT'] = homeDir
@@ -124,22 +159,10 @@ def main():

    print(f"Running tests on branch {gitBranch}")
-    print(f"Using charge code {args.A} with priority {args.p}")
-    print(f"Running in folder {test_set_dir}")
-
-    # Adjust test options
-    if args.all:
-        args.unitTests = True
-        args.weeklyDash = True
-        args.compTests = True
-        args.compTestsFull = True
-        args.buildTests = True
-        args.icTests = True
-        args.intelChecks = True
-        args.reproTests = True
-
-    if args.compTestsFull:
-        args.compTests = False
+    if len(args.A) > 0:
+        print(f"Using charge code {args.A} with priority {args.p}")
+    print(f"Running in folder test_runs/{test_set_dir}")
+    print("")

    # Run Tests
    if args.unitTests:
@@ -138,7 +138,7 @@ def main():
    # Compute the names of the job log files.
    job_file_0 = f"genTestData.o{job_ids[0]}"  # 0 OKs
    job_file_1 = f"runCaseTests.o{job_ids[1]}"  # 2 OKs
-    job_file_2 = f"runNonCaseTests1.o{job_ids[2]}"  # 7 OKs
+    job_file_2 = f"runNonCaseTests1.o{job_ids[2]}"  # 6 OKs
    job_file_3 = f"runNonCaseTests2.o{job_ids[3]}"  # 1 OK
    if debug:
        print(f"job_file_0 = {job_file_0}")
@@ -164,8 +164,8 @@ def main():
        elif 'job killed' in line:
            jobKilled = True

-    # There should be exactly 10 OKs.
-    OK_COUNT_EXPECTED = 10
+    # There should be exactly 9 OKs.
+    OK_COUNT_EXPECTED = 9
    if verbose:
        print(f"Found {okCount} OKs, expected {OK_COUNT_EXPECTED}.")
    if okCount != OK_COUNT_EXPECTED:
@@ -8,7 +8,7 @@
  <coupling dtCouple="5.0" imType="RAIJU" doQkSquish="T" qkSquishStride="2" doAsyncCoupling="F"/>
  <restart dtRes="10800.0"/>
  <imag doInit="T"/>
- <threading NumTh="64"/>
+ <threading NumTh="128"/>
  <!-- without quick squish, estimated 13 helpers required -->
  <helpers numHelpers="2" useHelpers="T" doSquishHelp="T"/>
</VOLTRON>
@@ -121,7 +121,7 @@ contains
   do j=1,2
      do k=1,2
         call AddInVar(IOVars,"P")
-        write(h5Str,'(A,I0,A)') 'blast3d_large8_0002_0002_0002_000',i-1,'_0000_0000.gam.h5'
+        write(h5Str,'(A,I0,A,I0,A,I0,A)') 'blast3d_large8_0002_0002_0002_000',i-1,'_000',j-1,'_000',k-1,'.gam.h5'
        call ReadVars(IOVars,.false.,h5Str,gStr)
        call IOArray3DFill(IOVars,"P",p8(1+(i-1)*ni2:i*ni2,1+(j-1)*nj2:j*nj2,1+(k-1)*nk2:k*nk2))
        call ClearIO(IOVars)
@@ -1,82 +0,0 @@
module testetautils
   use testHelper
   use kdefs, only : TINY
   use rcmdefs
   use conversion_module, only : almmax,almmin
   use RCM_mod_subs
   use torcm_mod
   use xml_input
   use etautils

   implicit none

contains

   @before
   subroutine setup()
   end subroutine setup

   @after
   subroutine teardown()
   end subroutine teardown

   !Helpers
   subroutine initAlams()
      real (rp) :: itimei,itimef
      integer(iprec) :: nstep
      type(XML_Input_T) :: xmlInp
      INTEGER(iprec) :: ierr

      itimei = 0
      itimef = 60
      nstep = 0

      xmlInp = New_XML_Input('cmriD.xml','Kaiju/Voltron',.true.)

      ! Get RCM to read rcmconfig.h5 and store in alamc
      call setFactors(6.371E6_rprec)
      call allocate_conversion_arrays (isize,jsize,kcsize)
      call RCM(itimei, itimef, nstep, 0_iprec)
      call RCM(itimei, itimef, nstep, 1_iprec)
      call Read_alam (kcsize, alamc, ikflavc, fudgec, almdel, almmax, almmin, iesize, ierr)
   end subroutine initAlams

   @test
   subroutine testDPetaDP()
      real (rp) :: etas(kcsize)
      real (rp) :: vm
      real (rp) :: Do_rc ,Po_rc
      real (rp) :: Df_rc,Df_psph,Pf_rc

      real(rp) :: OKerr
      character(len=strLen) :: checkMessage

      real(rp) :: D_err, P_err

      OKerr = 2e-2_rp

      ! 5 keV at L=10
      Do_rc = 0.5 *1E6! [1/cc -> 1/m^3]
      Po_rc = 0.5 *1E-9! [nPa -> Pa]
      vm = 2.262

      call initAlams()

      call DP2eta(Do_rc,Po_rc,vm,etas,doRescaleO=.false.,doKapO=.false.)
      call eta2DP(etas,vm,Df_rc,Df_psph,Pf_rc)

      D_err = (Do_rc - Df_rc)/Do_rc
      P_err = (Po_rc - Pf_rc)/Po_rc

      write(*,*) 'D_err=',D_err*100,'%'
      write(*,*) 'P_err=',P_err*100,'%'

      write (checkMessage,'(A,I0,A)') "D->eta->D' error > ",OKerr*100,"%"
      @assertLessThanOrEqual(abs(D_err), OKerr, checkMessage)

      write (checkMessage,'(A,I0,A)') "D->eta->D' error > ",OKerr*100,"%"
      @assertLessThanOrEqual(abs(P_err), OKerr, checkMessage)

   end subroutine testDPetaDP

end module testetautils
@@ -42,13 +42,6 @@ date
echo 'REMIX tests complete.'
echo | tail -n 3 ./mixTests.out

-echo 'Running RCM tests.'
-date
-./rcmTests >& rcmTests.out
-date
-echo 'RCM tests complete.'
-echo | tail -n 3 ./rcmTests.out
-
echo 'Running SHELLGRID tests.'
date
./shgrTests >& shgrTests.out