Mirror of https://github.com/JHUAPL/kaiju.git (synced 2026-01-08 02:23:51 -05:00)
Merge remote-tracking branch 'origin/development' into ewinter-derecho_testing
.gitignore (vendored): 4 changed lines
@@ -8,11 +8,15 @@ external/FARGPARSE-*/
external/GFTL-*/
external/GFTL_SHARED-*/
external/PFUNIT-*/

# skip F90 files in the tests folder, except in specific subfolders
tests/*/*.F90
!tests/helperCode/*.F90
!tests/helperCode_mpi/*.F90

# any local automated tests that users have run
test_runs/

# Pre-compile generated files
src/base/git_info.F90
CITATION.cff (new file): 91 lines added
@@ -0,0 +1,91 @@
cff-version: 1.2.0
message: "If you use this software, please cite it using the metadata below."
title: "MAGE: Multiscale Atmosphere Geospace Environment Model"
version: "1.25.1"
date-released: 2025-07-03
doi: 10.5281/zenodo.16818682
repository-code: "https://github.com/JHUAPL/kaiju"
license: "BSD 3-Clause"

authors:
- family-names: Merkin
  given-names: Slava
  orcid: https://orcid.org/0000-0003-4344-5424
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Arnold
  given-names: Harry
  orcid: https://orcid.org/0000-0002-0449-1498
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Bao
  given-names: Shanshan
  orcid: https://orcid.org/0000-0002-5209-3988
  affiliation: "Rice University"
- family-names: Garretson
  given-names: Jeffery
  orcid: https://orcid.org/0000-0003-3805-9860
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Lin
  given-names: Dong
  affiliation: "NSF National Center for Atmospheric Research"
  orcid: https://orcid.org/0000-0003-2894-6677
- family-names: Lyon
  given-names: John
  affiliation: "Gamera Consulting"
  orcid: https://orcid.org/0000-0002-5759-9849
- family-names: McCubbin
  given-names: Andrew
  orcid: https://orcid.org/0000-0002-6222-3627
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Michael
  given-names: Adam
  orcid: https://orcid.org/0000-0003-2227-1242
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Pham
  given-names: Kevin
  orcid: https://orcid.org/0000-0001-5031-5519
  affiliation: "NSF National Center for Atmospheric Research"
- family-names: Provornikova
  given-names: Elena
  orcid: https://orcid.org/0000-0001-8875-7478
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Rao
  given-names: Nikhil
  affiliation: "NSF National Center for Atmospheric Research"
  orcid: https://orcid.org/0000-0003-2639-9892
- family-names: Sciola
  given-names: Anthony
  orcid: https://orcid.org/0000-0002-9752-9618
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Sorathia
  given-names: Kareem
  orcid: https://orcid.org/0000-0002-6011-5470
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Toffoletto
  given-names: Frank
  orcid: https://orcid.org/0000-0001-7789-2615
  affiliation: "Rice University"
- family-names: Ukhorskiy
  given-names: Aleksandr
  orcid: https://orcid.org/0000-0002-3326-4024
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Wang
  given-names: Wenbin
  orcid: https://orcid.org/0000-0002-6287-4542
  affiliation: "NSF National Center for Atmospheric Research"
- family-names: Wiltberger
  given-names: Michael
  orcid: https://orcid.org/0000-0002-4844-3148
  affiliation: "NSF National Center for Atmospheric Research"
- family-names: Winter
  given-names: Eric
  orcid: https://orcid.org/0000-0001-5226-2107
  affiliation: "Johns Hopkins University Applied Physics Laboratory"
- family-names: Wu
  given-names: Haonon
  orcid: https://orcid.org/0000-0002-3272-8106
  affiliation: "NSF National Center for Atmospheric Research"

keywords:
- "space weather"
- "MAGE"
- "geospace modeling"
@@ -28,6 +28,7 @@ import json
import os
import sys
import subprocess
import math

# Import 3rd-party modules.
import netCDF4
@@ -356,7 +357,7 @@ def prompt_user_for_run_options(args):
for on in ["use_segments"]:
od[on]["default"] = "Y"
o[on] = makeitso.get_run_option(on, od[on], mode)
if o["use_segments"] == "Y":
if o["use_segments"].upper() == "Y":
for on in ["segment_duration"]:
o[on] = makeitso.get_run_option(on, od[on], mode)
else:
@@ -364,7 +365,7 @@ def prompt_user_for_run_options(args):

# Compute the number of segments based on the simulation duration and
# segment duration, add 1 if there is a remainder.
if o["use_segments"] == "Y":
if o["use_segments"].upper() == "Y":
num_segments = simulation_duration/float(o["segment_duration"])
if num_segments > int(num_segments):
num_segments += 1
@@ -564,7 +565,8 @@ def main():

segment_duration = float(engage_options["simulation"]["segment_duration"])
makeitso_options["voltron"]["time"]["tFin"] = int((t1-t0).total_seconds())
makeitso_options["pbs"]["num_segments"] = str(int((t1-t0).total_seconds()/segment_duration))
num_segments = math.ceil((t1-t0).total_seconds()/segment_duration)
makeitso_options["pbs"]["num_segments"] = str(num_segments)
select2 = 1 + int(makeitso_options["pbs"]["num_helpers"])
makeitso_options["pbs"]["select2"] = str(select2)

@@ -652,7 +654,9 @@ def main():

# Create the PBS job scripts.
pbs_scripts, submit_all_jobs_script = create_pbs_scripts(engage_options,makeitso_options, makeitso_pbs_scripts, tiegcm_options, tiegcm_inp_scripts, tiegcm_pbs_scripts)
print(f"pbs_scripts = {pbs_scripts}")
print(f"GR_pbs_scripts = {makeitso_pbs_scripts}")
print(f"Tiegcm_pbs_scripts = {tiegcm_pbs_scripts}")
print(f"GTR_pbs_scripts = {pbs_scripts}")
print(f"submit_all_jobs_script = {submit_all_jobs_script}")
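The segment-count change above replaces a manual remainder check with math.ceil. A minimal Python sketch of why the two are equivalent, using made-up durations rather than the real engage/makeitso option dictionaries:

import math

def count_segments(total_seconds: float, segment_duration: float) -> int:
    """Number of fixed-length segments needed to cover the full run."""
    # Old approach: integer part, then add 1 if there is a remainder.
    n_old = int(total_seconds / segment_duration)
    if total_seconds / segment_duration > n_old:
        n_old += 1
    # New approach: math.ceil does the same thing in one step.
    n_new = math.ceil(total_seconds / segment_duration)
    assert n_old == n_new
    return n_new

# Example: an 86400 s (1 day) run split into 7200 s segments -> 12 segments.
print(count_segments(86400.0, 7200.0))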
@@ -33,6 +33,7 @@ import datetime
import json
import os
import subprocess
import math

# Import 3rd-party modules.
import h5py
@@ -419,7 +420,7 @@ def prompt_user_for_run_options(option_descriptions: dict, args: dict):
# condition file can be generated.
for on in ["bcwind_available"]:
o[on] = get_run_option(on, od[on], mode)
if o["bcwind_available"] == "Y":
if o["bcwind_available"].upper() == "Y":
for on in ["bcwind_file"]:
o[on] = get_run_option(on, od[on], mode)
# Fetch the start and stop date from the bcwind file.
@@ -889,26 +890,28 @@ def create_ini_files(options: dict, args: dict):
# Set default value for padding to tFin for coupling.
tfin_padding = 0.0
# Engage modifications to parameters.
# If TIEGCM coupling is specified, warmup segments are calculated
# based on gr_warm_up_time and segment duration. If the segment
# duration is not evenly divisible by gr_warm_up_time, the
# warmup segment duration is set to gr_warm_up_time/4.
# The number of warmup segments is set to gr_warm_up_time/
# warmup_segment_duration.
# If TIEGCM coupling is specified, calculate warmup segments based
# on gr_warm_up_time and segment_duration.
# If gr_warm_up_time is an exact multiple of segment_duration,
# use segment_duration for each warmup segment.
# If gr_warm_up_time is less than segment_duration, use
# gr_warm_up_time as the duration for a single warmup segment.
# If gr_warm_up_time is greater than segment_duration but not an
# exact multiple, use segment_duration and round up the number of segments.
if "coupling" in args:
coupling = args["coupling"]
gr_warm_up_time = float(coupling["gr_warm_up_time"])
segment_duration = float(options["simulation"]["segment_duration"])
i_last_warmup_ini = (gr_warm_up_time/segment_duration)
if i_last_warmup_ini == int(i_last_warmup_ini):
simulation_duration = float(options["voltron"]["time"]["tFin"])
if gr_warm_up_time % segment_duration == 0:
warmup_segment_duration = segment_duration
else:
warmup_segment_duration = gr_warm_up_time/4
if warmup_segment_duration != int(warmup_segment_duration):
print("Error: gr_warm_up_time is not evenly divisible by 4.")
raise ValueError("Invalid gr_warm_up_time value.")
i_last_warmup_ini = (gr_warm_up_time/warmup_segment_duration)
i_last_warmup_ini = int(i_last_warmup_ini)
i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
elif gr_warm_up_time < segment_duration:
warmup_segment_duration = gr_warm_up_time
i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
elif gr_warm_up_time > segment_duration:
warmup_segment_duration = segment_duration
i_last_warmup_ini = int(math.ceil(gr_warm_up_time/segment_duration))
# Add padding to tFin for coupling.
if coupling["tfin_delta"] == "T":
tfin_coupling_padding = float(options["voltron"]["coupling"]["dtCouple"]) - 1
@@ -984,9 +987,11 @@ def create_ini_files(options: dict, args: dict):
num_warmup_segments = i_last_warmup_ini
# Create an .ini file for each simulation segment. Files for each
# segment will be numbered starting with 1.
print(f"Creating {options['pbs']['num_segments']} segments, "
f"with {num_warmup_segments} warmup segments.")
for job in range(1, int(options["pbs"]["num_segments"]) + 1 - num_warmup_segments):
if "coupling" in args:
num_segments = math.ceil((simulation_duration - num_warmup_segments*warmup_segment_duration)/segment_duration)
else:
num_segments = int(options["pbs"]["num_segments"])
for job in range(1, num_segments + 1):
opt = copy.deepcopy(options) # Need a copy of options
runid = opt["simulation"]["job_name"]
# NOTE: This naming scheme supports a maximum of 99 segments.
@@ -1009,7 +1014,7 @@ def create_ini_files(options: dict, args: dict):
if "coupling" in args:
opt["voltron"]["coupling"]["doGCM"] = doGCM
# tFin padding different for last segment.
if job == int(options["pbs"]["num_segments"]) - num_warmup_segments:
if job == num_segments:
tfin_padding = -1.0
else:
# Subtract 1 from tFin padding for coupling beacuse to offset the +1.0 for restart file done above.
@@ -1210,9 +1215,20 @@ def create_pbs_scripts(xml_files: list, options: dict, args: dict):
coupling = args["coupling"]
gr_warm_up_time = float(coupling["gr_warm_up_time"])
segment_duration = float(options["simulation"]["segment_duration"])
i_last_warmup_pbs_script = int(gr_warm_up_time/segment_duration)
simulation_duration = float(options["voltron"]["time"]["tFin"])
if gr_warm_up_time % segment_duration == 0:
warmup_segment_duration = segment_duration
i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
elif gr_warm_up_time < segment_duration:
warmup_segment_duration = gr_warm_up_time
i_last_warmup_ini = int(gr_warm_up_time/warmup_segment_duration)
elif gr_warm_up_time > segment_duration:
warmup_segment_duration = segment_duration
i_last_warmup_ini = int(math.ceil(gr_warm_up_time/segment_duration))
num_warmup_segments = i_last_warmup_ini
#i_last_warmup_pbs_script = int(gr_warm_up_time/segment_duration)
spinup_pbs_scripts.append(pbs_scripts[0]) # Spinup script is first
warmup_pbs_scripts = pbs_scripts[1:i_last_warmup_pbs_script + 1] # Warmup scripts
warmup_pbs_scripts = pbs_scripts[1:num_warmup_segments + 1] # Warmup scripts
# Return the paths to the PBS scripts.
return pbs_scripts, submit_all_jobs_script,spinup_pbs_scripts, warmup_pbs_scripts
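The replacement comment block above spells out how warmup segments are sized from gr_warm_up_time and segment_duration. The following stand-alone Python sketch restates that selection rule; the function name and example numbers are illustrative only and are not part of the repository:

import math

def warmup_segmentation(gr_warm_up_time: float, segment_duration: float):
    """Return (warmup_segment_duration, num_warmup_segments) per the rule described in the diff."""
    if gr_warm_up_time % segment_duration == 0:
        # Exact multiple: warm up with full-length segments.
        warmup_segment_duration = segment_duration
        n = int(gr_warm_up_time / warmup_segment_duration)
    elif gr_warm_up_time < segment_duration:
        # Shorter than one segment: a single short warmup segment.
        warmup_segment_duration = gr_warm_up_time
        n = 1
    else:
        # Longer than a segment but not an exact multiple: round the count up.
        warmup_segment_duration = segment_duration
        n = math.ceil(gr_warm_up_time / segment_duration)
    return warmup_segment_duration, n

# Example: 9000 s of warmup with 3600 s segments -> (3600.0, 3), the last segment padded.
print(warmup_segmentation(9000.0, 3600.0))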
@@ -15,7 +15,7 @@ module arrayutil

subroutine fillArray1D_R(array, val)
real(rp), dimension(:), allocatable, intent(inout) :: array
real(rp), dimension(:), intent(inout) :: array
real(rp), intent(in) :: val

integer :: lbds(1),ubds(1),i
@@ -37,7 +37,7 @@ module arrayutil
end subroutine fillArray1D_R

subroutine fillArray1D_I(array, val)
integer, dimension(:), allocatable, intent(inout) :: array
integer, dimension(:), intent(inout) :: array
integer, intent(in) :: val

integer :: lbds(1),ubds(1),i
@@ -59,7 +59,7 @@ module arrayutil
end subroutine fillArray1D_I

subroutine fillArray2D_R(array, val)
real(rp), dimension(:,:), allocatable, intent(inout) :: array
real(rp), dimension(:,:), intent(inout) :: array
real(rp), intent(in) :: val

integer :: lbds(2),ubds(2),i,j
@@ -83,7 +83,7 @@ module arrayutil
end subroutine fillArray2D_R

subroutine fillArray2D_I(array, val)
integer, dimension(:,:), allocatable, intent(inout) :: array
integer, dimension(:,:), intent(inout) :: array
integer, intent(in) :: val

integer :: lbds(2),ubds(2),i,j
@@ -107,7 +107,7 @@ module arrayutil
end subroutine fillArray2D_I

subroutine fillArray3D_R(array, val)
real(rp), dimension(:,:,:), allocatable, intent(inout) :: array
real(rp), dimension(:,:,:), intent(inout) :: array
real(rp), intent(in) :: val

integer :: lbds(3),ubds(3),i,j,k
@@ -133,7 +133,7 @@ module arrayutil
end subroutine fillArray3D_R

subroutine fillArray3D_I(array, val)
integer, dimension(:,:,:), allocatable, intent(inout) :: array
integer, dimension(:,:,:), intent(inout) :: array
integer, intent(in) :: val

integer :: lbds(3),ubds(3),i,j,k
@@ -159,7 +159,7 @@ module arrayutil
end subroutine fillArray3D_I

subroutine fillArray4D_R(array, val)
real(rp), dimension(:,:,:,:), allocatable, intent(inout) :: array
real(rp), dimension(:,:,:,:), intent(inout) :: array
real(rp), intent(in) :: val

integer :: lbds(4),ubds(4),i,j,k
@@ -185,7 +185,7 @@ module arrayutil
end subroutine fillArray4D_R

subroutine fillArray4D_I(array, val)
integer, dimension(:,:,:,:), allocatable, intent(inout) :: array
integer, dimension(:,:,:,:), intent(inout) :: array
integer, intent(in) :: val

integer :: lbds(4),ubds(4),i,j,k
@@ -211,7 +211,7 @@ module arrayutil
end subroutine fillArray4D_I

subroutine fillArray5D_R(array, val)
real(rp), dimension(:,:,:,:,:), allocatable, intent(inout) :: array
real(rp), dimension(:,:,:,:,:), intent(inout) :: array
real(rp), intent(in) :: val

integer :: lbds(5),ubds(5),i,j,k
@@ -237,7 +237,7 @@ module arrayutil
end subroutine fillArray5D_R

subroutine fillArray5D_I(array, val)
integer, dimension(:,:,:,:,:), allocatable, intent(inout) :: array
integer, dimension(:,:,:,:,:), intent(inout) :: array
integer, intent(in) :: val

integer :: lbds(5),ubds(5),i,j,k
@@ -38,6 +38,7 @@ module clocks
!Depth/parent, ie array entry of parent of this clock
integer :: level=-1,parent=-1
integer :: iTic=0,iToc=0 !Integer ticks
integer :: nCalls=0 !Number of tocs, or finished timer loops since cleaning
real(rp) :: tElap=0.0 !Elapsed time
end type Clock_T

@@ -50,6 +51,11 @@ module clocks
interface readClock
module procedure readClock_str, readClock_int
end interface

!interface for reading number of calls to a clock
interface readNCalls
module procedure readNCalls_str, readNCalls_int
end interface

contains

@@ -171,6 +177,8 @@ module clocks
wclk = real(kClocks(iblk)%iToc-kClocks(iblk)%iTic)/real(clockRate)

kClocks(iblk)%tElap = kClocks(iblk)%tElap + wclk
kClocks(iblk)%nCalls = kClocks(iblk)%nCalls + 1

end subroutine Toc

!Reset clocks
@@ -179,6 +187,7 @@ module clocks

do n=1,nclk
kClocks(n)%tElap = 0
kClocks(n)%nCalls = 0
! if the clock is active, reset the tic to right now
if(kClocks(n)%isOn) call Tic(kClocks(n)%cID, .true.)
enddo
@@ -223,6 +232,39 @@ module clocks
endif
end function readClock_int

function readNCalls_str(cID) result(nc)
character(len=*), intent(in) :: cID

integer :: n,iblk
integer :: nc

iblk = 0
!Find timer
do n=1,nclk
if (toUpper(kClocks(n)%cID) == toUpper(cID)) then
!Found it, save ID
iblk = n
endif
enddo

nc = readNCalls_int(iblk)

end function readNCalls_str

function readNCalls_int(iblk) result(nc)
integer, intent(in) :: iblk

integer :: tmpToc
integer :: nc

if (iblk == 0) then
nc = 0
else
nc = kClocks(iblk)%nCalls
endif

end function readNCalls_int

!Output clock data
subroutine printClocks()
integer :: n,l
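The clocks changes above add an nCalls counter and a readNCalls interface so that downstream I/O (see the gioH5 and raijuIO hunks later in this diff) can report average time per step as readClock(...)/readNCalls(...). A rough Python analogue of that tic/toc accumulator pattern, shown for illustration only and not part of the repository:

import time

class Clock:
    """Minimal analogue of the Fortran Clock_T accumulator (illustrative sketch)."""
    def __init__(self, cid: str):
        self.cid = cid
        self.t_elap = 0.0   # accumulated elapsed time, like tElap
        self.n_calls = 0    # finished tic/toc pairs, like nCalls
        self._t0 = 0.0

    def tic(self):
        self._t0 = time.perf_counter()

    def toc(self):
        self.t_elap += time.perf_counter() - self._t0
        self.n_calls += 1

    def per_call(self) -> float:
        """Average wall time per call, mirroring readClock(...)/readNCalls(...)."""
        return self.t_elap / self.n_calls if self.n_calls else 0.0

advance = Clock("Advance")
for _ in range(3):
    advance.tic()
    time.sleep(0.01)   # stand-in for one model step
    advance.toc()
print(f"{advance.cid}: {advance.per_call():.4f} s per step over {advance.n_calls} calls")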
@@ -70,6 +70,7 @@ module raijudefs
|
||||
real(rp), parameter :: def_cfl = 0.3
|
||||
real(rp), parameter :: cflMax = 0.3
|
||||
logical, parameter :: def_doUseVelLRs = .true.
|
||||
logical, parameter :: def_doSmoothGrads = .true.
|
||||
|
||||
! Domain limits
|
||||
! Buffer not allowed beyond min of maxTail and maxSun
|
||||
|
||||
@@ -147,6 +147,11 @@ module raijutypes
|
||||
|
||||
|
||||
!--- State ---!
|
||||
logical :: doCS_next_preAdv = .false.
|
||||
!! Signal to run coldstart next time raiju preAdvances
|
||||
real(rp) :: modelDst_next_preAdv = 0.0_rp
|
||||
!! Target Dst [nT] when we run coldstart next
|
||||
|
||||
logical :: doneFirstCS = .false.
|
||||
!! Have we executed once already?
|
||||
real(rp) :: lastEval = -1*HUGE
|
||||
@@ -221,6 +226,8 @@ module raijutypes
|
||||
!! For debug
|
||||
logical :: writeGhosts
|
||||
!! For debug
|
||||
logical :: doSmoothGrads
|
||||
!! Whether or not we smooth variables (bvol and electric potential) before taking gradients
|
||||
logical :: doClockConsoleOut
|
||||
!! If we are driving, output clock info
|
||||
logical :: doOutput_potGrads
|
||||
|
||||
@@ -65,22 +65,19 @@ module chmpfields
|
||||
allocate(By(Nip,Njp,Nkp))
|
||||
allocate(Bz(Nip,Njp,Nkp))
|
||||
|
||||
allocate(Vx(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(Vy(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(Vz(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
|
||||
if (Model%doMHD) then
|
||||
allocate(D (Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(P (Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(Vx(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(Vy(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
allocate(Vz(Nip,Njp,Nkp,0:Model%nSpc))
|
||||
else
|
||||
allocate(Vx(Nip,Njp,Nkp,0))
|
||||
allocate(Vy(Nip,Njp,Nkp,0))
|
||||
allocate(Vz(Nip,Njp,Nkp,0))
|
||||
endif
|
||||
|
||||
if (Model%doJ) then
|
||||
allocate(Jx(Nip,Njp,Nkp))
|
||||
allocate(Jy(Nip,Njp,Nkp))
|
||||
allocate(Jz(Nip,Njp,Nkp))
|
||||
|
||||
endif
|
||||
|
||||
!------------
|
||||
|
||||
@@ -30,6 +30,7 @@ program voltron_mpix
|
||||
type(XML_Input_T) :: xmlInp
|
||||
real(rp) :: nextDT
|
||||
integer :: divideSize,i
|
||||
logical :: doResetClocks = .false.
|
||||
|
||||
! initialize MPI
|
||||
!Set up MPI with or without thread support
|
||||
@@ -206,12 +207,12 @@ program voltron_mpix
|
||||
if (vApp%IO%doTimerOut) then
|
||||
call printClocks()
|
||||
endif
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
elseif (vApp%IO%doTimer(vApp%time)) then
|
||||
if (vApp%IO%doTimerOut) then
|
||||
call printClocks()
|
||||
endif
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
endif
|
||||
|
||||
!Data output
|
||||
@@ -223,6 +224,12 @@ program voltron_mpix
|
||||
if (vApp%IO%doRestart(vApp%time)) then
|
||||
call resOutputV(vApp,vApp%gApp)
|
||||
endif
|
||||
|
||||
!Reset clocks last so data is available for all output
|
||||
if (doResetClocks) then
|
||||
call cleanClocks()
|
||||
doResetClocks = .false.
|
||||
endif
|
||||
|
||||
call Toc("IO", .true.)
|
||||
call Toc("Omega", .true.)
|
||||
@@ -257,11 +264,11 @@ program voltron_mpix
|
||||
|
||||
!Timing info
|
||||
if (gApp%Model%IO%doTimerOut) call printClocks()
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
|
||||
elseif (gApp%Model%IO%doTimer(gApp%Model%t)) then
|
||||
if (gApp%Model%IO%doTimerOut) call printClocks()
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
endif
|
||||
|
||||
if (gApp%Model%IO%doOutput(gApp%Model%t)) then
|
||||
@@ -274,6 +281,11 @@ program voltron_mpix
|
||||
call gApp%WriteRestart(gApp%Model%IO%nRes)
|
||||
endif
|
||||
|
||||
if (doResetClocks) then
|
||||
call cleanClocks()
|
||||
doResetClocks = .false.
|
||||
endif
|
||||
|
||||
call Toc("IO")
|
||||
call Toc("Omega", .true.)
|
||||
end do
|
||||
|
||||
@@ -11,6 +11,7 @@ program voltronx
|
||||
|
||||
type(voltApp_T) :: vApp
|
||||
real(rp) :: nextDT
|
||||
logical :: doResetClocks = .false.
|
||||
|
||||
call initClocks()
|
||||
|
||||
@@ -37,10 +38,10 @@ program voltronx
|
||||
call consoleOutputV(vApp,vApp%gApp)
|
||||
!Timing info
|
||||
if (vApp%IO%doTimerOut) call printClocks()
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
elseif (vApp%IO%doTimer(vApp%time)) then
|
||||
if (vApp%IO%doTimerOut) call printClocks()
|
||||
call cleanClocks()
|
||||
doResetClocks = .true.
|
||||
endif
|
||||
|
||||
!Data output
|
||||
@@ -51,6 +52,11 @@ program voltronx
|
||||
if (vApp%IO%doRestart(vApp%time)) then
|
||||
call resOutputV(vApp,vApp%gApp)
|
||||
endif
|
||||
!Reset clocks last
|
||||
if (doResetClocks) then
|
||||
call cleanClocks()
|
||||
doResetClocks = .false.
|
||||
endif
|
||||
|
||||
call Toc("IO", .true.)
|
||||
|
||||
|
||||
@@ -73,6 +73,7 @@ module gamapp
|
||||
subroutine stepGamera(gameraApp)
|
||||
class(gamApp_T), intent(inout) :: gameraApp
|
||||
|
||||
call Tic("Advance", .true.)
|
||||
!update the state variables to the next timestep
|
||||
call UpdateStateData(gameraApp)
|
||||
|
||||
@@ -82,11 +83,13 @@ module gamapp
|
||||
call Toc("DT")
|
||||
|
||||
!Enforce BCs
|
||||
call Tic("BCs")
|
||||
call Tic("BCs", .true.)
|
||||
call EnforceBCs(gameraApp%Model,gameraApp%Grid,gameraApp%State)
|
||||
!Update Bxyz's
|
||||
call bFlux2Fld (gameraApp%Model,gameraApp%Grid,gameraApp%State%magFlux,gameraApp%State%Bxyz)
|
||||
call Toc("BCs")
|
||||
call Toc("BCs", .true.)
|
||||
|
||||
call Toc("Advance", .true.)
|
||||
|
||||
end subroutine stepGamera
|
||||
|
||||
|
||||
@@ -258,7 +258,7 @@ module gioH5
|
||||
type(State_T), intent(in) :: State
|
||||
character(len=*), intent(in) :: gStr
|
||||
|
||||
integer :: i,j,k,s
|
||||
integer :: i,j,k,s,nClkSteps
|
||||
character(len=strLen) :: dID,VxID,VyID,VzID,PID
|
||||
integer iMin,iMax,jMin,jMax,kMin,kMax
|
||||
|
||||
@@ -539,6 +539,18 @@ module gioH5
|
||||
call AddOutVar(IOVars,"kzcsTOT",Model%kzcsTOT,uStr="kZCs",dStr="Total kZCs" )
|
||||
|
||||
!---------------------
|
||||
!Performance metrics
|
||||
|
||||
nClkSteps = readNCalls('Advance')
|
||||
call AddOutVar(IOVars,"_perf_stepTime",readClock(1)/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_mathTime", readClock('Gamera')/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_bcTime", readClock('BCs')/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_haloTime", readClock('Halos')/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_voltTime", readClock('VoltSync')/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_ioTime", readClock('IO')/nClkSteps)
|
||||
call AddOutVar(IOVars,"_perf_advanceTime", readClock('Advance')/nClkSteps)
|
||||
|
||||
!----------------------
|
||||
|
||||
!Call user routine if defined
|
||||
if (associated(Model%HackIO)) then
|
||||
|
||||
@@ -584,9 +584,9 @@ module gamapp_mpi
|
||||
character(len=strLen) :: BCID
|
||||
|
||||
!Enforce BCs
|
||||
call Tic("BCs")
|
||||
call Tic("BCs", .true.)
|
||||
call EnforceBCs(gamAppMpi%Model,gamAppMpi%Grid,State)
|
||||
call Toc("BCs")
|
||||
call Toc("BCs", .true.)
|
||||
|
||||
!Track timing for all gamera ranks to finish physical BCs
|
||||
! Only synchronize when timing
|
||||
@@ -597,10 +597,10 @@ module gamapp_mpi
|
||||
endif
|
||||
|
||||
!Update ghost cells
|
||||
call Tic("Halos")
|
||||
call Tic("Halos", .true.)
|
||||
call HaloUpdate(gamAppMpi, State)
|
||||
call bFlux2Fld(gamAppMpi%Model, gamappMpi%Grid, State%magFlux, State%Bxyz) !Update Bxyz's
|
||||
call Toc("Halos")
|
||||
call Toc("Halos", .true.)
|
||||
|
||||
!Track timing for all gamera ranks to finish halo comms
|
||||
! Only synchronize when timing
|
||||
@@ -611,6 +611,7 @@ module gamapp_mpi
|
||||
endif
|
||||
|
||||
! Re-apply periodic BCs last
|
||||
call Tic("BCs", .true.)
|
||||
do i=1,gamAppMpi%Grid%NumBC
|
||||
if(allocated(gamAppMpi%Grid%externalBCs(i)%p)) then
|
||||
SELECT type(bc=>gamAppMpi%Grid%externalBCs(i)%p)
|
||||
@@ -649,6 +650,7 @@ module gamapp_mpi
|
||||
endselect
|
||||
endif
|
||||
enddo
|
||||
call Toc("BCs", .true.)
|
||||
|
||||
!Track timing for all gamera ranks to finish periodic BCs
|
||||
! Only synchronize when timing
|
||||
@@ -666,6 +668,7 @@ module gamapp_mpi
|
||||
integer :: ierr,i
|
||||
real(rp) :: tmp
|
||||
|
||||
call Tic("Advance", .true.)
|
||||
!update the state variables to the next timestep
|
||||
call UpdateStateData(gamAppMpi)
|
||||
|
||||
@@ -684,6 +687,7 @@ module gamapp_mpi
|
||||
|
||||
!Update BCs MPI style
|
||||
call updateMpiBCs(gamAppMpi, gamAppmpi%State)
|
||||
call Toc("Advance", .true.)
|
||||
|
||||
end subroutine stepGamera_mpi
|
||||
|
||||
|
||||
@@ -34,21 +34,21 @@ module raijuAdvancer
|
||||
|
||||
State%dt = dtCpl
|
||||
|
||||
call Tic("Pre-Advance")
|
||||
call Tic("Pre-Advance",.true.)
|
||||
call raijuPreAdvance(Model, Grid, State)
|
||||
call Toc("Pre-Advance")
|
||||
call Toc("Pre-Advance",.true.)
|
||||
State%isFirstCpl = .false.
|
||||
|
||||
! Step
|
||||
call Tic("AdvanceState")
|
||||
call Tic("AdvanceState",.true.)
|
||||
call AdvanceState(Model, Grid, State)
|
||||
call Toc("AdvanceState")
|
||||
call Toc("AdvanceState",.true.)
|
||||
|
||||
! etas back to moments
|
||||
call Tic("Moments Eval")
|
||||
call Tic("Moments Eval",.true.)
|
||||
call EvalMoments(Grid, State)
|
||||
call EvalMoments(Grid, State, doAvgO=.true.)
|
||||
call Toc("Moments Eval")
|
||||
call Toc("Moments Eval",.true.)
|
||||
|
||||
end subroutine raijuAdvance
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ module raijuBCs
|
||||
use raijutypes
|
||||
use raijuetautils
|
||||
use raijudomain
|
||||
use raijuColdStartHelper
|
||||
|
||||
implicit none
|
||||
|
||||
@@ -29,12 +30,15 @@ module raijuBCs
|
||||
doWholeDomain = .false.
|
||||
endif
|
||||
|
||||
! Now that topo is set, we can calculate active domain
|
||||
call setActiveDomain(Model, Grid, State)
|
||||
|
||||
call calcMomentIngestionLocs(Model, Grid, State, doWholeDomain, doMomentIngest)
|
||||
call applyMomentIngestion(Model, Grid, State, doMomentIngest)
|
||||
|
||||
if (State%coldStarter%doCS_next_preAdv) then
|
||||
call raijuGeoColdStart(Model, Grid, State, State%t, State%coldStarter%modelDst_next_preAdv, doAccumulateO=.true.)
|
||||
State%coldStarter%doCS_next_preAdv = .false.
|
||||
endif
|
||||
|
||||
|
||||
if (Model%doActiveShell ) then
|
||||
! Do first round of determining active shells for each k
|
||||
@@ -64,9 +68,18 @@ module raijuBCs
|
||||
doMomentIngest = .false.
|
||||
! Determine where to do BCs
|
||||
if(doWholeDomain) then
|
||||
where (State%active .ne. RAIJUINACTIVE)
|
||||
doMomentIngest = .true.
|
||||
end where
|
||||
!where (State%active .ne. RAIJUINACTIVE)
|
||||
! doMomentIngest = .true.
|
||||
!end where
|
||||
associate(sh=>Grid%shGrid)
|
||||
do j=sh%jsg,sh%jeg
|
||||
do i=sh%isg,sh%ieg
|
||||
if (State%active(i,j) .ne. RAIJUINACTIVE) then
|
||||
doMomentIngest(i,j) = .true.
|
||||
endif
|
||||
enddo
|
||||
enddo
|
||||
end associate
|
||||
else
|
||||
|
||||
associate(sh=>Grid%shGrid)
|
||||
@@ -118,7 +131,6 @@ module raijuBCs
|
||||
psphIdx = spcIdx(Grid, F_PSPH)
|
||||
eleIdx = spcIdx(Grid, F_HOTE)
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j,s,fIdx,fm,vm,kT,etaBelow,tmp_kti,tmp_kte,eMin,tmp_D,tmp_P)
|
||||
do j=Grid%shGrid%jsg,Grid%shGrid%jeg
|
||||
do i=Grid%shGrid%isg,Grid%shGrid%ieg
|
||||
@@ -215,8 +227,8 @@ module raijuBCs
|
||||
press2D = State%Pavg (i-2:i+2, j-2:j+2, fIdx)
|
||||
wgt2D = State%domWeights(i-2:i+2, j-2:j+2)
|
||||
isG2D = State%active (i-2:i+2, j-2:j+2) .ne. RAIJUINACTIVE
|
||||
D = sum(den2D * wgt2D, mask=isG2D)/sum(wgt2D, mask=isG2D)
|
||||
P = sum(press2D* wgt2D, mask=isG2D)/sum(wgt2D, mask=isG2D)
|
||||
D = sum(den2D * wgt2D, mask=isG2D)/max( sum(wgt2D, mask=isG2D), TINY)
|
||||
P = sum(press2D* wgt2D, mask=isG2D)/max( sum(wgt2D, mask=isG2D), TINY)
|
||||
endif
|
||||
end subroutine getDomWeightedMoments
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ module raijuColdStartHelper
|
||||
use raijutypes
|
||||
use imaghelper
|
||||
use earthhelper
|
||||
use arrayutil
|
||||
|
||||
use raijuetautils
|
||||
use raijuloss_CX
|
||||
@@ -32,6 +33,8 @@ module raijuColdStartHelper
|
||||
call iXML%Set_Val(coldStarter%tEnd,'coldStarter/tEnd',coldStarter%evalCadence-TINY) ! Don't do any updates as default
|
||||
endif
|
||||
|
||||
coldStarter%doneFirstCS = .false.
|
||||
|
||||
end subroutine initRaijuColdStarter
|
||||
|
||||
|
||||
@@ -41,7 +44,7 @@ module raijuColdStartHelper
|
||||
! Worker routines
|
||||
!------
|
||||
|
||||
subroutine raijuGeoColdStart(Model, Grid, State, t0, dstModel)
|
||||
subroutine raijuGeoColdStart(Model, Grid, State, t0, dstModel, doAccumulateO)
|
||||
!! Cold start RAIJU assuming we are at Earth sometime around 21st century
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
@@ -50,26 +53,31 @@ module raijuColdStartHelper
|
||||
!! Target time to pull SW values from
|
||||
real(rp), intent(in) :: dstModel
|
||||
!! Current dst of global model
|
||||
logical, optional, intent(in) :: doAccumulateO
|
||||
!! If true, keep State%eta and add coldstart to it.
|
||||
!! If false, replace State%eta with coldStart info
|
||||
!! Default: false
|
||||
|
||||
logical :: isFirstCS
|
||||
logical :: doInitRC, doAccumulate
|
||||
integer :: i,j,k
|
||||
integer :: s, sIdx_p, sIdx_e
|
||||
real(rp) :: dstReal, dstTarget
|
||||
real(rp) :: dps_current, dps_preCX, dps_postCX, dps_rescale, dps_ele
|
||||
real(rp) :: etaScale
|
||||
logical, dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg) :: isGood
|
||||
logical , dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg) :: isGood
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk) :: etaCS
|
||||
|
||||
associate(cs=>State%coldStarter)
|
||||
!write(*,*)"Coldstart running..."
|
||||
|
||||
|
||||
isFirstCS = .not. State%coldStarter%doneFirstCS
|
||||
|
||||
if (.not. isFirstCS .and. .not. cs%doUpdate) then
|
||||
return
|
||||
if (present(doAccumulateO)) then
|
||||
doAccumulate = doAccumulateO
|
||||
else
|
||||
doAccumulate = .false.
|
||||
endif
|
||||
|
||||
call fillArray(etaCS, 0.0_rp)
|
||||
|
||||
where (State%active .eq. RAIJUACTIVE)
|
||||
associate(cs=>State%coldStarter)
|
||||
|
||||
where (State%active .ne. RAIJUINACTIVE)
|
||||
isGood = .true.
|
||||
elsewhere
|
||||
isGood = .false.
|
||||
@@ -77,102 +85,154 @@ module raijuColdStartHelper
|
||||
|
||||
sIdx_p = spcIdx(Grid, F_HOTP)
|
||||
sIdx_e = spcIdx(Grid, F_HOTE)
|
||||
|
||||
|
||||
if (isFirstCS) then
|
||||
! Start by nuking all etas we will set up ourselves
|
||||
do s=1,Grid%nSpc
|
||||
!! Skip plasmashere, let that be handled on its own
|
||||
!if ( Grid%spc(s)%flav == F_PSPH) then
|
||||
! continue
|
||||
!endif
|
||||
State%eta(:,:,Grid%spc(s)%kStart:Grid%spc(s)%kEnd) = 0.0
|
||||
enddo
|
||||
endif
|
||||
|
||||
!! Init psphere
|
||||
!if (isFirstCS .or. cs%doPsphUpdate) then
|
||||
! call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
|
||||
!endif
|
||||
|
||||
! Update Dst target
|
||||
dstReal = GetSWVal('symh', Model%tsF, t0)
|
||||
if (isFirstCS) then
|
||||
if (cs%doneFirstCS) then
|
||||
write(*,*)"Already coldstarted once, you shouldn't be here "
|
||||
return
|
||||
else
|
||||
! On first try, we assume there is no existing ring current, and its our job to make up the entire difference
|
||||
dstTarget = dstReal - dstModel
|
||||
|
||||
else if (t0 > (cs%lastEval + cs%evalCadence)) then
|
||||
! If we are updating, there should already be some ring current
|
||||
! If dstReal - dstModel is still < 0, we need to add ADDITIONAL pressure to get them to match
|
||||
dps_current = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood) + spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_e), isGood)
|
||||
dstTarget = dstReal - (dstModel - dps_current)
|
||||
else
|
||||
! Otherwise we have nothing to do, just chill til next update time
|
||||
return
|
||||
endif
|
||||
|
||||
cs%lastEval = t0
|
||||
cs%lastTarget = dstTarget
|
||||
cs%doneFirstCS = .true. ! Whether we do anything or not, we were at least called once
|
||||
|
||||
if (dstTarget > 0) then ! We got nothing to contribute
|
||||
! Now decide if we need to add a starter ring current
|
||||
if (dstTarget >= 0) then ! We've got nothing to contribute
|
||||
write(*,*)"RAIJU coldstart not adding starter ring current"
|
||||
return
|
||||
endif
|
||||
|
||||
if (isFirstCS) then
|
||||
! Init psphere
|
||||
call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
|
||||
! Init hot protons
|
||||
call raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget)
|
||||
dps_preCX = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)
|
||||
! Hit it with some charge exchange
|
||||
if (cs%doCX) then
|
||||
call raiColdStart_applyCX(Model, Grid, State, Grid%spc(sIdx_p))
|
||||
endif
|
||||
dps_postCX = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)
|
||||
! Calc moments to update pressure and density
|
||||
call EvalMoments(Grid, State)
|
||||
! Use HOTP moments to set electrons
|
||||
call raiColdStart_initHOTE(Model, Grid, State)
|
||||
dps_ele = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_e), isGood)
|
||||
dps_current = dps_postCX ! Note: if using fudge we're gonna lose electrons immediately, don't include them in current dst for now
|
||||
! Init hot protons
|
||||
call raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget, etaCS)
|
||||
!call raiColdStart_initHOTP_RCOnly(Model, Grid, State, t0, dstTarget, etaCS)
|
||||
dps_preCX = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
|
||||
! Hit it with some charge exchange
|
||||
if (cs%doCX) then
|
||||
call raiColdStart_applyCX(Model, Grid, State, Grid%spc(sIdx_p), etaCS)
|
||||
endif
|
||||
dps_postCX = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
|
||||
! Use HOTP moments to set electrons
|
||||
call raiColdStart_initHOTE(Model, Grid, State, etaCS)
|
||||
dps_ele = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_e), isGood)
|
||||
dps_current = dps_postCX ! Note: if using fudge we're gonna lose electrons immediately, don't include them in current dst for now
|
||||
|
||||
etaScale = abs(dstTarget / dps_current)
|
||||
State%eta(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd) = etaScale*State%eta(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd)
|
||||
dps_rescale = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)
|
||||
|
||||
if (isfirstCS) then
|
||||
write(*,*) "RAIJU Cold starting..."
|
||||
write(*,'(a,f7.2)') " Real Dst : ",dstReal
|
||||
write(*,'(a,f7.2)') " Model Dst : ",dstModel
|
||||
write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
|
||||
write(*,'(a,f7.2)') " Hot proton pre-loss : ",dps_preCX
|
||||
write(*,'(a,f7.2)') " post-loss : ",dps_postCX
|
||||
write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
|
||||
write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele
|
||||
if (dstTarget < 0) then
|
||||
etaScale = abs(dstTarget / dps_current)
|
||||
etaCS(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd) = etaScale*etaCS(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd)
|
||||
dps_rescale = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
|
||||
else
|
||||
write(*,'(a,f7.2)') " Real Dst : ",dstReal
|
||||
write(*,'(a,f7.2)') " Model Dst : ",dstModel
|
||||
write(*,'(a,f7.2)') " Current DPS-Dst : ",dps_current
|
||||
write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
|
||||
write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
|
||||
write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele
|
||||
dps_rescale = dps_current
|
||||
endif
|
||||
|
||||
write(*,*) "RAIJU Cold starting..."
|
||||
write(*,'(a,f7.2)') " Real Dst : ",dstReal
|
||||
write(*,'(a,f7.2)') " Model Dst : ",dstModel
|
||||
write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
|
||||
write(*,'(a,f7.2)') " Hot proton pre-loss : ",dps_preCX
|
||||
write(*,'(a,f7.2)') " post-loss : ",dps_postCX
|
||||
write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
|
||||
write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele
|
||||
|
||||
end associate
|
||||
|
||||
State%coldStarter%doneFirstCS = .true.
|
||||
! finally, put it into raiju state
|
||||
if(doAccumulate) then
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP private(i,j,k)
|
||||
do j=Grid%shGrid%jsg,Grid%shGrid%jeg
|
||||
do i=Grid%shGrid%isg,Grid%shGrid%ieg
|
||||
do k=1,Grid%Nk
|
||||
if(etaCS(i,j,k) > State%eta(i,j,k)) then
|
||||
State%eta(i,j,k) = etaCS(i,j,k)
|
||||
endif
|
||||
enddo
|
||||
enddo
|
||||
enddo
|
||||
else
|
||||
State%eta = etaCS
|
||||
endif
|
||||
|
||||
end subroutine raijuGeoColdStart
|
||||
|
||||
|
||||
subroutine raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget)
|
||||
subroutine raiColdStart_initHOTP_RCOnly(Model, Grid, State, t0, dstTarget, etaCS)
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
type(raijuState_T), intent(inout) :: State
|
||||
type(raijuState_T), intent(in) :: State
|
||||
real(rp), intent(in) :: t0
|
||||
!! Target time to pull SW values from
|
||||
real(rp), intent(in) :: dstTarget
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS
|
||||
|
||||
real(rp) :: dstTarget_p
|
||||
logical :: isInTM03
|
||||
integer :: i,j,sIdx
|
||||
integer, dimension(2) :: ij_TM
|
||||
real(rp) :: vSW, dSW, dPS_emp, pPS_emp, ktPS_emp
|
||||
real(rp) :: x0_TM, y0_TM, T0_TM, Bvol0_TM, P0_ps, N0_ps
|
||||
real(rp) :: L, vm, P_rc, D_rc, kt_rc
|
||||
|
||||
sIdx = spcIdx(Grid, F_HOTP)
|
||||
associate(sh=>Grid%shGrid, spc=>Grid%spc(sIdx))
|
||||
|
||||
! Set everything to zero to start
|
||||
etaCS(:,:,spc%kStart:spc%kEnd) = 0.0_rp
|
||||
|
||||
! Scale target Dst down to account for electrons contributing stuff later
|
||||
dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
|
||||
call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90
|
||||
|
||||
|
||||
! Get reference TM value at -10 Re
|
||||
x0_TM = -10.0-TINY
|
||||
y0_TM = 0.0
|
||||
! Empirical temperature
|
||||
call EvalTM03([x0_TM,y0_TM,0.0_rp],N0_ps,P0_ps,isInTM03)
|
||||
T0_TM = DP2kT(N0_ps, P0_ps)
|
||||
! Model FTV
|
||||
ij_TM = minloc( sqrt( (State%xyzMincc(:,:,XDIR)-x0_TM)**2 + (State%xyzMincc(:,:,YDIR)**2) ) )
|
||||
Bvol0_TM = State%bvol_cc(ij_TM(IDIR), ij_TM(JDIR))
|
||||
|
||||
! Now set our initial density and pressure profile
|
||||
do j=sh%jsg,sh%jeg
|
||||
do i=sh%isg,sh%ie ! Note: Not setting low lat ghosts, we want them to be zero
|
||||
|
||||
if (State%active(i,j) .eq. RAIJUINACTIVE) cycle
|
||||
|
||||
L = norm2(State%xyzMincc(i,j,XDIR:YDIR))
|
||||
vm = State%bvol_cc(i,j)**(-2./3.)
|
||||
|
||||
kt_rc = T0_TM*(Bvol0_TM/State%bvol_cc(i,j))**(2./3.)
|
||||
kt_rc = min(kt_rc, 4.0*T0_TM) ! Limit cap. Not a big fan, but without cap we get stuff that's too energetic and won't go away (until FLC maybe)
|
||||
|
||||
|
||||
P_rc = P_QTRC(L) ! From earthhelper.F90
|
||||
D_rc = PkT2Den(P_rc, kt_rc)
|
||||
|
||||
|
||||
! Finally map it to HOTP etas
|
||||
call DkT2SpcEta(Model, spc, etaCS(i,j,spc%kStart:spc%kEnd), D_rc, kt_rc, vm)
|
||||
|
||||
enddo
|
||||
enddo
|
||||
|
||||
end associate
|
||||
|
||||
end subroutine raiColdStart_initHOTP_RCOnly
|
||||
|
||||
|
||||
subroutine raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget, etaCS)
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
type(raijuState_T), intent(in) :: State
|
||||
real(rp), intent(in) :: t0
|
||||
!! Target time to pull SW values from
|
||||
real(rp), intent(in) :: dstTarget
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS
|
||||
|
||||
real(rp) :: dstTarget_p
|
||||
logical :: isInTM03
|
||||
@@ -192,8 +252,10 @@ module raijuColdStartHelper
|
||||
call InitTM03(Model%tsF,t0)
|
||||
|
||||
! Scale target Dst down to account for electrons contributing stuff later
|
||||
dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
|
||||
call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90
|
||||
if (dstTarget < 0) then
|
||||
dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
|
||||
call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90
|
||||
endif
|
||||
|
||||
! Get Borovsky statistical values
|
||||
vSW = abs(GetSWVal("Vx",Model%tsF,t0))
|
||||
@@ -218,7 +280,7 @@ module raijuColdStartHelper
|
||||
endif
|
||||
|
||||
! Set everything to zero to start
|
||||
State%eta(:,:,spc%kStart:spc%kEnd) = 0.0_rp
|
||||
etaCS(:,:,spc%kStart:spc%kEnd) = 0.0_rp
|
||||
|
||||
! Now set our initial density and pressure profile
|
||||
do j=sh%jsg,sh%jeg
|
||||
@@ -234,7 +296,11 @@ module raijuColdStartHelper
|
||||
|
||||
call EvalTM03_SM(State%xyzMincc(i,j,:),N0_ps,P0_ps,isInTM03)
|
||||
|
||||
P_rc = P_QTRC(L) ! From earthhelper.F90
|
||||
if (dstTarget < 0) then
|
||||
P_rc = P_QTRC(L) ! From earthhelper.F90
|
||||
else
|
||||
P_rc = 0.0
|
||||
endif
|
||||
|
||||
if (.not. isInTM03) then
|
||||
N0_ps = dPS_emp
|
||||
@@ -250,7 +316,7 @@ module raijuColdStartHelper
|
||||
endif
|
||||
|
||||
! Finally map it to HOTP etas
|
||||
call DkT2SpcEta(Model, spc, State%eta(i,j,spc%kStart:spc%kEnd), D_final, kt_rc, vm)
|
||||
call DkT2SpcEta(Model, spc, etaCS(i,j,spc%kStart:spc%kEnd), D_final, kt_rc, vm)
|
||||
|
||||
enddo
|
||||
enddo
|
||||
@@ -260,11 +326,12 @@ module raijuColdStartHelper
|
||||
end subroutine raiColdStart_initHOTP
|
||||
|
||||
|
||||
subroutine raiColdStart_applyCX(Model, Grid, State, spc)
|
||||
subroutine raiColdStart_applyCX(Model, Grid, State, spc, etaCS)
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
type(raijuState_T), intent(inout) :: State
|
||||
type(raijuState_T), intent(in) :: State
|
||||
type(raijuSpecies_T), intent(in) :: spc
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS
|
||||
|
||||
integer :: i,j,k
|
||||
type(raiLoss_CX_T) :: lossCX
|
||||
@@ -275,13 +342,12 @@ module raijuColdStartHelper
|
||||
call lossCX%doInit(Model, Grid, nullXML)
|
||||
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j,tau)
|
||||
!$OMP private(i,j,k,tau)
|
||||
do j=Grid%shGrid%jsg,Grid%shGrid%jeg
|
||||
do i=Grid%shGrid%isg,Grid%shGrid%ieg
|
||||
do k = spc%kStart,spc%kEnd
|
||||
tau = lossCX%calcTau(Model, Grid, State, i, j, k)
|
||||
State%eta(i,j,k) = State%eta(i,j,k)*exp(-tCX/tau)
|
||||
etaCS(i,j,k) = etaCS(i,j,k)*exp(-tCX/tau)
|
||||
enddo
|
||||
enddo
|
||||
enddo
|
||||
@@ -289,38 +355,49 @@ module raijuColdStartHelper
|
||||
end subroutine raiColdStart_applyCX
|
||||
|
||||
|
||||
subroutine raiColdStart_initHOTE(Model, Grid, State)
|
||||
subroutine raiColdStart_initHOTE(Model, Grid, State, etaCS)
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
type(raijuState_T), intent(inout) :: State
|
||||
type(raijuState_T), intent(in) :: State
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS
|
||||
|
||||
integer :: sIdx_e, sIdx_p
|
||||
integer :: i,j
|
||||
real(rp) :: press_p, den_p
|
||||
real(rp) :: kt_p, kt_e, den, vm
|
||||
|
||||
sIdx_p = spcIdx(Grid, F_HOTP)
|
||||
sIdx_e = spcIdx(Grid, F_HOTE)
|
||||
|
||||
|
||||
associate(spc_p=>Grid%spc(sIdx_p), spc_e=>Grid%spc(sIdx_e))
|
||||
! Set everything to zero to start
|
||||
State%eta(:,:,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd) = 0.0_rp
|
||||
etaCS(:,:,spc_e%kStart:spc_e%kEnd) = 0.0_rp
|
||||
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j,vm,den,kt_p,kt_e)
|
||||
!$OMP private(i,j,vm,den_p, press_p,kt_p,kt_e)
|
||||
do j=Grid%shGrid%jsg,Grid%shGrid%jeg
|
||||
do i=Grid%shGrid%isg,Grid%shGrid%ie ! Note: Not setting low lat ghosts, we want them to be zero
|
||||
if (State%active(i,j) .eq. RAIJUINACTIVE) cycle
|
||||
|
||||
vm = State%bvol_cc(i,j)**(-2./3.)
|
||||
den = State%Den(sIdx_p)%data(i,j)
|
||||
kt_p = DP2kT(den, State%Press(sIdx_p)%data(i,j))
|
||||
!den = State%Den(sIdx_p)%data(i,j)
|
||||
!kt_p = DP2kT(den, State%Press(sIdx_p)%data(i,j))
|
||||
|
||||
den_p = SpcEta2Den (spc_p, etaCS(i,j,spc_p%kStart:spc_p%kEnd), State%bvol_cc(i,j))
|
||||
press_p = SpcEta2Press(spc_p, etaCS(i,j,spc_p%kStart:spc_p%kEnd), State%bvol_cc(i,j))
|
||||
kt_p = DP2kT(den_p, press_p)
|
||||
|
||||
kt_e = kt_p / Model%tiote
|
||||
call DkT2SpcEta(Model, Grid%spc(sIdx_e), &
|
||||
State%eta(i,j,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd), &
|
||||
den, kt_e, vm)
|
||||
etaCS(i,j,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd), &
|
||||
den_p, kt_e, vm)
|
||||
enddo
|
||||
enddo
|
||||
|
||||
end associate
|
||||
|
||||
end subroutine raiColdStart_initHOTE
|
||||
|
||||
|
||||
|
||||
@@ -178,18 +178,19 @@ module raijuetautils
|
||||
end function SpcEta2Press
|
||||
|
||||
|
||||
function spcEta2DPS(Model, Grid, State, spc, isGood) result(dpsdst)
|
||||
function spcEta2DPS(Model, Grid, bVol_cc, eta, spc, isGood) result(dpsdst)
|
||||
!! Calculate total DPS-Dst for given species within the defined isGood domain
|
||||
type(raijuModel_T), intent(in) :: Model
|
||||
type(raijuGrid_T), intent(in) :: Grid
|
||||
type(raijuState_T), intent(in) :: State
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg), intent(in) :: bVol_cc
|
||||
real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(in) :: eta
|
||||
type(raijuSpecies_T), intent(in) :: spc
|
||||
logical, dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg), intent(in) :: isGood
|
||||
!! Eval mask, true = point is included in calculation
|
||||
|
||||
real(rp) :: dpsdst
|
||||
integer :: i,j,k
|
||||
real(rp) :: press, bVol, energyDen, energy
|
||||
integer :: i,j
|
||||
real(rp) :: press, energyDen, energy
|
||||
logical :: isDead = .false.
|
||||
|
||||
dpsdst = 0.0
|
||||
@@ -197,9 +198,8 @@ module raijuetautils
|
||||
do j=Grid%shGrid%jsg,Grid%shGrid%jeg
|
||||
do i=Grid%shGrid%isg,Grid%shGrid%ieg
|
||||
if (.not. isGood(i,j)) cycle
|
||||
bVol = State%bvol_cc(i,j)
|
||||
press = SpcEta2Press(spc, State%eta(i,j,spc%kStart:spc%kEnd), bVol) ! [nPa]
|
||||
energyDen = (press*1.0D-9) * (bVol*Model%planet%rp_m*1.0D9) * (Grid%Brcc(i,j)*1.0D-9)/kev2J ! p[J/m^3] * bVol[m/T] * B[T] = [J/m^2] * keV/J = [keV/m^2]
|
||||
press = SpcEta2Press(spc, eta(i,j,spc%kStart:spc%kEnd), bvol_cc(i,j)) ! [nPa]
|
||||
energyDen = (press*1.0D-9) * (bVol_cc(i,j)*Model%planet%rp_m*1.0D9) * (Grid%Brcc(i,j)*1.0D-9)/kev2J ! p[J/m^3] * bVol[m/T] * B[T] = [J/m^2] * keV/J = [keV/m^2]
|
||||
energy = energyDen*(Grid%areaCC(i,j)*Model%planet%ri_m**2) ! [keV/m^2]* Re^2[m^2] = [keV]
|
||||
dpsdst = dpsdst - 4.2*(1.0D-30)*energy ! [nT]
|
||||
enddo
|
||||
|
||||
@@ -16,7 +16,7 @@ module raijuIO
|
||||
|
||||
implicit none
|
||||
|
||||
integer, parameter, private :: MAXIOVAR = 70
|
||||
integer, parameter, private :: MAXIOVAR = 100
|
||||
logical, private :: doRoot = .true. !Whether root variables need to be written
|
||||
logical, private :: doFat = .false. !Whether to output lots of extra datalogical, private :: doRoot = .true. !Whether root variables need to be written
|
||||
|
||||
@@ -154,7 +154,7 @@ module raijuIO
|
||||
logical, optional, intent(in) :: doGhostsO
|
||||
|
||||
type(IOVAR_T), dimension(MAXIOVAR) :: IOVars
|
||||
integer :: i,j,k,s
|
||||
integer :: i,j,k,s, nClkSteps
|
||||
integer :: is, ie, js, je, ks, ke
|
||||
integer, dimension(4) :: outBnds2D
|
||||
logical :: doGhosts
|
||||
@@ -424,8 +424,14 @@ module raijuIO
|
||||
deallocate(outTmp2D)
|
||||
endif
|
||||
|
||||
call WriteVars(IOVars,.true.,Model%raijuH5, gStr)
|
||||
!Performance Metrics
|
||||
nClkSteps = readNCalls('DeepUpdate')
|
||||
call AddOutVar(IOVars, "_perf_stepTime", readClock(1)/nClkSteps)
|
||||
call AddOutVar(IOVars, "_perf_preAdvance", readClock("Pre-Advance")/nClkSteps)
|
||||
call AddOutVar(IOVars, "_perf_advanceState", readClock("AdvanceState")/nClkSteps)
|
||||
call AddOutVar(IOVars, "_perf_moments", readClock("Moments Eval")/nClkSteps)
|
||||
|
||||
call WriteVars(IOVars,.true.,Model%raijuH5, gStr)
|
||||
|
||||
! Any extra groups to add
|
||||
if (Model%doLosses .and. Model%doOutput_3DLoss) then
|
||||
@@ -497,6 +503,12 @@ module raijuIO
|
||||
else
|
||||
call AddOutVar(IOVars,"cs_doneFirstCS", 0)
|
||||
endif
|
||||
if (State%coldStarter%doCS_next_preAdv) then
|
||||
call AddOutVar(IOVars,"cs_doCS_next_preAdv", 1)
|
||||
else
|
||||
call AddOutVar(IOVars,"cs_doCS_next_preAdv", 0)
|
||||
endif
|
||||
call AddOutVar(IOVars, "cs_modelDst_next_preAdv", State%coldStarter%modelDst_next_preAdv)
|
||||
call AddOutVar(IOVars, "cs_lastEval", State%coldStarter%lastEval)
|
||||
call AddOutVar(IOVars, "cs_lastTarget", State%coldStarter%lastTarget)
|
||||
|
||||
@@ -592,6 +604,8 @@ module raijuIO
|
||||
call AddInVar(IOVars,"nRes")
|
||||
call AddInVar(IOVars,"tRes")
|
||||
call AddInVar(IOVars,"isFirstCpl")
|
||||
call AddInVar(IOVars,"cs_doCS_next_preAdv")
|
||||
call AddInVar(IOVars,"cs_modelDst_next_preAdv")
|
||||
call AddInVar(IOVars,"cs_doneFirstCS")
|
||||
call AddInVar(IOVars,"cs_lastEval")
|
||||
call AddInVar(IOVars,"cs_lastTarget")
|
||||
@@ -645,12 +659,15 @@ module raijuIO
|
||||
! Coldstarter
|
||||
State%coldStarter%lastEval = GetIOReal(IOVars, "cs_lastEval")
|
||||
State%coldStarter%lastTarget = GetIOReal(IOVars, "cs_lastTarget")
|
||||
State%coldStarter%modelDst_next_preAdv = GetIOReal(IOVars, "cs_modelDst_next_preAdv")
|
||||
|
||||
! Handle boolean attributes
|
||||
tmpInt = GetIOInt(IOVars, "isFirstCpl")
|
||||
State%isFirstCpl = tmpInt .eq. 1
|
||||
tmpInt = GetIOInt(IOVars, "cs_doneFirstCS")
|
||||
State%coldStarter%doneFirstCS = tmpInt .eq. 1
|
||||
tmpInt = GetIOInt(IOVars, "cs_doCS_next_preAdv")
|
||||
State%coldStarter%doCS_next_preAdv = tmpInt .eq. 1
|
||||
|
||||
call IOArray2DFill(IOVars, "xmin" , State%xyzMin(:,:,XDIR))
|
||||
call IOArray2DFill(IOVars, "ymin" , State%xyzMin(:,:,YDIR))
|
||||
|
||||
@@ -131,7 +131,7 @@ module raijuOut
|
||||
if (maxP_MLT > 24) maxP_MLT = maxP_MLT - 24D0
|
||||
write(*,'(a,I0,a,f7.2,a,f7.2,a,f5.2,a,f5.2,a,f7.2)') ' ', &
|
||||
Grid%spc(s)%flav, ': P =', maxPress,', D =',maxDen,' @ ',maxP_L,' Rp,',maxP_MLT, &
|
||||
" MLT; DPS:",spcEta2DPS(Model, Grid, State, Grid%spc(sIdx), State%active .eq. RAIJUACTIVE)
|
||||
" MLT; DPS:",spcEta2DPS(Model, Grid, State%bvol_cc, State%eta_avg, Grid%spc(sIdx), State%active .eq. RAIJUACTIVE)
|
||||
|
||||
enddo
|
||||
write(*,'(a)',advance="no") ANSIRESET
|
||||
|
||||
@@ -36,12 +36,18 @@ module raijuPreAdvancer
|
||||
call fillArray(State%eta_avg, 0.0_rp)
|
||||
! (losses handled in updateRaiLosses)
|
||||
|
||||
! Now that topo is set, we can calculate active domain
|
||||
call setActiveDomain(Model, Grid, State)
|
||||
|
||||
! Moments to etas, initial active shell calculation
|
||||
call Tic("BCs")
|
||||
call applyRaijuBCs(Model, Grid, State, doWholeDomainO=State%isFirstCpl) ! If fullEtaMap=True, mom2eta map is applied to the whole domain
|
||||
if (State%isFirstCpl) then
|
||||
call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
|
||||
endif
|
||||
call Toc("BCs")
|
||||
|
||||
! Handle plasmaasphere refilling for the full step about to happen
|
||||
! Handle plasmasphere refilling for the full step about to happen
|
||||
call plasmasphereRefill(Model,Grid,State)
|
||||
|
||||
! Handle edge cases that may effect the validity of information carried over from last coupling period
|
||||
@@ -261,7 +267,7 @@ module raijuPreAdvancer
|
||||
|
||||
associate(sh=>Grid%shGrid)
|
||||
! Gauss-Green calculation of cell-averaged gradients
|
||||
call potExB(Grid%shGrid, State, pExB, doSmoothO=.true., isGCornerO=isGCorner) ! [V]
|
||||
call potExB(Grid%shGrid, State, pExB, doSmoothO=Model%doSmoothGrads, isGCornerO=isGCorner) ! [V]
|
||||
call potCorot(Model%planet, Grid%shGrid, pCorot, Model%doGeoCorot) ! [V]
|
||||
call calcGradIJ_cc(Model%planet%rp_m, Grid, isGCorner, pExB , State%gradPotE_cc , doLimO=.true. ) ! [V/m]
|
||||
call calcGradIJ_cc(Model%planet%rp_m, Grid, isGCorner, pCorot, State%gradPotCorot_cc, doLimO=.false.) ! [V/m]
|
||||
@@ -270,7 +276,7 @@ module raijuPreAdvancer
|
||||
! lambda is constant, so just need grad(V^(-2/3) )
|
||||
call calcGradVM_cc(Model%planet%rp_m, Model%planet%ri_m, Model%planet%magMoment, &
|
||||
Grid, isGCorner, State%bvol, State%gradVM_cc, &
|
||||
doSmoothO=.true., doLimO=.true.)
|
||||
doSmoothO=Model%doSmoothGrads, doLimO=.true.)
|
||||
end associate
|
||||
|
||||
end subroutine calcPotGrads_cc
|
||||
@@ -304,7 +310,6 @@ module raijuPreAdvancer
|
||||
associate(sh=>Grid%shGrid)
|
||||
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j,qLow,qHigh)
|
||||
do j=sh%jsg,sh%jeg
|
||||
do i=sh%isg,sh%ieg
|
||||
@@ -332,7 +337,6 @@ module raijuPreAdvancer
|
||||
allocate(gradQtmp(sh%isg:sh%ieg,sh%jsg:sh%jeg, 2))
|
||||
gradQtmp = gradQ
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j)
|
||||
do j=sh%jsg+1,sh%jeg-1
|
||||
do i=sh%isg+1,sh%ieg-1
|
||||
@@ -483,13 +487,19 @@ module raijuPreAdvancer
|
||||
gradVM(:,:,RAI_TH) = gradVM(:,:,RAI_TH) + dV0_dth
|
||||
|
||||
!$OMP PARALLEL DO default(shared) &
|
||||
!$OMP schedule(dynamic) &
|
||||
!$OMP private(i,j,bVolcc)
|
||||
do j=sh%jsg,sh%jeg
|
||||
do i=sh%isg,sh%ieg
|
||||
bVolcc = toCenter2D(dV(i:i+1,j:j+1)) + DipFTV_colat(Grid%thcRp(i), B0) ! Will include smoothing of dV if enabled
|
||||
!bVolcc = toCenter2D(V(i:i+1,j:j+1))
|
||||
gradVM(i,j,:) = (-2./3.)*bVolcc**(-5./3.)*gradVM(i,j,:)
|
||||
if(all(isGcorner(i:i+1,j:j+1))) then
|
||||
!bVolcc = toCenter2D(dV(i:i+1,j:j+1)) + DipFTV_colat(Grid%thcRp(i), B0) ! Will include smoothing of dV if enabled
|
||||
bVolcc = toCenter2D(V(i:i+1,j:j+1))
|
||||
gradVM(i,j,:) = (-2./3.)*bVolcc**(-5./3.)*gradVM(i,j,:)
|
||||
else
|
||||
! gradVM should be zero for this point coming out of calcGradIJ_cc, but set to dipole value just in case
|
||||
gradVM(i,j,RAI_PH) = 0.0
|
||||
gradVM(i,j,RAI_TH) = 0.0
|
||||
!gradVM(i,j,RAI_TH) = (-2./3.)*DipFTV_colat(Grid%thcRp(i), B0)**(-5./3.)*dV0_dth(i,j)
|
||||
endif
|
||||
enddo
|
||||
enddo
|
||||
|
||||
@@ -564,7 +574,6 @@ module raijuPreAdvancer
enddo
! Now everyone else
!$OMP PARALLEL DO default(shared) &
!$OMP schedule(dynamic) &
!$OMP private(i,j)
do j=jsg+1,jeg
do i=isg+1,ieg
@@ -572,6 +581,8 @@ module raijuPreAdvancer
enddo
enddo

call wrapJcorners(sh, Vsm)

! Write back to provided array
V = Vsm
end associate
@@ -175,6 +175,7 @@ module raijustarter
Model%activeDomRad = abs(Model%activeDomRad)

!---Solver ---!
call iXML%Set_Val(Model%doSmoothGrads,'sim/doSmoothGrads',def_doSmoothGrads)
call iXML%Set_Val(Model%doUseVelLRs,'sim/useVelLRs',def_doUseVelLRs)
call iXML%Set_Val(Model%maxItersPerSec,'sim/maxIter',def_maxItersPerSec)
call iXML%Set_Val(Model%maxOrder,'sim/maxOrder',7)
@@ -553,6 +554,8 @@ module raijustarter
! Similarly, set vaFrac to safe value in case stand-alone never writes to it
State%vaFrac = 1.0

State%isFirstCpl = .true.

! Init State sub-modules
if (Model%isSA) then
! If we are standalone, this is the place to get coldStarter settings
@@ -231,7 +231,7 @@ module imag2mix_interface
imP_avg(RAI_EDEN) = imP_avg(RAI_EDEN )/nPnts**2
imP_avg(RAI_EPRE) = imP_avg(RAI_EPRE )/nPnts**2
imP_avg(RAI_NPSP) = imP_avg(RAI_NPSP )/nPnts**2
imP_avg(RAI_EAVG) = imP_avg(RAI_EFLUX) / imP_avg(RAI_ENFLX)
imP_avg(RAI_EAVG) = imP_avg(RAI_EFLUX) / max(TINY, imP_avg(RAI_ENFLX))
imP_avg(RAI_GTYPE) = imP_avg(RAI_GTYPE)/nGood
imP_avg(RAI_THCON) = imP_avg(RAI_THCON)/nGood
imP_avg(RAI_PHCON) = imP_avg(RAI_PHCON)/nGood
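The one-line change above floors the denominator so the averaged energy stays finite when the averaged number flux is effectively zero. A minimal Python sketch of the same guard; TINY's value and the argument names are illustrative stand-ins, not the module's actual constants:

TINY = 1.0e-30

def mean_energy(eflux, nflux):
    """Mean energy = energy flux / number flux, floored so nflux ~ 0 cannot blow up."""
    return eflux / max(TINY, nflux)

print(mean_energy(1.0e-3, 2.0e-4))  # ordinary case: 5.0
print(mean_energy(1.0e-3, 0.0))     # guarded case: large but finite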
@@ -43,11 +43,7 @@ submodule (volttypes) raijuCplTypesSub
! Update MJD with whatever voltron handed us
! If we are restarting, this will get replaced with whatever's in file later
App%raiApp%State%mjd = App%opt%mjd0
write(*,*)"MJD0=",App%opt%mjd0
if (App%opt%doColdStart) then
! We are gonna cold start, so ignore plasma ingestion rules for first coupling
App%raiApp%State%isFirstCpl = .false.
endif

! Then allocate and initialize coupling variables based on raiju app
call raijuCpl_init(App, xml)

@@ -58,61 +54,29 @@ submodule (volttypes) raijuCplTypesSub
class(raijuCoupler_T), intent(inout) :: App
class(voltApp_T), intent(inout) :: vApp

logical :: doFirstColdStart
logical :: doUpdateColdStart
real(rp) :: BSDst

doFirstColdStart = .false.
doUpdateColdStart = .false.

associate(raiApp=>App%raiApp)

! If we are running realtime, its our job to get everything we need from vApp into raiCpl
if (.not. App%raiApp%Model%isSA) then
! First, determine if we should cold start, i.e. Completely reset raiju's eta's to match some target conditions
! Determine if we should cold start before packing coupler because it will set tLastUpdate to vApp%time and then we can't do the checks we want
! But actually do cold start after coupler packing completes so we can use real field line info

! Do we do our very first coldstart ever
if (App%opt%doColdStart .and. App%tLastUpdate < 0.0 .and. vApp%time >= 0.0) then
doFirstColdStart = .true.
endif
! Do we do "updates" to our coldstart during pre-conditioning period
if(App%opt%doColdStart .and. App%tLastUpdate > 0.0 .and. vApp%time < App%startup_blendTscl) then
doUpdateColdStart = .true.
endif

call packRaijuCoupler_RT(App, vApp)
endif

! Someone updated raiCpl's coupling variables by now, stuff it into RAIJU proper
call raiCpl2RAIJU(App)

if (.not. raiApp%State%coldStarter%doneFirstCS .or. vApp%time < raiApp%State%coldStarter%tEnd) then
!! Make sure we run at least once
call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
! Calc voltron dst ourselves since vApp%BSDst is only set on console output
call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst)
endif
!if (doFirstColdStart) then
! ! Its happening, everybody stay calm
! write(*,*) "RAIJU Doing first cold start..."
! ! NOTE: By this point we have put coupling info into raiju (e.g. bVol, xyzmin, MHD moments)
! ! But haven't calculated active domain yet because that happens in preadvancer
! ! So we jump in and do it here so we have it for cold starting
! call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
! ! Calc voltron dst ourselves since vApp%BSDst is only set on console output
! call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
! call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst, doCXO=App%doColdstartCX,doPsphO=.true.)
!endif
!if (doUpdateColdStart) then
! write(*,*)"RAIJU doing update cold start at t=",vApp%time
! write(*,*)" (calculating model BSDst,)",vApp%time
! call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
! call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
! call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst, doCXO=App%doColdstartCX,doPsphO=.false.)
!endif
associate(cs=>raiApp%State%coldStarter)
if (.not. cs%doneFirstCS .or. (cs%doUpdate .and. vApp%time < cs%tEnd) ) then
!! Make sure we run at least once
! Calc voltron dst ourselves since vApp%BSDst is only set on console output
call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
raiApp%State%coldStarter%doCS_next_preAdv = .true.
raiApp%State%coldStarter%modelDst_next_preAdv = BSDst
!call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
!call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst)
endif
end associate
end associate
end subroutine volt2RAIJU
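The rewritten block above no longer performs the cold start inside volt2RAIJU; it only records that one should run on the next pre-advance, together with the model Dst to use. A minimal Python sketch of that deferred-decision pattern; the field names are borrowed from the hunk, but the data structure and values are purely illustrative:

def flag_cold_start(cs, time, model_dst):
    """Record that a cold start should run on the next pre-advance, instead of doing it here."""
    if (not cs["doneFirstCS"]) or (cs["doUpdate"] and time < cs["tEnd"]):
        cs["doCS_next_preAdv"] = True
        cs["modelDst_next_preAdv"] = model_dst

cs = {"doneFirstCS": False, "doUpdate": True, "tEnd": 3600.0,
      "doCS_next_preAdv": False, "modelDst_next_preAdv": 0.0}
flag_cold_start(cs, time=0.0, model_dst=-25.0)
print(cs["doCS_next_preAdv"], cs["modelDst_next_preAdv"])  # True -25.0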
@@ -299,21 +263,22 @@ submodule (volttypes) raijuCplTypesSub
endif
enddo


! Mock up cold electrons balancing hot protons and see if it produces meaningful flux
call InterpShellVar_TSC_pnt(sh, State%Den_avg(idx_proton) , th, ph, d_ion)
call InterpShellVar_TSC_pnt(sh, State%Press_avg(idx_proton) , th, ph, p_ion)
pie_frac = 0.05 ! Fraction of ion pressure contained by these neutralizing electrons
pe_kT = DP2kT(d_ion, p_ion*pie_frac) ! [keV]
pe_nflux = imP(RAI_ENFLX)*d_ion/d_hot ! Scale number flux to same loss processes except there were d_ion density instead of d_electron density
pe_eflux = (pe_kT*kev2erg)*pe_nflux ! [erg/cm^2/s]
if (pe_eflux > imP(RAI_EFLUX)) then ! Use in place of normal flux only if energy flux for these are greater than hot electron channel fluxes
imP(RAI_EFLUX) = pe_eflux
imP(RAI_ENFLX) = pe_nflux
imP(RAI_EDEN ) = d_ion*1.0e6 ! [#/m^3]
imP(RAI_EPRE ) = p_ion*pie_frac*1.0e-9 ! [Pa]
endif
endif
!K: Removing this code for now, should be rewritten to use the MHD D,P => precip routines
! call InterpShellVar_TSC_pnt(sh, State%Den_avg(idx_proton) , th, ph, d_ion)
! call InterpShellVar_TSC_pnt(sh, State%Press_avg(idx_proton) , th, ph, p_ion)
! pie_frac = 0.05 ! Fraction of ion pressure contained by these neutralizing electrons
! pe_kT = DP2kT(d_ion, p_ion*pie_frac) ! [keV]
! pe_nflux = imP(RAI_ENFLX)*d_ion/d_hot ! Scale number flux to same loss processes except there were d_ion density instead of d_electron density
! pe_eflux = (pe_kT*kev2erg)*pe_nflux ! [erg/cm^2/s]
! if (pe_eflux > imP(RAI_EFLUX)) then ! Use in place of normal flux only if energy flux for these are greater than hot electron channel fluxes
! imP(RAI_EFLUX) = pe_eflux
! imP(RAI_ENFLX) = pe_nflux
! imP(RAI_EDEN ) = d_ion*1.0e6 ! [#/m^3]
! imP(RAI_EPRE ) = p_ion*pie_frac*1.0e-9 ! [Pa]
! endif

endif !spcList(s)%spcType == X
enddo
! derive mean energy where nflux is non-trivial.
if (imP(RAI_ENFLX) > TINY) imP(RAI_EAVG) = imP(RAI_EFLUX)/imP(RAI_ENFLX) * erg2kev ! Avg E [keV]
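The now-commented-out block above estimated a cold-electron precipitation channel from the proton moments: the electrons carry a small fraction of the ion pressure, the hot-electron number flux is rescaled to the ion density, and the energy flux follows as kT times the number flux. A minimal Python sketch of that estimate; DP2kT's unit conventions are not shown in the diff, so the nPa and cm^-3 to keV conversion and all input values here are assumptions, not the model's actual routine:

KEV2ERG = 1.602e-9  # erg per keV

def cold_electron_channel(d_ion, p_ion, nflux_hot, d_hot, pie_frac=0.05):
    """Estimate (energy flux [erg/cm^2/s], number flux) of electrons neutralizing hot protons.

    Assumed units: densities in cm^-3, pressure in nPa, number flux in #/cm^2/s.
    """
    kT_keV   = 6.242 * (p_ion * pie_frac) / d_ion  # kT = P/n, converted from nPa / cm^-3 to keV
    pe_nflux = nflux_hot * d_ion / d_hot           # rescale hot-electron number flux to ion density
    pe_eflux = kT_keV * KEV2ERG * pe_nflux         # energy flux in erg/cm^2/s
    return pe_eflux, pe_nflux

print(cold_electron_channel(d_ion=5.0, p_ion=2.0, nflux_hot=1.0e8, d_hot=0.5))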
@@ -356,9 +356,10 @@ module voltapp_mpi

if(.not. vApp%doSerialMHD) call vApp%gApp%StartUpdateMhdData(vApp)

call Tic("DeepUpdate")
call Tic("DeepUpdate",.true.)
call DeepUpdate_mpi(vApp)
call Toc("DeepUpdate")
call Toc("DeepUpdate",.true.)
vApp%ts = vApp%ts + 1

if(vApp%doSerialMHD) call vApp%gApp%StartUpdateMhdData(vApp)

@@ -404,8 +405,6 @@ module voltapp_mpi

! only do imag after spinup
if(vApp%doDeep .and. vApp%time >= 0) then
call Tic("DeepUpdate", .true.)

if(vApp%useHelpers) call vhReqStep(vApp)

! instead of PreDeep, use Tube Helpers and replicate other calls
@@ -442,7 +441,6 @@ module voltapp_mpi
call DoImag(vApp)

vApp%deepProcessingInProgress = .true.
call Toc("DeepUpdate", .true.)
elseif(vApp%doDeep) then
vApp%gApp%Grid%Gas0 = 0
!Load TM03 into Gas0 for ingestion during spinup
@@ -459,7 +457,6 @@ module voltapp_mpi

! only do imag after spinup with deep enabled
if(vApp%doDeep .and. vApp%time >= 0) then
call Tic("DeepUpdate", .true.)

do while(SquishBlocksRemain(vApp))
call Tic("Squish",.true.)
@@ -477,7 +474,6 @@ module voltapp_mpi

call SquishEnd(vApp)
call PostDeep(vApp, vApp%gApp)
call Toc("DeepUpdate", .true.)
endif

end subroutine endDeep
@@ -496,11 +492,9 @@ module voltapp_mpi
if(.not. vApp%deepProcessingInProgress) return

if(SquishBlocksRemain(vApp)) then
call Tic("DeepUpdate")
call Tic("Squish",.true.)
call DoSquishBlock(vApp)
call Toc("Squish",.true.)
call Toc("DeepUpdate")
endif

if(.not. SquishBlocksRemain(vApp)) then
@@ -99,6 +99,8 @@ module voltapp

! adjust XML reader root
call xmlInp%SetRootStr('Kaiju/Voltron')
! Make sure verbosity is still right after others do stuff with the reader
call xmlInp%SetVerbose(vApp%isLoud)

!Initialize planet information
call getPlanetParams(vApp%planet, xmlInp)
@@ -209,7 +211,7 @@ module voltapp
gApp%Model%t = vApp%time / gApp%Model%Units%gT0
gApp%State%time = gApp%Model%t

call genVoltShellGrid(vApp, xmlInp)
call genVoltShellGrid(vApp, xmlInp, gApp%Grid%Nkp)
call initVoltState(vApp)

endif
@@ -334,9 +336,10 @@ module voltapp
! update the next predicted coupling interval
vApp%DeepT = vApp%DeepT + vApp%DeepDT

call Tic("DeepUpdate")
call Tic("DeepUpdate",.true.)
call DeepUpdate(vApp, vApp%gApp)
call Toc("DeepUpdate")
call Toc("DeepUpdate",.true.)
vApp%ts = vApp%ts + 1

call vApp%gApp%StartUpdateMhdData(vApp)

@@ -753,11 +756,14 @@ module voltapp
end subroutine init_volt2Chmp


subroutine genVoltShellGrid(vApp, xmlInp)
subroutine genVoltShellGrid(vApp, xmlInp, gamRes)
class(voltApp_T) , intent(inout) :: vApp
type(XML_Input_T), intent(in) :: xmlInp
integer, intent(in) :: gamRes

character(len=strLen) :: gType
integer :: Nt_def, Np_def
!! Default number of active cells in theta and phi unless xml says otherwise
integer :: Nt, Np
!! Number of active cells in theta and phi
integer :: Ng
@@ -785,8 +791,30 @@ module voltapp
! Note: Nt is for a single hemisphere, we will manually double it in a minute
! TODO: This means we will always have even number of total cells, and a cell interface right on the equator
! Can upgrade to allow for odd number later
call xmlInp%Set_Val(Nt, "grid/Nt", 180 ) ! 1 deg res default for uniform grid
call xmlInp%Set_Val(Np, "grid/Np", 360) ! 1 deg res default

! First determine defaults
if (gamRes<=64) then
! DBL
Nt_def = 90
Np_def = 180
else if (gamRes<=128) then
! QUAD
Nt_def = 180
Np_def = 360
else if (gamRes<=256) then
! OCT
Nt_def = 360
Np_def = 720
else
! HEX or above
! Idk good luck
Nt_def = 540
Np_def = 1440
endif


call xmlInp%Set_Val(Nt, "grid/Nt", Nt_def) ! 1 deg res default for uniform grid
call xmlInp%Set_Val(Np, "grid/Np", Np_def) ! 1 deg res default
! Ghost cells
call xmlInp%Set_Val(Ng, "grid/Ng", 4) ! # of ghosts in every direction
nGhosts = 0
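The new block above ties the default shell-grid resolution to the Gamera resolution passed in as gamRes, with the XML values still able to override the defaults. A minimal Python sketch of the same tiering; the function name is illustrative, while the thresholds and defaults are copied directly from the hunk:

def default_shell_res(gam_res):
    """Return (Nt_def, Np_def) for one hemisphere, keyed to the Gamera resolution."""
    if gam_res <= 64:      # DBL
        return 90, 180
    elif gam_res <= 128:   # QUAD
        return 180, 360
    elif gam_res <= 256:   # OCT
        return 360, 720
    else:                  # HEX or above
        return 540, 1440

print(default_shell_res(64), default_shell_res(128), default_shell_res(512))
# (90, 180) (180, 360) (540, 1440)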
@@ -389,7 +389,7 @@ module voltio
type(IOVAR_T), dimension(MAXVOLTIOVAR) :: IOVars
real(rp) :: symh

integer :: is,ie,js,je
integer :: is,ie,js,je,nClkSteps
real(rp) :: Csijk,Con(NVAR)
real(rp) :: BSDst0,AvgBSDst,DPSDst,BSSMRs(4)
integer, dimension(4) :: outSGVBnds_corner
@@ -444,6 +444,16 @@ module voltio
call AddOutVar(IOVars,"MJD" ,vApp%MJD)
call AddOutVar(IOVars,"timestep",vApp%ts)

!Performance metrics
nClkSteps = readNCalls('DeepUpdate')
call AddOutVar(IOVars,"_perf_stepTime",readClock(1)/nClkSteps)
call AddOutVar(IOVars,"_perf_deepUpdateTime",readClock(1)/nClkSteps)
call AddOutVar(IOVars,"_perf_gamTime", readClock('GameraSync')/nClkSteps)
call AddOutVar(IOVars,"_perf_squishTime", (readClock('Squish')+readClock('VoltHelpers'))/nClkSteps)
call AddOutVar(IOVars,"_perf_imagTime", readClock('InnerMag')/nClkSteps)
call AddOutVar(IOVars,"_perf_mixTime", readClock('ReMIX')/nClkSteps)
call AddOutVar(IOVars,"_perf_tubesTime", readClock('VoltTubes')/nClkSteps)
call AddOutVar(IOVars,"_perf_ioTime", readClock('IO')/nClkSteps)

! voltState stuff
call AddOutSGV(IOVars, "Potential_total", vApp%State%potential_total, &
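The new performance outputs above divide each accumulated timer by the number of DeepUpdate calls, so what gets written is the average wall time per coupling step. A minimal Python sketch of that normalization; the timer store and names below are illustrative stand-ins, not the Tic/Toc/readClock API itself:

from collections import defaultdict

clocks = defaultdict(float)  # accumulated seconds per named timer
ncalls = defaultdict(int)    # how many times each timer was started

def per_step(name, steps_key="DeepUpdate"):
    """Average seconds per coupling step for one accumulated timer."""
    n = max(1, ncalls[steps_key])
    return clocks[name] / n

clocks["DeepUpdate"], ncalls["DeepUpdate"] = 120.0, 60
clocks["Squish"] = 30.0
print(per_step("DeepUpdate"), per_step("Squish"))  # 2.0 0.5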
@@ -20,11 +20,11 @@ def create_command_line_parser():
parser = argparse.ArgumentParser(description="Script to help setup automated tests within a kaiju repo")

parser.add_argument(
"-A", required=True,
"-A", default="",
help="Charge code to use when running tests."
)
parser.add_argument(
"-ce", required=True,
"-ce", default="",
help="Conda environment name to load with conda module"
)
parser.add_argument(
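The change above demotes -A and -ce from required arguments to optional ones with empty defaults, so main() can fall back to environment-derived values and only abort when a requested test actually needs them. A minimal Python sketch of that pattern; the argument names match the diff, but the fallback rule shown is a simplification of the checks in main():

import argparse
import os
import sys

parser = argparse.ArgumentParser()
parser.add_argument("-A", default="", help="Charge code to use when running tests.")
parser.add_argument("-ce", default="", help="Conda environment name to load with conda module")
args = parser.parse_args([])  # empty argv, purely for illustration

# Fall back to the already-loaded conda environment, and only fail if nothing is available.
if not args.ce:
    args.ce = os.environ.get("CONDA_DEFAULT_ENV", "")
    if not args.ce:
        sys.exit("No conda environment supplied or currently loaded.")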
@@ -80,23 +80,58 @@ def main():

# Parse the command-line arguments.
args = parser.parse_args()

# Adjust test options
if args.all:
args.unitTests = True
args.weeklyDash = True
args.compTests = True
args.compTestsFull = True
args.buildTests = True
args.icTests = True
args.intelChecks = True
args.reproTests = True

if args.compTestsFull:
args.compTests = False

if not (args.unitTests or args.weeklyDash or args.compTests or args.compTestsFull or
args.buildTests or args.icTests or args.intelChecks or args.reproTests):
parser.print_help()
exit()

# find repo home directory
called_from = os.path.dirname(os.path.abspath(__file__))
os.chdir(called_from)
os.chdir('..')
homeDir = os.getcwd()


# Check for necessary environment variables
if 'KAIJUHOME' not in os.environ:
print("The setupEnvironment.sh script must be sourced for the repo this script resides in before calling it.")
if len(args.ce) == 0 and 'CONDA_DEFAULT_ENV' not in os.environ:
print("A conda environment name was not supplied, and a currently loaded conda environment could not be determined.")
print("Please either supply the name of a conda environment with the '-ce <name>' option,")
print(" or load an environment before running this script, and it should be automatically found.")
exit()
elif len(args.ce) == 0:
args.ce = os.environ['CONDA_DEFAULT_ENV']
print(f"Automatically setting conda environment to {args.ce}")
if len(args.A) == 0 and (args.unitTests or args.weeklyDash or
args.compTests or args.compTestsFull or args.intelChecks or args.reproTests):
print("A charge code was not supplied, but the requested tests require one.")
print("Please supply a charge code with the -A # option.")
exit()
if 'KAIJUHOME' not in os.environ:
os.environ['KAIJUHOME'] = homeDir
print(f"Running tests out of local git repository: {homeDir}")
if pathlib.Path(homeDir).resolve() != pathlib.Path(os.environ['KAIJUHOME']).resolve():
print("The setupEnvironment.sh script must be sourced for the repo this script resides in before calling it.")
exit()
if 'KAIPYHOME' not in os.environ:
print("The setupEnvironment.sh script for ANY kaipy repo must be sourced before calling this.")
if 'KAIPYHOME' not in os.environ and (args.weeklyDash or args.compTests or args.compTestsFull):
print("The 'KAIPYHOME' environment variable was not set, but the requested tests require it.")
print("The setupEnvironment.sh script for ANY kaipy repo must be sourced before running these tests.")
exit()
elif 'KAIPYHOME' not in os.environ:
os.environ['KAIPYHOME'] = ""

# Set environment variables
os.environ['MAGE_TEST_ROOT'] = homeDir
@@ -124,22 +159,10 @@ def main():


print(f"Running tests on branch {gitBranch}")
print(f"Using charge code {args.A} with priority {args.p}")
print(f"Running in folder {test_set_dir}")

# Adjust test options
if args.all:
args.unitTests = True
args.weeklyDash = True
args.compTests = True
args.compTestsFull = True
args.buildTests = True
args.icTests = True
args.intelChecks = True
args.reproTests = True

if args.compTestsFull:
args.compTests = False
if len(args.A) > 0:
print(f"Using charge code {args.A} with priority {args.p}")
print(f"Running in folder test_runs/{test_set_dir}")
print("")

# Run Tests
if args.unitTests:
@@ -8,7 +8,7 @@
<coupling dtCouple="5.0" imType="RAIJU" doQkSquish="T" qkSquishStride="2" doAsyncCoupling="F"/>
<restart dtRes="10800.0"/>
<imag doInit="T"/>
<threading NumTh="64"/>
<threading NumTh="128"/>
<!-- without quick squish, estimated 13 helpers required -->
<helpers numHelpers="2" useHelpers="T" doSquishHelp="T"/>
</VOLTRON>
@@ -9,6 +9,8 @@ module testVoltGridGen
implicit none

character(len=strLen) :: xmlName = 'voltGridTests.xml'
integer, parameter :: gamRes = 64
!! genVoltShellGrid expects a gamera resolution to determine defaults for its own

contains

@@ -36,7 +38,7 @@ module testVoltGridGen
call xmlInp%Set_Val(Nt, "grid/Nt", -1) ! -1 so things blow up if xml isn't set properly
call xmlInp%Set_Val(Np, "grid/Np", -1) ! -1 so things blow up if xml isn't set properly

call genVoltShellGrid(vApp, xmlInp)
call genVoltShellGrid(vApp, xmlInp, gamRes)

@assertEqual(vApp%shGrid%Nt, 2*Nt, "Wrong amount of theta cells")
@assertEqual(vApp%shGrid%Np, Np , "Wrong amount of phi cells")
@@ -62,7 +64,7 @@ module testVoltGridGen
call xmlInp%Set_Val(Nt, "grid/Nt", -1) ! -1 so things blow up if xml isn't set properly
call xmlInp%Set_Val(Np, "grid/Np", -1) ! -1 so things blow up if xml isn't set properly

call genVoltShellGrid(vApp, xmlInp)
call genVoltShellGrid(vApp, xmlInp, gamRes)

@assertEqual(2*Nt, vApp%shGrid%Nt, "Wrong amount of theta cells")
@assertEqual( Np, vApp%shGrid%Np, "Wrong amount of phi cells")