Mirror of https://github.com/JHUAPL/kaiju.git (synced 2026-01-08 22:58:05 -05:00)
Merged development into dev_atiken_update
scripts/makeitso/makeitso.py: 0 lines changed, Normal file → Executable file
@@ -15,7 +15,7 @@ module arrayutil
   subroutine fillArray1D_R(array, val)
-     real(rp), dimension(:), allocatable, intent(inout) :: array
+     real(rp), dimension(:), intent(inout) :: array
      real(rp), intent(in) :: val

      integer :: lbds(1),ubds(1),i
@@ -37,7 +37,7 @@ module arrayutil
   end subroutine fillArray1D_R

   subroutine fillArray1D_I(array, val)
-     integer, dimension(:), allocatable, intent(inout) :: array
+     integer, dimension(:), intent(inout) :: array
      integer, intent(in) :: val

      integer :: lbds(1),ubds(1),i
@@ -59,7 +59,7 @@ module arrayutil
   end subroutine fillArray1D_I

   subroutine fillArray2D_R(array, val)
-     real(rp), dimension(:,:), allocatable, intent(inout) :: array
+     real(rp), dimension(:,:), intent(inout) :: array
      real(rp), intent(in) :: val

      integer :: lbds(2),ubds(2),i,j
@@ -83,7 +83,7 @@ module arrayutil
   end subroutine fillArray2D_R

   subroutine fillArray2D_I(array, val)
-     integer, dimension(:,:), allocatable, intent(inout) :: array
+     integer, dimension(:,:), intent(inout) :: array
      integer, intent(in) :: val

      integer :: lbds(2),ubds(2),i,j
@@ -107,7 +107,7 @@ module arrayutil
   end subroutine fillArray2D_I

   subroutine fillArray3D_R(array, val)
-     real(rp), dimension(:,:,:), allocatable, intent(inout) :: array
+     real(rp), dimension(:,:,:), intent(inout) :: array
      real(rp), intent(in) :: val

      integer :: lbds(3),ubds(3),i,j,k
@@ -133,7 +133,7 @@ module arrayutil
   end subroutine fillArray3D_R

   subroutine fillArray3D_I(array, val)
-     integer, dimension(:,:,:), allocatable, intent(inout) :: array
+     integer, dimension(:,:,:), intent(inout) :: array
      integer, intent(in) :: val

      integer :: lbds(3),ubds(3),i,j,k
@@ -159,7 +159,7 @@ module arrayutil
   end subroutine fillArray3D_I

   subroutine fillArray4D_R(array, val)
-     real(rp), dimension(:,:,:,:), allocatable, intent(inout) :: array
+     real(rp), dimension(:,:,:,:), intent(inout) :: array
      real(rp), intent(in) :: val

      integer :: lbds(4),ubds(4),i,j,k
@@ -185,7 +185,7 @@ module arrayutil
   end subroutine fillArray4D_R

   subroutine fillArray4D_I(array, val)
-     integer, dimension(:,:,:,:), allocatable, intent(inout) :: array
+     integer, dimension(:,:,:,:), intent(inout) :: array
      integer, intent(in) :: val

      integer :: lbds(4),ubds(4),i,j,k
@@ -211,7 +211,7 @@ module arrayutil
   end subroutine fillArray4D_I

   subroutine fillArray5D_R(array, val)
-     real(rp), dimension(:,:,:,:,:), allocatable, intent(inout) :: array
+     real(rp), dimension(:,:,:,:,:), intent(inout) :: array
      real(rp), intent(in) :: val

      integer :: lbds(5),ubds(5),i,j,k
@@ -237,7 +237,7 @@ module arrayutil
   end subroutine fillArray5D_R

   subroutine fillArray5D_I(array, val)
-     integer, dimension(:,:,:,:,:), allocatable, intent(inout) :: array
+     integer, dimension(:,:,:,:,:), intent(inout) :: array
      integer, intent(in) :: val

      integer :: lbds(5),ubds(5),i,j,k
@@ -70,6 +70,7 @@ module raijudefs
   real(rp), parameter :: def_cfl = 0.3
   real(rp), parameter :: cflMax = 0.3
   logical, parameter :: def_doUseVelLRs = .true.
+  logical, parameter :: def_doSmoothGrads = .true.

   ! Domain limits
   ! Buffer not allowed beyond min of maxTail and maxSun
@@ -147,6 +147,11 @@ module raijutypes

   !--- State ---!
+  logical :: doCS_next_preAdv = .false.
+  !! Signal to run coldstart next time raiju preAdvances
+  real(rp) :: modelDst_next_preAdv = 0.0_rp
+  !! Target Dst [nT] when we run coldstart next
+
   logical :: doneFirstCS = .false.
   !! Have we executed once already?
   real(rp) :: lastEval = -1*HUGE
@@ -221,6 +226,8 @@ module raijutypes
   !! For debug
   logical :: writeGhosts
   !! For debug
+  logical :: doSmoothGrads
+  !! Whether or not we smooth variables (bvol and electric potential) before taking gradients
   logical :: doClockConsoleOut
   !! If we are driving, output clock info
   logical :: doOutput_potGrads
@@ -65,22 +65,19 @@ module chmpfields
   allocate(By(Nip,Njp,Nkp))
   allocate(Bz(Nip,Njp,Nkp))

+  allocate(Vx(Nip,Njp,Nkp,0:Model%nSpc))
+  allocate(Vy(Nip,Njp,Nkp,0:Model%nSpc))
+  allocate(Vz(Nip,Njp,Nkp,0:Model%nSpc))

   if (Model%doMHD) then
      allocate(D (Nip,Njp,Nkp,0:Model%nSpc))
      allocate(P (Nip,Njp,Nkp,0:Model%nSpc))
-     allocate(Vx(Nip,Njp,Nkp,0:Model%nSpc))
-     allocate(Vy(Nip,Njp,Nkp,0:Model%nSpc))
-     allocate(Vz(Nip,Njp,Nkp,0:Model%nSpc))
-  else
-     allocate(Vx(Nip,Njp,Nkp,0))
-     allocate(Vy(Nip,Njp,Nkp,0))
-     allocate(Vz(Nip,Njp,Nkp,0))
   endif

   if (Model%doJ) then
      allocate(Jx(Nip,Njp,Nkp))
      allocate(Jy(Nip,Njp,Nkp))
      allocate(Jz(Nip,Njp,Nkp))

   endif

   !------------
@@ -5,6 +5,7 @@ module raijuBCs
   use raijutypes
   use raijuetautils
   use raijudomain
+  use raijuColdStartHelper

   implicit none

@@ -29,12 +30,15 @@ module raijuBCs
      doWholeDomain = .false.
   endif

-  ! Now that topo is set, we can calculate active domain
-  call setActiveDomain(Model, Grid, State)

   call calcMomentIngestionLocs(Model, Grid, State, doWholeDomain, doMomentIngest)
   call applyMomentIngestion(Model, Grid, State, doMomentIngest)

+  if (State%coldStarter%doCS_next_preAdv) then
+     call raijuGeoColdStart(Model, Grid, State, State%t, State%coldStarter%modelDst_next_preAdv, doAccumulateO=.true.)
+     State%coldStarter%doCS_next_preAdv = .false.
+  endif

   if (Model%doActiveShell ) then
      ! Do first round of determining active shells for each k
@@ -64,9 +68,18 @@ module raijuBCs
   doMomentIngest = .false.
   ! Determine where to do BCs
   if(doWholeDomain) then
-     where (State%active .ne. RAIJUINACTIVE)
-        doMomentIngest = .true.
-     end where
+     !where (State%active .ne. RAIJUINACTIVE)
+     !   doMomentIngest = .true.
+     !end where
+     associate(sh=>Grid%shGrid)
+        do j=sh%jsg,sh%jeg
+           do i=sh%isg,sh%ieg
+              if (State%active(i,j) .ne. RAIJUINACTIVE) then
+                 doMomentIngest(i,j) = .true.
+              endif
+           enddo
+        enddo
+     end associate
   else

      associate(sh=>Grid%shGrid)
@@ -118,7 +131,6 @@ module raijuBCs
   psphIdx = spcIdx(Grid, F_PSPH)
   eleIdx = spcIdx(Grid, F_HOTE)
   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
   !$OMP private(i,j,s,fIdx,fm,vm,kT,etaBelow,tmp_kti,tmp_kte,eMin,tmp_D,tmp_P)
   do j=Grid%shGrid%jsg,Grid%shGrid%jeg
      do i=Grid%shGrid%isg,Grid%shGrid%ieg
@@ -215,8 +227,8 @@ module raijuBCs
   press2D = State%Pavg (i-2:i+2, j-2:j+2, fIdx)
   wgt2D = State%domWeights(i-2:i+2, j-2:j+2)
   isG2D = State%active (i-2:i+2, j-2:j+2) .ne. RAIJUINACTIVE
-  D = sum(den2D * wgt2D, mask=isG2D)/sum(wgt2D, mask=isG2D)
-  P = sum(press2D* wgt2D, mask=isG2D)/sum(wgt2D, mask=isG2D)
+  D = sum(den2D * wgt2D, mask=isG2D)/max( sum(wgt2D, mask=isG2D), TINY)
+  P = sum(press2D* wgt2D, mask=isG2D)/max( sum(wgt2D, mask=isG2D), TINY)
   endif
 end subroutine getDomWeightedMoments
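Note on the change above: the domain-weighted averages are now guarded against a zero total weight, so a cell surrounded entirely by inactive neighbors no longer divides by zero. A minimal sketch of the idea, assuming NumPy-style arrays (illustrative only, not the kaiju API):

    import numpy as np

    TINY = 1.0e-30  # assumed small floor, analogous to the Fortran TINY parameter

    def dom_weighted_moment(q2d, wgt2d, is_good):
        # Weighted average of q2d over the cells flagged in is_good; the max()
        # keeps the result finite when no neighbor is active or all weights are zero.
        total = np.sum(wgt2d, where=is_good)
        return np.sum(q2d * wgt2d, where=is_good) / max(total, TINY)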
@@ -4,6 +4,7 @@ module raijuColdStartHelper
   use raijutypes
   use imaghelper
   use earthhelper
+  use arrayutil

   use raijuetautils
   use raijuloss_CX
@@ -32,6 +33,8 @@ module raijuColdStartHelper
      call iXML%Set_Val(coldStarter%tEnd,'coldStarter/tEnd',coldStarter%evalCadence-TINY) ! Don't do any updates as default
   endif

+  coldStarter%doneFirstCS = .false.

 end subroutine initRaijuColdStarter


@@ -41,7 +44,7 @@ module raijuColdStartHelper
 ! Worker routines
 !------

-subroutine raijuGeoColdStart(Model, Grid, State, t0, dstModel)
+subroutine raijuGeoColdStart(Model, Grid, State, t0, dstModel, doAccumulateO)
   !! Cold start RAIJU assuming we are at Earth sometime around 21st century
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
@@ -50,26 +53,31 @@ module raijuColdStartHelper
   !! Target time to pull SW values from
   real(rp), intent(in) :: dstModel
   !! Current dst of global model
+  logical, optional, intent(in) :: doAccumulateO
+  !! If true, keep State%eta and add coldstart to it.
+  !! If false, replace State%eta with coldStart info
+  !! Default: false

   logical :: isFirstCS
   logical :: doInitRC, doAccumulate
   integer :: i,j,k
   integer :: s, sIdx_p, sIdx_e
   real(rp) :: dstReal, dstTarget
   real(rp) :: dps_current, dps_preCX, dps_postCX, dps_rescale, dps_ele
   real(rp) :: etaScale
-  logical, dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg) :: isGood
+  logical , dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg) :: isGood
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk) :: etaCS

   associate(cs=>State%coldStarter)
   !write(*,*)"Coldstart running..."


   isFirstCS = .not. State%coldStarter%doneFirstCS

   if (.not. isFirstCS .and. .not. cs%doUpdate) then
      return
   if (present(doAccumulateO)) then
      doAccumulate = doAccumulateO
   else
      doAccumulate = .false.
   endif

   call fillArray(etaCS, 0.0_rp)

   where (State%active .eq. RAIJUACTIVE)
   associate(cs=>State%coldStarter)

   where (State%active .ne. RAIJUINACTIVE)
      isGood = .true.
   elsewhere
      isGood = .false.
@@ -77,102 +85,154 @@ module raijuColdStartHelper

   sIdx_p = spcIdx(Grid, F_HOTP)
   sIdx_e = spcIdx(Grid, F_HOTE)


   if (isFirstCS) then
      ! Start by nuking all etas we will set up ourselves
      do s=1,Grid%nSpc
         !! Skip plasmashere, let that be handled on its own
         !if ( Grid%spc(s)%flav == F_PSPH) then
         !   continue
         !endif
         State%eta(:,:,Grid%spc(s)%kStart:Grid%spc(s)%kEnd) = 0.0
      enddo
   endif

   !! Init psphere
   !if (isFirstCS .or. cs%doPsphUpdate) then
   !   call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
   !endif

   ! Update Dst target
   dstReal = GetSWVal('symh', Model%tsF, t0)
   if (isFirstCS) then
   if (cs%doneFirstCS) then
      write(*,*)"Already coldstarted once, you shouldn't be here "
      return
   else
      ! On first try, we assume there is no existing ring current, and its our job to make up the entire difference
      dstTarget = dstReal - dstModel

   else if (t0 > (cs%lastEval + cs%evalCadence)) then
      ! If we are updating, there should already be some ring current
      ! If dstReal - dstModel is still < 0, we need to add ADDITIONAL pressure to get them to match
      dps_current = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood) + spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_e), isGood)
      dstTarget = dstReal - (dstModel - dps_current)
   else
      ! Otherwise we have nothing to do, just chill til next update time
      return
   endif

   cs%lastEval = t0
   cs%lastTarget = dstTarget
   cs%doneFirstCS = .true. ! Whether we do anything or not, we were at least called once

   if (dstTarget > 0) then ! We got nothing to contribute
   ! Now decide if we need to add a starter ring current
   if (dstTarget >= 0) then ! We've got nothing to contribute
      write(*,*)"RAIJU coldstart not adding starter ring current"
      return
   endif

   if (isFirstCS) then
      ! Init psphere
      call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
      ! Init hot protons
      call raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget)
      dps_preCX = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)
      ! Hit it with some charge exchange
      if (cs%doCX) then
         call raiColdStart_applyCX(Model, Grid, State, Grid%spc(sIdx_p))
      endif
      dps_postCX = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)
      ! Calc moments to update pressure and density
      call EvalMoments(Grid, State)
      ! Use HOTP moments to set electrons
      call raiColdStart_initHOTE(Model, Grid, State)
      dps_ele = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_e), isGood)
      dps_current = dps_postCX ! Note: if using fudge we're gonna lose electrons immediately, don't include them in current dst for now
   ! Init hot protons
   call raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget, etaCS)
   !call raiColdStart_initHOTP_RCOnly(Model, Grid, State, t0, dstTarget, etaCS)
   dps_preCX = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
   ! Hit it with some charge exchange
   if (cs%doCX) then
      call raiColdStart_applyCX(Model, Grid, State, Grid%spc(sIdx_p), etaCS)
   endif
   dps_postCX = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
   ! Use HOTP moments to set electrons
   call raiColdStart_initHOTE(Model, Grid, State, etaCS)
   dps_ele = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_e), isGood)
   dps_current = dps_postCX ! Note: if using fudge we're gonna lose electrons immediately, don't include them in current dst for now

   etaScale = abs(dstTarget / dps_current)
   State%eta(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd) = etaScale*State%eta(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd)
   dps_rescale = spcEta2DPS(Model, Grid, State, Grid%spc(sIdx_p), isGood)

   if (isfirstCS) then
      write(*,*) "RAIJU Cold starting..."
      write(*,'(a,f7.2)') " Real Dst : ",dstReal
      write(*,'(a,f7.2)') " Model Dst : ",dstModel
      write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
      write(*,'(a,f7.2)') " Hot proton pre-loss : ",dps_preCX
      write(*,'(a,f7.2)') " post-loss : ",dps_postCX
      write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
      write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele
   if (dstTarget < 0) then
      etaScale = abs(dstTarget / dps_current)
      etaCS(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd) = etaScale*etaCS(:,:,Grid%spc(sIdx_p)%kStart:Grid%spc(sIdx_p)%kEnd)
      dps_rescale = spcEta2DPS(Model, Grid, State%bvol_cc, etaCS, Grid%spc(sIdx_p), isGood)
   else
      write(*,'(a,f7.2)') " Real Dst : ",dstReal
      write(*,'(a,f7.2)') " Model Dst : ",dstModel
      write(*,'(a,f7.2)') " Current DPS-Dst : ",dps_current
      write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
      write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
      write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele
      dps_rescale = dps_current
   endif

   write(*,*) "RAIJU Cold starting..."
   write(*,'(a,f7.2)') " Real Dst : ",dstReal
   write(*,'(a,f7.2)') " Model Dst : ",dstModel
   write(*,'(a,f7.2)') " Target DPS-Dst : ",dstTarget
   write(*,'(a,f7.2)') " Hot proton pre-loss : ",dps_preCX
   write(*,'(a,f7.2)') " post-loss : ",dps_postCX
   write(*,'(a,f7.2)') " post-rescale : ",dps_rescale
   write(*,'(a,f7.2)') " Hot electron DPS-Dst : ",dps_ele

   end associate

   State%coldStarter%doneFirstCS = .true.
   ! finally, put it into raiju state
   if(doAccumulate) then
      !$OMP PARALLEL DO default(shared) &
      !$OMP private(i,j,k)
      do j=Grid%shGrid%jsg,Grid%shGrid%jeg
         do i=Grid%shGrid%isg,Grid%shGrid%ieg
            do k=1,Grid%Nk
               if(etaCS(i,j,k) > State%eta(i,j,k)) then
                  State%eta(i,j,k) = etaCS(i,j,k)
               endif
            enddo
         enddo
      enddo
   else
      State%eta = etaCS
   endif

 end subroutine raijuGeoColdStart

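Note on raijuGeoColdStart above: the Dst-targeting arithmetic is easy to misread in diff form. On the first cold start the target is the full gap between observed and modeled Dst; on later updates RAIJU's existing DPS-Dst contribution is credited back before computing the gap; the cold-start proton etas are then rescaled so their DPS-Dst matches the (negative) target; and with doAccumulateO=.true. the result is merged into the existing state by an element-wise max. A hedged sketch of that logic (illustrative Python, not the kaiju API):

    def coldstart_target(dst_real, dst_model, dps_current=0.0, first=True):
        # First cold start: make up the entire observed-minus-model difference.
        if first:
            return dst_real - dst_model
        # Update: the model Dst already includes our current DPS-Dst contribution.
        return dst_real - (dst_model - dps_current)

    def rescale(eta_cs, dst_target, dps_current):
        # Scale cold-start etas so their DPS-Dst matches the target.
        return abs(dst_target / dps_current) * eta_cs

    # Accumulation keeps whatever is already there:
    #   eta[i, j, k] = max(eta[i, j, k], eta_cs[i, j, k])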
-subroutine raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget)
+subroutine raiColdStart_initHOTP_RCOnly(Model, Grid, State, t0, dstTarget, etaCS)
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
-  type(raijuState_T), intent(inout) :: State
+  type(raijuState_T), intent(in) :: State
   real(rp), intent(in) :: t0
   !! Target time to pull SW values from
   real(rp), intent(in) :: dstTarget
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS

   real(rp) :: dstTarget_p
   logical :: isInTM03
   integer :: i,j,sIdx
   integer, dimension(2) :: ij_TM
   real(rp) :: vSW, dSW, dPS_emp, pPS_emp, ktPS_emp
   real(rp) :: x0_TM, y0_TM, T0_TM, Bvol0_TM, P0_ps, N0_ps
   real(rp) :: L, vm, P_rc, D_rc, kt_rc

   sIdx = spcIdx(Grid, F_HOTP)
   associate(sh=>Grid%shGrid, spc=>Grid%spc(sIdx))

   ! Set everything to zero to start
   etaCS(:,:,spc%kStart:spc%kEnd) = 0.0_rp

   ! Scale target Dst down to account for electrons contributing stuff later
   dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
   call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90


   ! Get reference TM value at -10 Re
   x0_TM = -10.0-TINY
   y0_TM = 0.0
   ! Empirical temperature
   call EvalTM03([x0_TM,y0_TM,0.0_rp],N0_ps,P0_ps,isInTM03)
   T0_TM = DP2kT(N0_ps, P0_ps)
   ! Model FTV
   ij_TM = minloc( sqrt( (State%xyzMincc(:,:,XDIR)-x0_TM)**2 + (State%xyzMincc(:,:,YDIR)**2) ) )
   Bvol0_TM = State%bvol_cc(ij_TM(IDIR), ij_TM(JDIR))

   ! Now set our initial density and pressure profile
   do j=sh%jsg,sh%jeg
      do i=sh%isg,sh%ie ! Note: Not setting low lat ghosts, we want them to be zero

         if (State%active(i,j) .eq. RAIJUINACTIVE) cycle

         L = norm2(State%xyzMincc(i,j,XDIR:YDIR))
         vm = State%bvol_cc(i,j)**(-2./3.)

         kt_rc = T0_TM*(Bvol0_TM/State%bvol_cc(i,j))**(2./3.)
         kt_rc = min(kt_rc, 4.0*T0_TM) ! Limit cap. Not a big fan, but without cap we get stuff that's too energetic and won't go away (until FLC maybe)


         P_rc = P_QTRC(L) ! From earthhelper.F90
         D_rc = PkT2Den(P_rc, kt_rc)


         ! Finally map it to HOTP etas
         call DkT2SpcEta(Model, spc, etaCS(i,j,spc%kStart:spc%kEnd), D_rc, kt_rc, vm)

      enddo
   enddo

   end associate

 end subroutine raiColdStart_initHOTP_RCOnly

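Note on raiColdStart_initHOTP_RCOnly above: the temperature profile maps an empirical reference temperature at the x = -10 Re point inward by assuming adiabatic behavior along flux tubes, kT proportional to V**(-2/3), and then caps it. A sketch (illustrative Python, names assumed):

    def ring_current_kt(kt_ref, bvol_ref, bvol, cap_factor=4.0):
        # Adiabatic scaling referenced to the TM03 value at -10 Re, then capped
        # so the seeded ring current is not unrealistically hot.
        kt = kt_ref * (bvol_ref / bvol) ** (2.0 / 3.0)
        return min(kt, cap_factor * kt_ref)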
 subroutine raiColdStart_initHOTP(Model, Grid, State, t0, dstTarget, etaCS)
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
   type(raijuState_T), intent(in) :: State
   real(rp), intent(in) :: t0
   !! Target time to pull SW values from
   real(rp), intent(in) :: dstTarget
   real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS

   real(rp) :: dstTarget_p
   logical :: isInTM03
@@ -192,8 +252,10 @@ module raijuColdStartHelper
   call InitTM03(Model%tsF,t0)

   ! Scale target Dst down to account for electrons contributing stuff later
-  dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
-  call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90
+  if (dstTarget < 0) then
+     dstTarget_p = dstTarget / (1.0 + 1.0/Model%tiote)
+     call SetQTRC(dstTarget_p,doVerbO=.false.) ! This sets a global QTRC_P0 inside earthhelper.F90
+  endif

   ! Get Borovsky statistical values
   vSW = abs(GetSWVal("Vx",Model%tsF,t0))
@@ -218,7 +280,7 @@ module raijuColdStartHelper
   endif

   ! Set everything to zero to start
-  State%eta(:,:,spc%kStart:spc%kEnd) = 0.0_rp
+  etaCS(:,:,spc%kStart:spc%kEnd) = 0.0_rp

   ! Now set our initial density and pressure profile
   do j=sh%jsg,sh%jeg
@@ -234,7 +296,11 @@ module raijuColdStartHelper

   call EvalTM03_SM(State%xyzMincc(i,j,:),N0_ps,P0_ps,isInTM03)

-  P_rc = P_QTRC(L) ! From earthhelper.F90
+  if (dstTarget < 0) then
+     P_rc = P_QTRC(L) ! From earthhelper.F90
+  else
+     P_rc = 0.0
+  endif

   if (.not. isInTM03) then
      N0_ps = dPS_emp
@@ -250,7 +316,7 @@ module raijuColdStartHelper
   endif

   ! Finally map it to HOTP etas
-  call DkT2SpcEta(Model, spc, State%eta(i,j,spc%kStart:spc%kEnd), D_final, kt_rc, vm)
+  call DkT2SpcEta(Model, spc, etaCS(i,j,spc%kStart:spc%kEnd), D_final, kt_rc, vm)

   enddo
   enddo
@@ -260,11 +326,12 @@ module raijuColdStartHelper
 end subroutine raiColdStart_initHOTP


-subroutine raiColdStart_applyCX(Model, Grid, State, spc)
+subroutine raiColdStart_applyCX(Model, Grid, State, spc, etaCS)
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
-  type(raijuState_T), intent(inout) :: State
+  type(raijuState_T), intent(in) :: State
   type(raijuSpecies_T), intent(in) :: spc
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS

   integer :: i,j,k
   type(raiLoss_CX_T) :: lossCX
@@ -275,13 +342,12 @@ module raijuColdStartHelper
   call lossCX%doInit(Model, Grid, nullXML)

   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
-  !$OMP private(i,j,tau)
+  !$OMP private(i,j,k,tau)
   do j=Grid%shGrid%jsg,Grid%shGrid%jeg
      do i=Grid%shGrid%isg,Grid%shGrid%ieg
         do k = spc%kStart,spc%kEnd
            tau = lossCX%calcTau(Model, Grid, State, i, j, k)
-           State%eta(i,j,k) = State%eta(i,j,k)*exp(-tCX/tau)
+           etaCS(i,j,k) = etaCS(i,j,k)*exp(-tCX/tau)
         enddo
      enddo
   enddo
@@ -289,38 +355,49 @@ module raijuColdStartHelper
 end subroutine raiColdStart_applyCX


-subroutine raiColdStart_initHOTE(Model, Grid, State)
+subroutine raiColdStart_initHOTE(Model, Grid, State, etaCS)
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
-  type(raijuState_T), intent(inout) :: State
+  type(raijuState_T), intent(in) :: State
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(inout) :: etaCS

   integer :: sIdx_e, sIdx_p
   integer :: i,j
   real(rp) :: press_p, den_p
   real(rp) :: kt_p, kt_e, den, vm

   sIdx_p = spcIdx(Grid, F_HOTP)
   sIdx_e = spcIdx(Grid, F_HOTE)


+  associate(spc_p=>Grid%spc(sIdx_p), spc_e=>Grid%spc(sIdx_e))
   ! Set everything to zero to start
-  State%eta(:,:,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd) = 0.0_rp
+  etaCS(:,:,spc_e%kStart:spc_e%kEnd) = 0.0_rp

   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
-  !$OMP private(i,j,vm,den,kt_p,kt_e)
+  !$OMP private(i,j,vm,den_p, press_p,kt_p,kt_e)
   do j=Grid%shGrid%jsg,Grid%shGrid%jeg
      do i=Grid%shGrid%isg,Grid%shGrid%ie ! Note: Not setting low lat ghosts, we want them to be zero
         if (State%active(i,j) .eq. RAIJUINACTIVE) cycle

         vm = State%bvol_cc(i,j)**(-2./3.)
-        den = State%Den(sIdx_p)%data(i,j)
-        kt_p = DP2kT(den, State%Press(sIdx_p)%data(i,j))
+        !den = State%Den(sIdx_p)%data(i,j)
+        !kt_p = DP2kT(den, State%Press(sIdx_p)%data(i,j))

+        den_p = SpcEta2Den (spc_p, etaCS(i,j,spc_p%kStart:spc_p%kEnd), State%bvol_cc(i,j))
+        press_p = SpcEta2Press(spc_p, etaCS(i,j,spc_p%kStart:spc_p%kEnd), State%bvol_cc(i,j))
+        kt_p = DP2kT(den_p, press_p)

         kt_e = kt_p / Model%tiote
         call DkT2SpcEta(Model, Grid%spc(sIdx_e), &
-                        State%eta(i,j,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd), &
-                        den, kt_e, vm)
+                        etaCS(i,j,Grid%spc(sIdx_e)%kStart:Grid%spc(sIdx_e)%kEnd), &
+                        den_p, kt_e, vm)
      enddo
   enddo

   end associate

 end subroutine raiColdStart_initHOTE

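Note on raiColdStart_initHOTE above: the hot electrons are seeded from the cold-started proton moments, with equal number density (charge neutrality) and a temperature reduced by the ion-to-electron ratio Model%tiote; the proton Dst target is reduced by the same ratio so protons and electrons together meet the total. A sketch of that arithmetic (illustrative Python):

    def proton_share(dst_target, tiote):
        # Protons carry tiote/(1+tiote) of the target; electrons supply the rest.
        return dst_target / (1.0 + 1.0 / tiote)

    def electron_moments(den_p, kt_p, tiote):
        # Same density as the protons, colder by the tiote ratio.
        return den_p, kt_p / tiote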
@@ -178,18 +178,19 @@ module raijuetautils
 end function SpcEta2Press


-function spcEta2DPS(Model, Grid, State, spc, isGood) result(dpsdst)
+function spcEta2DPS(Model, Grid, bVol_cc, eta, spc, isGood) result(dpsdst)
   !! Calculate total DPS-Dst for given species within the defined isGood domain
   type(raijuModel_T), intent(in) :: Model
   type(raijuGrid_T), intent(in) :: Grid
-  type(raijuState_T), intent(in) :: State
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg), intent(in) :: bVol_cc
+  real(rp), dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg, Grid%Nk), intent(in) :: eta
   type(raijuSpecies_T), intent(in) :: spc
   logical, dimension(Grid%shGrid%isg:Grid%shGrid%ieg, Grid%shGrid%jsg:Grid%shGrid%jeg), intent(in) :: isGood
   !! Eval mask, true = point is included in calculation

   real(rp) :: dpsdst
-  integer :: i,j,k
-  real(rp) :: press, bVol, energyDen, energy
+  integer :: i,j
+  real(rp) :: press, energyDen, energy
   logical :: isDead = .false.

   dpsdst = 0.0
@@ -197,9 +198,8 @@ module raijuetautils
   do j=Grid%shGrid%jsg,Grid%shGrid%jeg
      do i=Grid%shGrid%isg,Grid%shGrid%ieg
         if (.not. isGood(i,j)) cycle
-        bVol = State%bvol_cc(i,j)
-        press = SpcEta2Press(spc, State%eta(i,j,spc%kStart:spc%kEnd), bVol) ! [nPa]
-        energyDen = (press*1.0D-9) * (bVol*Model%planet%rp_m*1.0D9) * (Grid%Brcc(i,j)*1.0D-9)/kev2J ! p[J/m^3] * bVol[m/T] * B[T] = [J/m^2] * keV/J = [keV/m^2]
+        press = SpcEta2Press(spc, eta(i,j,spc%kStart:spc%kEnd), bvol_cc(i,j)) ! [nPa]
+        energyDen = (press*1.0D-9) * (bVol_cc(i,j)*Model%planet%rp_m*1.0D9) * (Grid%Brcc(i,j)*1.0D-9)/kev2J ! p[J/m^3] * bVol[m/T] * B[T] = [J/m^2] * keV/J = [keV/m^2]
         energy = energyDen*(Grid%areaCC(i,j)*Model%planet%ri_m**2) ! [keV/m^2]* Re^2[m^2] = [keV]
         dpsdst = dpsdst - 4.2*(1.0D-30)*energy ! [nT]
      enddo
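Note on spcEta2DPS above: the loop converts a per-cell pressure into ring-current energy content and then into a Dst depression with a Dessler-Parker-Sckopke-style factor. A sketch of the unit chain (illustrative Python; the 4.2e-30 factor and the keV conversion are taken as given from the diff, the constant value is an assumption):

    KEV2J = 1.602176634e-16  # assumed: joules per keV

    def dps_dst_cell(press_nPa, bvol, b_nT, area, rp_m, ri_m):
        # p [Pa] * bVol (scaled to m/T) * B [T] / (J per keV) -> [keV/m^2]
        energy_den = (press_nPa * 1.0e-9) * (bvol * rp_m * 1.0e9) * (b_nT * 1.0e-9) / KEV2J
        energy = energy_den * area * ri_m ** 2   # [keV]
        return -4.2e-30 * energy                 # Dst contribution [nT]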
@@ -16,7 +16,7 @@ module raijuIO

 implicit none

-integer, parameter, private :: MAXIOVAR = 70
+integer, parameter, private :: MAXIOVAR = 100
 logical, private :: doRoot = .true. !Whether root variables need to be written
 logical, private :: doFat = .false. !Whether to output lots of extra data

@@ -503,6 +503,12 @@ module raijuIO
   else
      call AddOutVar(IOVars,"cs_doneFirstCS", 0)
   endif
+  if (State%coldStarter%doCS_next_preAdv) then
+     call AddOutVar(IOVars,"cs_doCS_next_preAdv", 1)
+  else
+     call AddOutVar(IOVars,"cs_doCS_next_preAdv", 0)
+  endif
+  call AddOutVar(IOVars, "cs_modelDst_next_preAdv", State%coldStarter%modelDst_next_preAdv)
   call AddOutVar(IOVars, "cs_lastEval", State%coldStarter%lastEval)
   call AddOutVar(IOVars, "cs_lastTarget", State%coldStarter%lastTarget)

@@ -598,6 +604,8 @@ module raijuIO
   call AddInVar(IOVars,"nRes")
   call AddInVar(IOVars,"tRes")
   call AddInVar(IOVars,"isFirstCpl")
+  call AddInVar(IOVars,"cs_doCS_next_preAdv")
+  call AddInVar(IOVars,"cs_modelDst_next_preAdv")
   call AddInVar(IOVars,"cs_doneFirstCS")
   call AddInVar(IOVars,"cs_lastEval")
   call AddInVar(IOVars,"cs_lastTarget")
@@ -651,12 +659,15 @@ module raijuIO
   ! Coldstarter
   State%coldStarter%lastEval = GetIOReal(IOVars, "cs_lastEval")
   State%coldStarter%lastTarget = GetIOReal(IOVars, "cs_lastTarget")
+  State%coldStarter%modelDst_next_preAdv = GetIOReal(IOVars, "cs_modelDst_next_preAdv")

   ! Handle boolean attributes
   tmpInt = GetIOInt(IOVars, "isFirstCpl")
   State%isFirstCpl = tmpInt .eq. 1
   tmpInt = GetIOInt(IOVars, "cs_doneFirstCS")
   State%coldStarter%doneFirstCS = tmpInt .eq. 1
+  tmpInt = GetIOInt(IOVars, "cs_doCS_next_preAdv")
+  State%coldStarter%doCS_next_preAdv = tmpInt .eq. 1

   call IOArray2DFill(IOVars, "xmin" , State%xyzMin(:,:,XDIR))
   call IOArray2DFill(IOVars, "ymin" , State%xyzMin(:,:,YDIR))
@@ -131,7 +131,7 @@ module raijuOut
   if (maxP_MLT > 24) maxP_MLT = maxP_MLT - 24D0
   write(*,'(a,I0,a,f7.2,a,f7.2,a,f5.2,a,f5.2,a,f7.2)') ' ', &
      Grid%spc(s)%flav, ': P =', maxPress,', D =',maxDen,' @ ',maxP_L,' Rp,',maxP_MLT, &
-     " MLT; DPS:",spcEta2DPS(Model, Grid, State, Grid%spc(sIdx), State%active .eq. RAIJUACTIVE)
+     " MLT; DPS:",spcEta2DPS(Model, Grid, State%bvol_cc, State%eta_avg, Grid%spc(sIdx), State%active .eq. RAIJUACTIVE)

   enddo
   write(*,'(a)',advance="no") ANSIRESET
@@ -36,12 +36,18 @@ module raijuPreAdvancer
   call fillArray(State%eta_avg, 0.0_rp)
   ! (losses handled in updateRaiLosses)

+  ! Now that topo is set, we can calculate active domain
+  call setActiveDomain(Model, Grid, State)

   ! Moments to etas, initial active shell calculation
   call Tic("BCs")
   call applyRaijuBCs(Model, Grid, State, doWholeDomainO=State%isFirstCpl) ! If fullEtaMap=True, mom2eta map is applied to the whole domain
+  if (State%isFirstCpl) then
+     call setRaijuInitPsphere(Model, Grid, State, Model%psphInitKp)
+  endif
   call Toc("BCs")

-  ! Handle plasmaasphere refilling for the full step about to happen
+  ! Handle plasmasphere refilling for the full step about to happen
   call plasmasphereRefill(Model,Grid,State)

   ! Handle edge cases that may effect the validity of information carried over from last coupling period
@@ -261,7 +267,7 @@ module raijuPreAdvancer

   associate(sh=>Grid%shGrid)
   ! Gauss-Green calculation of cell-averaged gradients
-  call potExB(Grid%shGrid, State, pExB, doSmoothO=.true., isGCornerO=isGCorner) ! [V]
+  call potExB(Grid%shGrid, State, pExB, doSmoothO=Model%doSmoothGrads, isGCornerO=isGCorner) ! [V]
   call potCorot(Model%planet, Grid%shGrid, pCorot, Model%doGeoCorot) ! [V]
   call calcGradIJ_cc(Model%planet%rp_m, Grid, isGCorner, pExB , State%gradPotE_cc , doLimO=.true. ) ! [V/m]
   call calcGradIJ_cc(Model%planet%rp_m, Grid, isGCorner, pCorot, State%gradPotCorot_cc, doLimO=.false.) ! [V/m]
@@ -270,7 +276,7 @@ module raijuPreAdvancer
   ! lambda is constant, so just need grad(V^(-2/3) )
   call calcGradVM_cc(Model%planet%rp_m, Model%planet%ri_m, Model%planet%magMoment, &
                      Grid, isGCorner, State%bvol, State%gradVM_cc, &
-                     doSmoothO=.true., doLimO=.true.)
+                     doSmoothO=Model%doSmoothGrads, doLimO=.true.)
   end associate

 end subroutine calcPotGrads_cc
@@ -304,7 +310,6 @@ module raijuPreAdvancer
   associate(sh=>Grid%shGrid)

   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
   !$OMP private(i,j,qLow,qHigh)
   do j=sh%jsg,sh%jeg
      do i=sh%isg,sh%ieg
@@ -332,7 +337,6 @@ module raijuPreAdvancer
   allocate(gradQtmp(sh%isg:sh%ieg,sh%jsg:sh%jeg, 2))
   gradQtmp = gradQ
   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
   !$OMP private(i,j)
   do j=sh%jsg+1,sh%jeg-1
      do i=sh%isg+1,sh%ieg-1
@@ -483,13 +487,19 @@ module raijuPreAdvancer
   gradVM(:,:,RAI_TH) = gradVM(:,:,RAI_TH) + dV0_dth

   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
   !$OMP private(i,j,bVolcc)
   do j=sh%jsg,sh%jeg
      do i=sh%isg,sh%ieg
-        bVolcc = toCenter2D(dV(i:i+1,j:j+1)) + DipFTV_colat(Grid%thcRp(i), B0) ! Will include smoothing of dV if enabled
-        !bVolcc = toCenter2D(V(i:i+1,j:j+1))
-        gradVM(i,j,:) = (-2./3.)*bVolcc**(-5./3.)*gradVM(i,j,:)
+        if(all(isGcorner(i:i+1,j:j+1))) then
+           !bVolcc = toCenter2D(dV(i:i+1,j:j+1)) + DipFTV_colat(Grid%thcRp(i), B0) ! Will include smoothing of dV if enabled
+           bVolcc = toCenter2D(V(i:i+1,j:j+1))
+           gradVM(i,j,:) = (-2./3.)*bVolcc**(-5./3.)*gradVM(i,j,:)
+        else
+           ! gradVM should be zero for this point coming out of calcGradIJ_cc, but set to dipole value just in case
+           gradVM(i,j,RAI_PH) = 0.0
+           gradVM(i,j,RAI_TH) = 0.0
+           !gradVM(i,j,RAI_TH) = (-2./3.)*DipFTV_colat(Grid%thcRp(i), B0)**(-5./3.)*dV0_dth(i,j)
+        endif
      enddo
   enddo
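Note on the gradVM loop above: the scaling step is the chain rule applied to the flux-tube volume, grad(V**(-2/3)) = (-2/3) * V**(-5/3) * grad(V), now evaluated only where all four surrounding corners are good. A one-line sketch (illustrative Python):

    def grad_vm(bvol_cc, grad_bvol):
        # d/dx [ V**(-2/3) ] = (-2/3) * V**(-5/3) * dV/dx
        return (-2.0 / 3.0) * bvol_cc ** (-5.0 / 3.0) * grad_bvol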
@@ -564,7 +574,6 @@ module raijuPreAdvancer
   enddo
   ! Now everyone else
   !$OMP PARALLEL DO default(shared) &
-  !$OMP schedule(dynamic) &
   !$OMP private(i,j)
   do j=jsg+1,jeg
      do i=isg+1,ieg
@@ -572,6 +581,8 @@ module raijuPreAdvancer
      enddo
   enddo

+  call wrapJcorners(sh, Vsm)

   ! Write back to provided array
   V = Vsm
   end associate
@@ -175,6 +175,7 @@ module raijustarter
   Model%activeDomRad = abs(Model%activeDomRad)

   !---Solver ---!
+  call iXML%Set_Val(Model%doSmoothGrads,'sim/doSmoothGrads',def_doSmoothGrads)
   call iXML%Set_Val(Model%doUseVelLRs,'sim/useVelLRs',def_doUseVelLRs)
   call iXML%Set_Val(Model%maxItersPerSec,'sim/maxIter',def_maxItersPerSec)
   call iXML%Set_Val(Model%maxOrder,'sim/maxOrder',7)
@@ -553,6 +554,8 @@ module raijustarter
   ! Similarly, set vaFrac to safe value in case stand-alone never writes to it
   State%vaFrac = 1.0

+  State%isFirstCpl = .true.

   ! Init State sub-modules
   if (Model%isSA) then
      ! If we are standalone, this is the place to get coldStarter settings
@@ -231,7 +231,7 @@ module imag2mix_interface
   imP_avg(RAI_EDEN) = imP_avg(RAI_EDEN )/nPnts**2
   imP_avg(RAI_EPRE) = imP_avg(RAI_EPRE )/nPnts**2
   imP_avg(RAI_NPSP) = imP_avg(RAI_NPSP )/nPnts**2
-  imP_avg(RAI_EAVG) = imP_avg(RAI_EFLUX) / imP_avg(RAI_ENFLX)
+  imP_avg(RAI_EAVG) = imP_avg(RAI_EFLUX) / max(TINY, imP_avg(RAI_ENFLX))
   imP_avg(RAI_GTYPE) = imP_avg(RAI_GTYPE)/nGood
   imP_avg(RAI_THCON) = imP_avg(RAI_THCON)/nGood
   imP_avg(RAI_PHCON) = imP_avg(RAI_PHCON)/nGood
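Note on the averaging change above: the mean precipitating energy is the ratio of energy flux to number flux, and the new max(TINY, ...) floor keeps it finite when the number flux is zero. Sketch (illustrative Python):

    TINY = 1.0e-30  # assumed floor, analogous to the Fortran parameter

    def mean_energy(energy_flux, number_flux):
        return energy_flux / max(number_flux, TINY)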
@@ -43,11 +43,7 @@ submodule (volttypes) raijuCplTypesSub
   ! Update MJD with whatever voltron handed us
   ! If we are restarting, this will get replaced with whatever's in file later
   App%raiApp%State%mjd = App%opt%mjd0
   write(*,*)"MJD0=",App%opt%mjd0
   if (App%opt%doColdStart) then
      ! We are gonna cold start, so ignore plasma ingestion rules for first coupling
      App%raiApp%State%isFirstCpl = .false.
   endif

   ! Then allocate and initialize coupling variables based on raiju app
   call raijuCpl_init(App, xml)

@@ -58,61 +54,29 @@ submodule (volttypes) raijuCplTypesSub
   class(raijuCoupler_T), intent(inout) :: App
   class(voltApp_T), intent(inout) :: vApp

   logical :: doFirstColdStart
   logical :: doUpdateColdStart
   real(rp) :: BSDst

   doFirstColdStart = .false.
   doUpdateColdStart = .false.

   associate(raiApp=>App%raiApp)

   ! If we are running realtime, its our job to get everything we need from vApp into raiCpl
   if (.not. App%raiApp%Model%isSA) then
      ! First, determine if we should cold start, i.e. Completely reset raiju's eta's to match some target conditions
      ! Determine if we should cold start before packing coupler because it will set tLastUpdate to vApp%time and then we can't do the checks we want
      ! But actually do cold start after coupler packing completes so we can use real field line info

      ! Do we do our very first coldstart ever
      if (App%opt%doColdStart .and. App%tLastUpdate < 0.0 .and. vApp%time >= 0.0) then
         doFirstColdStart = .true.
      endif
      ! Do we do "updates" to our coldstart during pre-conditioning period
      if(App%opt%doColdStart .and. App%tLastUpdate > 0.0 .and. vApp%time < App%startup_blendTscl) then
         doUpdateColdStart = .true.
      endif

      call packRaijuCoupler_RT(App, vApp)
   endif

   ! Someone updated raiCpl's coupling variables by now, stuff it into RAIJU proper
   call raiCpl2RAIJU(App)

   if (.not. raiApp%State%coldStarter%doneFirstCS .or. vApp%time < raiApp%State%coldStarter%tEnd) then
      !! Make sure we run at least once
      call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
      ! Calc voltron dst ourselves since vApp%BSDst is only set on console output
      call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
      call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst)
   endif
   !if (doFirstColdStart) then
   !   ! Its happening, everybody stay calm
   !   write(*,*) "RAIJU Doing first cold start..."
   !   ! NOTE: By this point we have put coupling info into raiju (e.g. bVol, xyzmin, MHD moments)
   !   ! But haven't calculated active domain yet because that happens in preadvancer
   !   ! So we jump in and do it here so we have it for cold starting
   !   call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
   !   ! Calc voltron dst ourselves since vApp%BSDst is only set on console output
   !   call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
   !   call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst, doCXO=App%doColdstartCX,doPsphO=.true.)
   !endif
   !if (doUpdateColdStart) then
   !   write(*,*)"RAIJU doing update cold start at t=",vApp%time
   !   write(*,*)" (calculating model BSDst,)",vApp%time
   !   call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
   !   call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
   !   call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst, doCXO=App%doColdstartCX,doPsphO=.false.)
   !endif
   associate(cs=>raiApp%State%coldStarter)
   if (.not. cs%doneFirstCS .or. (cs%doUpdate .and. vApp%time < cs%tEnd) ) then
      !! Make sure we run at least once
      ! Calc voltron dst ourselves since vApp%BSDst is only set on console output
      call EstDST(vApp%gApp%Model,vApp%gApp%Grid,vApp%gApp%State,BSDst0=BSDst)
      raiApp%State%coldStarter%doCS_next_preAdv = .true.
      raiApp%State%coldStarter%modelDst_next_preAdv = BSDst
      !call setActiveDomain(raiApp%Model, raiApp%Grid, raiApp%State)
      !call raijuGeoColdStart(raiApp%Model, raiApp%Grid, raiApp%State, vApp%time, BSDst)
   endif
   end associate
   end associate
 end subroutine volt2RAIJU

@@ -299,21 +263,22 @@ submodule (volttypes) raijuCplTypesSub
      endif
   enddo


   ! Mock up cold electrons balancing hot protons and see if it produces meaningful flux
   call InterpShellVar_TSC_pnt(sh, State%Den_avg(idx_proton) , th, ph, d_ion)
   call InterpShellVar_TSC_pnt(sh, State%Press_avg(idx_proton) , th, ph, p_ion)
   pie_frac = 0.05 ! Fraction of ion pressure contained by these neutralizing electrons
   pe_kT = DP2kT(d_ion, p_ion*pie_frac) ! [keV]
   pe_nflux = imP(RAI_ENFLX)*d_ion/d_hot ! Scale number flux to same loss processes except there were d_ion density instead of d_electron density
   pe_eflux = (pe_kT*kev2erg)*pe_nflux ! [erg/cm^2/s]
   if (pe_eflux > imP(RAI_EFLUX)) then ! Use in place of normal flux only if energy flux for these are greater than hot electron channel fluxes
      imP(RAI_EFLUX) = pe_eflux
      imP(RAI_ENFLX) = pe_nflux
      imP(RAI_EDEN ) = d_ion*1.0e6 ! [#/m^3]
      imP(RAI_EPRE ) = p_ion*pie_frac*1.0e-9 ! [Pa]
   endif
   endif
   !K: Removing this code for now, should be rewritten to use the MHD D,P => precip routines
   ! call InterpShellVar_TSC_pnt(sh, State%Den_avg(idx_proton) , th, ph, d_ion)
   ! call InterpShellVar_TSC_pnt(sh, State%Press_avg(idx_proton) , th, ph, p_ion)
   ! pie_frac = 0.05 ! Fraction of ion pressure contained by these neutralizing electrons
   ! pe_kT = DP2kT(d_ion, p_ion*pie_frac) ! [keV]
   ! pe_nflux = imP(RAI_ENFLX)*d_ion/d_hot ! Scale number flux to same loss processes except there were d_ion density instead of d_electron density
   ! pe_eflux = (pe_kT*kev2erg)*pe_nflux ! [erg/cm^2/s]
   ! if (pe_eflux > imP(RAI_EFLUX)) then ! Use in place of normal flux only if energy flux for these are greater than hot electron channel fluxes
   !    imP(RAI_EFLUX) = pe_eflux
   !    imP(RAI_ENFLX) = pe_nflux
   !    imP(RAI_EDEN ) = d_ion*1.0e6 ! [#/m^3]
   !    imP(RAI_EPRE ) = p_ion*pie_frac*1.0e-9 ! [Pa]
   ! endif

   endif !spcList(s)%spcType == X
   enddo
   ! derive mean energy where nflux is non-trivial.
   if (imP(RAI_ENFLX) > TINY) imP(RAI_EAVG) = imP(RAI_EFLUX)/imP(RAI_ENFLX) * erg2kev ! Avg E [keV]
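Note on volt2RAIJU above: the immediate cold start is replaced by a deferred request. The coupler only records the target model Dst and raises a flag; applyRaijuBCs performs the cold start at the next preAdvance, once the active domain is known, and then clears the flag. A minimal sketch of the handshake (illustrative Python, field names from the diff):

    class ColdStarter:
        def __init__(self):
            self.doCS_next_preAdv = False
            self.modelDst_next_preAdv = 0.0

    def volt2raiju(cs, model_dst, wants_coldstart):
        # Coupler side: just schedule the cold start for the next pre-advance.
        if wants_coldstart:
            cs.doCS_next_preAdv = True
            cs.modelDst_next_preAdv = model_dst

    def apply_bcs(cs, run_coldstart):
        # RAIJU side: consume the request after the active domain is set.
        if cs.doCS_next_preAdv:
            run_coldstart(cs.modelDst_next_preAdv, accumulate=True)
            cs.doCS_next_preAdv = False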
@@ -337,6 +337,11 @@ module tubehelper

   bTube%nTrc = 0

+  !Zero pot
+  bTube%pot = 0.0
+  bTube%crpot = 0.0
+  bTube%potc = 0.0
+
 end subroutine FreshenTube
@@ -99,6 +99,8 @@ module voltapp

   ! adjust XMl reader root
   call xmlInp%SetRootStr('Kaiju/Voltron')
+  ! Make sure verbosity is still right after others do stuff with the reader
+  call xmlInp%SetVerbose(vApp%isLoud)

   !Initialize planet information
   call getPlanetParams(vApp%planet, xmlInp)
@@ -209,7 +211,7 @@ module voltapp
   gApp%Model%t = vApp%time / gApp%Model%Units%gT0
   gApp%State%time = gApp%Model%t

-  call genVoltShellGrid(vApp, xmlInp)
+  call genVoltShellGrid(vApp, xmlInp, gApp%Grid%Nkp)
   call initVoltState(vApp)

   endif
@@ -754,11 +756,14 @@ module voltapp
 end subroutine init_volt2Chmp


-subroutine genVoltShellGrid(vApp, xmlInp)
+subroutine genVoltShellGrid(vApp, xmlInp, gamRes)
   class(voltApp_T) , intent(inout) :: vApp
   type(XML_Input_T), intent(in) :: xmlInp
+  integer, intent(in) :: gamRes

   character(len=strLen) :: gType
+  integer :: Nt_def, Np_def
+  !! Default number of active cells in theta and phi unless xml says otherwise
   integer :: Nt, Np
   !! Number of active cells in theta and phi
   integer :: Ng
@@ -786,8 +791,30 @@ module voltapp
   ! Note: Nt is for a single hemisphere, we will manually double it in a minute
   ! TODO: This means we will always have even number of total cells, and a cell interfce right on the equator
   ! Can upgrade to allow for odd number later
-  call xmlInp%Set_Val(Nt, "grid/Nt", 180 ) ! 1 deg res default for uniform grid
-  call xmlInp%Set_Val(Np, "grid/Np", 360) ! 1 deg res default

+  ! First determine defaults
+  if (gamRes<=64) then
+     ! DBL
+     Nt_def = 90
+     Np_def = 180
+  else if (gamRes<=128) then
+     ! QUAD
+     Nt_def = 180
+     Np_def = 360
+  else if (gamRes<=256) then
+     ! OCT
+     Nt_def = 360
+     Np_def = 720
+  else
+     ! HEX or above
+     ! Idk good luck
+     Nt_def = 540
+     Np_def = 1440
+  endif

+  call xmlInp%Set_Val(Nt, "grid/Nt", Nt_def) ! 1 deg res default for uniform grid
+  call xmlInp%Set_Val(Np, "grid/Np", Np_def) ! 1 deg res default
   ! Ghost cells
   call xmlInp%Set_Val(Ng, "grid/Ng", 4) ! # of ghosts in every direction
   nGhosts = 0
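Note on genVoltShellGrid above: the default shell-grid resolution now follows the GAMERA grid size passed in as gamRes (gApp%Grid%Nkp). Sketch of the mapping (illustrative Python, thresholds from the diff):

    def default_shell_grid(gam_res):
        if gam_res <= 64:        # DBL
            return 90, 180
        elif gam_res <= 128:     # QUAD
            return 180, 360
        elif gam_res <= 256:     # OCT
            return 360, 720
        else:                    # HEX and above
            return 540, 1440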
@@ -384,7 +384,7 @@ def create_magnetosphere_quicklook_plots(xml_files: list):
        os.chdir(results_dir)

        # Create the quicklook plot.
-       cmd = f"msphpic.py -id {runid}"
+       cmd = f"msphpic -id {runid}"
        _ = subprocess.run(cmd, shell=True, check=True)

        # Move back to the starting directory.
@@ -464,7 +464,7 @@ def create_REMIX_quicklook_plots(xml_files: list):
        os.chdir(results_dir)

        # Create the quicklook plots.
-       cmd = f"mixpic.py -id {runid}"
+       cmd = f"mixpic -id {runid}"
        _ = subprocess.run(cmd, shell=True, check=True)

        # Add the plots to the lists.
@@ -517,83 +517,6 @@ def merge_REMIX_quicklook_plots(plots_north: list, plots_south: list):
    return merged_plot_north, merged_plot_south


-def create_RCM_quicklook_plots(xml_files: list):
-    """Create the RCM quicklook plot for each run.
-
-    Create the RCM quicklook plot for each run.
-
-    Parameters
-    ----------
-    xml_files : list of str
-        List of XML files describing each run.
-
-    Returns
-    -------
-    quicklook_plots : list of str
-        Path to each quicklook file.
-
-    Raises
-    ------
-    None
-    """
-    # Save the current directory.
-    cwd = os.getcwd()
-
-    # Make the RCM quicklook plot for each run.
-    quicklook_plots = []
-    for xml_file in xml_files:
-
-        # Extract the run ID.
-        runid = common.extract_runid(xml_file)
-
-        # Compute the path to the results directory.
-        results_dir = os.path.split(xml_file)[0]
-
-        # Move to the results directory.
-        os.chdir(results_dir)
-
-        # Create the quicklook plot.
-        cmd = f"rcmpic.py -id {runid}"
-        _ = subprocess.run(cmd, shell=True, check=True)
-
-        # Add the plot to the list.
-        path = os.path.join(results_dir, "qkrcmpic.png")
-        quicklook_plots.append(path)
-
-    # Return to the original directory.
-    os.chdir(cwd)
-
-    # Return the list of quicklook plots.
-    return quicklook_plots
-
-
-def merge_RCM_quicklook_plots(quicklook_plots: list):
-    """Merge the RCM quicklook plots for all runs.
-
-    Merge the RCM quicklook plots for all runs.
-
-    Parameters
-    ----------
-    quicklook_plots : list of str
-        List of quicklook plots to merge.
-
-    Returns
-    -------
-    merged_plot : str
-        Path to merged quicklook file.
-
-    Raises
-    ------
-    None
-    """
-    # Merge RCM quicklooks.
-    merged_plot = "combined_qkrcmpic.png"
-    cmd = f"convert {' '.join(quicklook_plots)} -append {merged_plot}"
-    print(f"cmd = {cmd}")
-    _ = subprocess.run(cmd, shell=True, check=True)
-    return merged_plot


def compare_mage_runs(args):
    """Compare a set of MAGE model runs.

@@ -710,20 +633,6 @@ def compare_mage_runs(args):
    print(f"merged_remix_plot_n = {merged_remix_plot_n}")
    print(f"merged_remix_plot_s = {merged_remix_plot_s}")

-    # Create the RCM quicklook plots.
-    if verbose:
-        print("Creating RCM quicklook plots.")
-    rcm_plots = create_RCM_quicklook_plots(run_xml_files)
-    if debug:
-        print(f"rcm_plots = {rcm_plots}")
-
-    # Create the merged RCM quicklook plots.
-    if verbose:
-        print("Creating merged RCM quicklook plot.")
-    rcm_merged_plots = merge_RCM_quicklook_plots(rcm_plots)
-    if debug:
-        print(f"rcm_merged_plots = {rcm_merged_plots}")

    # ------------------------------------------------------------------------

    # Post images to Slack.
@@ -736,7 +645,6 @@ def compare_mage_runs(args):
        "combined_msphpic.png",
        "combined_remix_n.png",
        "combined_remix_s.png",
-        "combined_qkrcmpic.png",
    ]
    comments_to_post = [
        "Real-Time Performance\n\n",
@@ -745,7 +653,6 @@ def compare_mage_runs(args):
        "Magnetosphere Quicklook Comparison Plots\n\n",
        "REMIX (north) Quicklook Comparison Plots\n\n",
        "REMIX (south) Quicklook Comparison Plots\n\n",
-        "RCM Quicklook Comparison Plots\n\n",
    ]

    # If loud mode is on, post results to Slack.
@@ -125,67 +125,6 @@ def compare_GAMERA_results(runxml1: str, runxml2: str, verbose: bool = False):
    return TEST_PASS


-def compare_MHDRCM_results(runxml1: str, runxml2: str, verbose: bool = False):
-    """Numerically compare the MHD RCM output files from two runs.
-
-    Numerically compare the MHD RCM output files from two runs.
-
-    Parameters
-    ----------
-    runxm1 : str
-        Path to XML file describing 1st run.
-    runxm2 : str
-        Path to XML file describing 2nd run.
-    verbose : bool
-        Set to True to print verbose information during comparison.
-
-    Returns
-    -------
-    TEST_PASS or TEST_FAIL : str
-        Description of result of comparison.
-
-    Raises
-    ------
-    None
-    """
-    # Determine the directories containing the sets of results.
-    dir1 = os.path.split(runxml1)[0]
-    dir2 = os.path.split(runxml2)[0]
-
-    # Generate a sorted list of output files for the 1st run.
-    pattern1 = os.path.join(dir1, "*.mhdrcm.h5")
-    files1 = glob.glob(pattern1)
-    files = [os.path.split(f)[1] for f in files1]
-    files.sort()
-
-    # Compare each output file in the two directories.
-    # Comparisons are done with h5diff, which must be in the PATH.
-    # Attributes of the steps and other top-level groups are excluded from
-    # comparison.
-    for filename in files:
-        file1 = os.path.join(dir1, filename)
-        file2 = os.path.join(dir2, filename)
-        if verbose:
-            print(f"Numerically comparing {file1} to {file2}.")
-
-        # Compare each step, without attributes.
-        _, step_ids = kaiH5.cntSteps(file1)
-        for step_id in step_ids:
-            step_path = f"/Step#{step_id}"
-            if verbose:
-                print(f"  Comparing {step_path}.")
-            cmd = (
-                f"h5diff --exclude-attribute {step_path} {file1} {file2} "
-                f"{step_path}"
-            )
-            cproc = subprocess.run(cmd, shell=True, check=True)
-            if cproc.returncode != 0:
-                return TEST_FAIL
-
-    # Return the result of the comparison.
-    return TEST_PASS


def compare_REMIX_results(runxml1: str, runxml2: str, verbose: bool = False):
    """Numerically compare the REMIX output files from two runs.

@@ -247,67 +186,6 @@ def compare_REMIX_results(runxml1: str, runxml2: str, verbose: bool = False):
    return TEST_PASS


-def compare_RCM_results(runxml1: str, runxml2: str, verbose: bool = False):
-    """Numerically compare the RCM output files from two runs.
-
-    Numerically compare the RCM output files from two runs.
-
-    Parameters
-    ----------
-    runxm1 : str
-        Path to XML file describing 1st run.
-    runxm2 : str
-        Path to XML file describing 2nd run.
-    verbose : bool
-        Set to True to print verbose information during comparison.
-
-    Returns
-    -------
-    TEST_PASS or TEST_FAIL : str
-        Description of result of comparison.
-
-    Raises
-    ------
-    None
-    """
-    # Determine the directories containing the sets of results.
-    dir1 = os.path.split(runxml1)[0]
-    dir2 = os.path.split(runxml2)[0]
-
-    # Generate a sorted list of output files for the 1st run.
-    pattern1 = os.path.join(dir1, "*.rcm.h5")
-    files1 = glob.glob(pattern1)
-    files = [os.path.split(f)[1] for f in files1]
-    files.sort()
-
-    # Compare each result file in the two directories.
-    # Comparisons are done with h5diff, which must be in the PATH.
-    # Attributes of the steps and other top-level groups are excluded from
-    # comparison.
-    for filename in files:
-        file1 = os.path.join(dir1, filename)
-        file2 = os.path.join(dir2, filename)
-        if verbose:
-            print(f"Numerically comparing {file1} to {file2}.")
-
-        # Compare each step, without attributes.
-        _, step_ids = kaiH5.cntSteps(file1)
-        for step_id in step_ids:
-            step_path = f"/Step#{step_id}"
-            if verbose:
-                print(f"  Comparing {step_path}.")
-            cmd = (
-                f"h5diff --exclude-attribute {step_path} {file1} {file2} "
-                f"{step_path}"
-            )
-            cproc = subprocess.run(cmd, shell=True, check=True)
-            if cproc.returncode != 0:
-                return TEST_FAIL
-
-    # Return the result of the comparison.
-    return TEST_PASS


def compare_VOLTRON_results(runxml1: str, runxml2: str, verbose: bool = False):
    """Numerically compare the VOLTRON output files from two runs.

@@ -417,14 +295,6 @@ def compare_mage_runs_numerical(args: dict):
        print(f"comparison_result = {comparison_result}")
    comparison_results.append(comparison_result)

-    # Compare the MHD RCM output files.
-    if verbose:
-        print("Comparing MHD RCM output files.")
-    comparison_result = compare_MHDRCM_results(runxml1, runxml2, verbose)
-    if debug:
-        print(f"comparison_result = {comparison_result}")
-    comparison_results.append(comparison_result)
-
    # Compare the REMIX output files.
    if verbose:
        print("Comparing REMIX output files.")
@@ -433,14 +303,6 @@ def compare_mage_runs_numerical(args: dict):
        print(f"comparison_result = {comparison_result}")
    comparison_results.append(comparison_result)

-    # Compare the RCM output files.
-    if verbose:
-        print("Comparing RCM output files.")
-    comparison_result = compare_RCM_results(runxml1, runxml2, verbose)
-    if debug:
-        print(f"comparison_result = {comparison_result}")
-    comparison_results.append(comparison_result)
-
    # Compare the VOLTRON output files.
    if verbose:
        print("Comparing VOLTRON output files.")
@@ -454,10 +316,8 @@ def compare_mage_runs_numerical(args: dict):
    # Detail the test results.
    test_report_details_string = ""
    test_report_details_string += f"GAMERA: *{comparison_results[0]}*\n"
-    test_report_details_string += f"MHD RCM: *{comparison_results[1]}*\n"
-    test_report_details_string += f"REMIX: *{comparison_results[2]}*\n"
-    test_report_details_string += f"RCM: *{comparison_results[3]}*\n"
-    test_report_details_string += f"VOLTRON: *{comparison_results[4]}*\n"
+    test_report_details_string += f"REMIX: *{comparison_results[1]}*\n"
+    test_report_details_string += f"VOLTRON: *{comparison_results[2]}*\n"

    # Summarize the test results.
    if run_description is not None:
@@ -16,8 +16,7 @@
# source tree). Job submission reports to Slack only on failure, report run
# in Slack-on-fail (-s) mode.
15 00 * * * ssh derecho "/glade/campaign/hao/msphere/automated_kaiju_tests/kaiju-private/testingScripts/run_mage_tests.sh -v -b development 'unitTest.py -sv'" >> /glade/campaign/hao/msphere/automated_kaiju_tests/logs/nightly-tests-2-development.out 2>&1
- # Disable master unit test for now since it hangs.
- # 20 00 * * * ssh derecho "/glade/campaign/hao/msphere/automated_kaiju_tests/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'unitTest.py -sv'" >> /glade/campaign/hao/msphere/automated_kaiju_tests/logs/nightly-tests-2-master.out 2>&1
+ 20 00 * * * ssh derecho "/glade/campaign/hao/msphere/automated_kaiju_tests/kaiju-private/testingScripts/run_mage_tests.sh -v -b master 'unitTest.py -sv'" >> /glade/campaign/hao/msphere/automated_kaiju_tests/logs/nightly-tests-2-master.out 2>&1

# Run a weekly dash every Sunday morning for the development branch and
# compare to reference development run listed in reference_runs.txt.
@@ -20,21 +20,18 @@ echo 'The currently loaded modules are:'
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
export CONDARC="{{ condarc }}"
|
||||
export CONDA_ENVS_PATH="{{ conda_envs_path }}"
|
||||
mage_miniconda3="{{ mage_test_root }}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
__conda_setup="$('/glade/u/home/ewinter/miniconda3/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
eval "$__conda_setup"
|
||||
else
|
||||
if [ -f "$mage_miniconda3/etc/profile.d/conda.sh" ]; then
|
||||
. "$mage_miniconda3/etc/profile.d/conda.sh"
|
||||
if [ -f "/glade/u/home/ewinter/miniconda3/etc/profile.d/conda.sh" ]; then
|
||||
. "/glade/u/home/ewinter/miniconda3/etc/profile.d/conda.sh"
|
||||
else
|
||||
export PATH="$mage_miniconda3/bin:$PATH"
|
||||
export PATH="/glade/u/home/ewinter/miniconda3/bin:$PATH"
|
||||
fi
|
||||
fi
|
||||
unset __conda_setup
|
||||
|
||||
conda activate {{ conda_environment }}
|
||||
echo "The current conda environment is ${CONDA_PREFIX}."
|
||||
|
||||
@@ -42,10 +39,6 @@ echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
echo "The kaiju software is located at ${KAIJUHOME}."
|
||||
|
||||
echo 'Setting up kaipy environment.'
|
||||
source {{ kaipy_private_root }}/kaipy/scripts/setupEnvironment.sh
|
||||
echo "The kaipy software is located at ${KAIPYHOME}."
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
@@ -82,10 +75,10 @@ echo 'Generating the solar wind boundary condition file.'
|
||||
{{ cda2wind_cmd }}
|
||||
echo "The solar wind boundary condition file is `ls bcwind.h5`."
|
||||
|
||||
# Generate the RCM configuration file.
|
||||
echo 'Generating the RCM configuration file.'
|
||||
{{ genRCM_cmd }}
|
||||
echo "The RCM configuration file is `ls rcmconfig.h5`."
|
||||
# Generate the RAIJU configuration file.
|
||||
echo 'Generating the RAIJU configuration file.'
|
||||
genRAIJU
|
||||
echo "The RAIJU configuration file is `ls raijuconfig.h5`."
|
||||
|
||||
# Moved this setting here to avoid error from genLFM.py.
|
||||
export OMP_NUM_THREADS=128
|
||||
@@ -99,7 +92,7 @@ eval $cmd
|
||||
|
||||
# Run the comparison. Post results to Slack if any test fails.
|
||||
reference_run=`cat /glade/u/home/ewinter/mage_testing/test_runs/derecho_configuration_check_runs.txt`
|
||||
cmd="python $KAIJUHOME/testingScripts/compare_mage_runs_numerical.py -sv --run_description='`derecho` configuration check' $reference_run `pwd`/weeklyDashGo.xml >& compare_mage_runs_numerical.out"
|
||||
cmd="python $KAIJUHOME/testingScripts/compare_mage_runs_numerical.py -sv --run_description='derecho configuration check' $reference_run `pwd`/weeklyDashGo.xml >& compare_mage_runs_numerical.out"
|
||||
echo "Run comparison command is:"
|
||||
echo $cmd
|
||||
eval $cmd
|
||||
|
||||
@@ -143,16 +143,13 @@ def derecho_configuration_check(args: dict):
|
||||
make_cmd = "make voltron_mpi.x"
|
||||
|
||||
# Create the command to generate the LFM grid.
|
||||
genLFM_cmd = "genLFM.py -gid Q"
|
||||
genLFM_cmd = "genLFM -gid Q"
|
||||
|
||||
# Create the command to generate the solar wind boundary condition file.
|
||||
cda2wind_cmd = (
|
||||
"cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
"cda2wind -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
)
|
||||
|
||||
# Create the command to generate the RCM configuration.
|
||||
genRCM_cmd = "genRCM.py"
|
||||
|
||||
# Create the command for launching an MPI program.
|
||||
mpiexec_cmd = f"mpiexec {KAIJUHOME}/scripts/preproc/pinCpuCores.sh"
|
||||
|
||||
@@ -179,13 +176,10 @@ def derecho_configuration_check(args: dict):
|
||||
pbs_options["job_priority"] = os.environ["DERECHO_TESTING_PRIORITY"]
|
||||
pbs_options["walltime"] = "08:00:00"
|
||||
pbs_options["modules"] = module_names
|
||||
pbs_options["condarc"] = os.environ["CONDARC"]
|
||||
pbs_options["conda_envs_path"] = os.environ["CONDA_ENVS_PATH"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["mage_test_root"] = os.environ["MAGE_TEST_ROOT"]
|
||||
pbs_options["mage_test_set_root"] = os.environ["MAGE_TEST_SET_ROOT"]
|
||||
pbs_options["kaijuhome"] = KAIJUHOME
|
||||
pbs_options["kaipy_private_root"] = os.environ["KAIPY_PRIVATE_ROOT"]
|
||||
pbs_options["tmpdir"] = os.environ["TMPDIR"]
|
||||
pbs_options["slack_bot_token"] = os.environ["SLACK_BOT_TOKEN"]
|
||||
pbs_options["branch_or_commit"] = os.environ["BRANCH_OR_COMMIT"]
|
||||
@@ -193,7 +187,6 @@ def derecho_configuration_check(args: dict):
|
||||
pbs_options["make_cmd"] = make_cmd
|
||||
pbs_options["genLFM_cmd"] = genLFM_cmd
|
||||
pbs_options["cda2wind_cmd"] = cda2wind_cmd
|
||||
pbs_options["genRCM_cmd"] = genRCM_cmd
|
||||
pbs_options["mpiexec_cmd"] = mpiexec_cmd
|
||||
pbs_options["voltron_cmd"] = voltron_cmd
|
||||
|
||||
|
||||
@@ -19,11 +19,9 @@ module load {{ module }}
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/.condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/.conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
@@ -45,7 +43,6 @@ conda activate {{ conda_environment }}
|
||||
|
||||
echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
source {{ kaipyhome }}/kaipy/scripts/setupEnvironment.sh
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
|
||||
@@ -305,7 +305,7 @@ def intelChecks(args: dict):
|
||||
# Generate bcwind data file.
|
||||
if verbose:
|
||||
print("Creating bcwind data file.")
|
||||
cmd = "cda2wind.py -t0 2016-08-09T09:00:00 -t1 2016-08-09T11:00:00"
|
||||
cmd = "cda2wind -t0 2016-08-09T09:00:00 -t1 2016-08-09T11:00:00"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -315,7 +315,7 @@ def intelChecks(args: dict):
|
||||
f"{module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
"See testing log for output from cda2wind.py.\n"
|
||||
"See testing log for output from cda2wind.\n"
|
||||
"Skipping remaining steps for module set"
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -325,7 +325,7 @@ def intelChecks(args: dict):
|
||||
# Generate the LFM grid file.
|
||||
if verbose:
|
||||
print("Creating LFM grid file.")
|
||||
cmd = "genLFM.py -gid D"
|
||||
cmd = "genLFM -gid D"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -335,7 +335,7 @@ def intelChecks(args: dict):
|
||||
f"{module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
"See testing log for output from genLFM.py.\n"
|
||||
"See testing log for output from genLFM.\n"
|
||||
"Skipping remaining steps for module set"
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -345,7 +345,7 @@ def intelChecks(args: dict):
|
||||
# Generate the Raiju configuration file.
|
||||
if verbose:
|
||||
print("Creating Raiju configuration file.")
|
||||
cmd = "genRAIJU.py"
|
||||
cmd = "genRAIJU"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -355,7 +355,7 @@ def intelChecks(args: dict):
|
||||
f" for module set {module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
"See testing log for output from genRAIJU.py.\n"
|
||||
"See testing log for output from genRAIJU.\n"
|
||||
"Skipping remaining steps for module set "
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -373,7 +373,6 @@ def intelChecks(args: dict):
|
||||
pbs_options["job_priority"] = os.environ["DERECHO_TESTING_PRIORITY"]
|
||||
pbs_options["modules"] = module_names
|
||||
pbs_options["kaijuhome"] = KAIJUHOME
|
||||
pbs_options["kaipyhome"] = os.environ["KAIPYHOME"]
|
||||
pbs_options["tmpdir"] = os.environ["TMPDIR"]
|
||||
pbs_options["slack_bot_token"] = os.environ["SLACK_BOT_TOKEN"]
|
||||
pbs_options["mage_test_root"] = os.environ["MAGE_TEST_ROOT"]
|
||||
|
||||
@@ -19,11 +19,11 @@ module load {{ module }}
|
||||
echo 'The currently loaded modules are:'
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
export CONDARC="{{ condarc }}"
|
||||
export CONDA_ENVS_PATH="{{ conda_envs_path }}"
|
||||
mage_miniconda3="{{ mage_test_root }}/miniconda3"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
@@ -47,10 +47,6 @@ echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
echo "The kaiju software is located at ${KAIJUHOME}."
|
||||
|
||||
echo 'Setting up kaipy environment.'
|
||||
source {{ kaipy_private_root }}/kaipy/scripts/setupEnvironment.sh
|
||||
echo "The kaipy software is located at ${KAIPYHOME}."
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
|
||||
@@ -84,6 +84,35 @@ MAGE_REPRODUCIBILITY_CHECK_PBS_TEMPLATE_FILE = os.path.join(
|
||||
MAGE_REPRODUCIBILITY_CHECK_PBS_SCRIPT = "mage_reproducibility_check.pbs"
|
||||
|
||||
|
||||
def create_command_line_parser():
|
||||
"""Create the command-line argument parser.
|
||||
|
||||
Create the parser for command-line arguments.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
None
|
||||
|
||||
Returns
|
||||
-------
|
||||
parser : argparse.ArgumentParser
|
||||
Command-line argument parser for this script.
|
||||
|
||||
Raises
|
||||
------
|
||||
None
|
||||
"""
|
||||
parser = common.create_command_line_parser(DESCRIPTION)
|
||||
parser.add_argument(
|
||||
"--module_set_file", "-f", default=DEFAULT_MODULE_SET_FILE,
|
||||
help=(
|
||||
"Path to text file containing set of modules to build with "
|
||||
"(default: %(default)s)"
|
||||
)
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
def mage_reproducibility_check(args: dict):
|
||||
"""Perform a MAGE reproducibility check.
|
||||
|
||||
@@ -146,7 +175,6 @@ def mage_reproducibility_check(args: dict):
|
||||
module_names, cmake_environment, cmake_options = (
|
||||
common.read_build_module_list_file(module_set_file)
|
||||
)
|
||||
print(f"module_names = {module_names}")
|
||||
|
||||
# Extract the name of the list.
|
||||
module_set_name = os.path.split(module_set_file)[-1].rstrip(".lst")
|
||||
@@ -213,12 +241,11 @@ def mage_reproducibility_check(args: dict):
|
||||
pbs_template = Template(template_content)
|
||||
|
||||
# Assemble commands needed in the PBS script.
|
||||
genLFM_cmd = "genLFM.py -gid Q"
|
||||
genLFM_cmd = "genLFM -gid Q"
|
||||
cda2wind_cmd = (
|
||||
"cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
"cda2wind -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
)
|
||||
genRCM_cmd = "genRCM.py"
|
||||
genRaiju_cmd = "genRAIJU.py"
|
||||
genRaiju_cmd = "genRAIJU"
|
||||
mpiexec_cmd = f"mpiexec {KAIJUHOME}/scripts/preproc/pinCpuCores.sh"
|
||||
voltron_cmd = "../bin/voltron_mpi.x weeklyDashGo.xml"
|
||||
|
||||
@@ -241,13 +268,10 @@ def mage_reproducibility_check(args: dict):
|
||||
pbs_options["job_priority"] = os.environ["DERECHO_TESTING_PRIORITY"]
|
||||
pbs_options["walltime"] = "08:00:00"
|
||||
pbs_options["modules"] = module_names
|
||||
pbs_options["condarc"] = os.environ["CONDARC"]
|
||||
pbs_options["conda_envs_path"] = os.environ["CONDA_ENVS_PATH"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["mage_test_root"] = os.environ["MAGE_TEST_ROOT"]
|
||||
pbs_options["mage_test_set_root"] = os.environ["MAGE_TEST_SET_ROOT"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["kaijuhome"] = KAIJUHOME
|
||||
pbs_options["kaipy_private_root"] = os.environ["KAIPY_PRIVATE_ROOT"]
|
||||
pbs_options["tmpdir"] = os.environ["TMPDIR"]
|
||||
pbs_options["slack_bot_token"] = os.environ["SLACK_BOT_TOKEN"]
|
||||
pbs_options["branch_or_commit"] = os.environ["BRANCH_OR_COMMIT"]
|
||||
@@ -255,7 +279,6 @@ def mage_reproducibility_check(args: dict):
|
||||
pbs_options["make_cmd"] = make_cmd
|
||||
pbs_options["genLFM_cmd"] = genLFM_cmd
|
||||
pbs_options["cda2wind_cmd"] = cda2wind_cmd
|
||||
pbs_options["genRCM_cmd"] = genRCM_cmd
|
||||
pbs_options["genRaiju_cmd"] = genRaiju_cmd
|
||||
pbs_options["mpiexec_cmd"] = mpiexec_cmd
|
||||
pbs_options["voltron_cmd"] = voltron_cmd
|
||||
@@ -326,13 +349,10 @@ def mage_reproducibility_check(args: dict):
|
||||
pbs_options["job_priority"] = os.environ["DERECHO_TESTING_PRIORITY"]
|
||||
pbs_options["walltime"] = "02:00:00"
|
||||
pbs_options["modules"] = module_names
|
||||
pbs_options["condarc"] = os.environ["CONDARC"]
|
||||
pbs_options["conda_envs_path"] = os.environ["CONDA_ENVS_PATH"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["mage_test_root"] = os.environ["MAGE_TEST_ROOT"]
|
||||
pbs_options["mage_test_set_root"] = os.environ["MAGE_TEST_SET_ROOT"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["kaijuhome"] = KAIJUHOME
|
||||
pbs_options["kaipy_private_root"] = os.environ["KAIPY_PRIVATE_ROOT"]
|
||||
pbs_options["tmpdir"] = os.environ["TMPDIR"]
|
||||
pbs_options["slack_bot_token"] = os.environ["SLACK_BOT_TOKEN"]
|
||||
pbs_options["branch_or_commit"] = os.environ["BRANCH_OR_COMMIT"]
|
||||
@@ -403,7 +423,7 @@ def mage_reproducibility_check(args: dict):
|
||||
def main():
|
||||
"""Driver for command-line version of code."""
|
||||
# Set up the command-line parser.
|
||||
parser = common.create_command_line_parser(DESCRIPTION)
|
||||
parser = create_command_line_parser()
|
||||
|
||||
# # Add additional arguments specific to this script.
|
||||
# parser.add_argument(
|
||||
|
||||
@@ -12,9 +12,9 @@
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/.condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/.conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
@@ -54,7 +54,7 @@ echo 'The active environment variables are:'
|
||||
printenv
|
||||
|
||||
# Process the data and generate the output video
|
||||
python $KAIPYHOME/kaipy/scripts/quicklook/gamerrVid.py -d1 {{ case1F }} -id1 {{ case1id }} -d2 {{ case2F }} -id2 {{ case2id }} -o {{ frameFolder }}/{{ caseName }} -ts {{ ts }} -te {{ te }} -dt {{ dt }} -Nth 9 >& {{ caseName }}.out
|
||||
gamerrVid -d1 {{ case1F }} -id1 {{ case1id }} -d2 {{ case2F }} -id2 {{ case2id }} -o {{ frameFolder }}/{{ caseName }} -ts {{ ts }} -te {{ te }} -dt {{ dt }} -Nth 9 >& {{ caseName }}.out
|
||||
|
||||
# copy output video to test root folder
|
||||
cp {{ frameFolder }}/{{ caseName }}.mp4 $MAGE_TEST_SET_ROOT/.
|
||||
|
||||
@@ -12,6 +12,7 @@
|
||||
echo "Job $PBS_JOBID started at `date` on `hostname` in directory `pwd`."
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
|
||||
@@ -23,6 +23,7 @@ module load {{ module }}
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
<Kaiju>
|
||||
<VOLTRON>
|
||||
<time tFin="7201.0"/>
|
||||
<spinup doSpin="T" tSpin="3600.0"/>
|
||||
<spinup doSpin="T" tSpin="3600.0" tIO="-600.0"/>
|
||||
<output dtOut="60.0" tsOut="100"/>
|
||||
<coupling dtCouple="5.0" imType="RAIJU" doQkSquish="F" qkSquishStride="2" doAsyncCoupling="F" doSerial="{{ serial_coupling }}"/>
|
||||
<restart dtRes="3600.0"/>
|
||||
@@ -41,7 +41,7 @@
|
||||
<output loudConsole="T" doFat="F" doLossExtras="F" doDebug="F" writeGhosts="F"/>
|
||||
<grid gType="SHGRID" ThetaL="15" ThetaU="50"/>
|
||||
<domain tail_buffer="15.0" sun_buffer="15.0" tail_active="12.0" sun_active="12.0"/>
|
||||
<sim pdmb="0.75"/>
|
||||
<sim pdmb="0.75" doSmoothgrads="F"/>
|
||||
<config fname="raijuconfig.h5"/>
|
||||
<plasmasphere doPsphere="T" doExcessMap="T"/>
|
||||
<losses doLosses="T" doCX="T" doSS="T" doCC="T"/>
|
||||
|
||||
@@ -39,9 +39,6 @@ TEST_DIRECTORY = os.path.join(MAGE_TEST_SET_ROOT, 'compTest')
|
||||
# Home directory of kaiju installation
|
||||
KAIJUHOME = os.environ['KAIJUHOME']
|
||||
|
||||
# Home directory of kaipy installation
|
||||
KAIPYHOME = os.environ['KAIPYHOME']
|
||||
|
||||
# Path to directory containing the test scripts
|
||||
TEST_SCRIPTS_DIRECTORY = os.path.join(KAIJUHOME, 'testingScripts')
|
||||
|
||||
@@ -610,7 +607,7 @@ def main():
|
||||
# Generate the LFM grid file.
|
||||
if verbose:
|
||||
print('Creating LFM grid file.')
|
||||
cmd = 'genLFM.py -gid D'
|
||||
cmd = 'genLFM -gid D'
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -620,7 +617,7 @@ def main():
|
||||
f"{module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See testing log for output from genLFM.py.\n'
|
||||
'See testing log for output from genLFM.\n'
|
||||
'Skipping remaining steps for module set'
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -628,7 +625,7 @@ def main():
|
||||
# Generate the solar wind boundary condition file.
|
||||
if verbose:
|
||||
print('Creating solar wind initial conditions file.')
|
||||
cmd = 'cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00'
|
||||
cmd = 'cda2wind -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00'
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -638,7 +635,7 @@ def main():
|
||||
f" for module set {module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See testing log for output from cda2wind.py.\n'
|
||||
'See testing log for output from cda2wind.\n'
|
||||
'Skipping remaining steps for module set'
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -646,7 +643,7 @@ def main():
|
||||
# Generate the Raiju configuration file.
|
||||
if verbose:
|
||||
print('Creating Raiju configuration file.')
|
||||
cmd = 'genRAIJU.py'
|
||||
cmd = 'genRAIJU'
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
@@ -656,7 +653,7 @@ def main():
|
||||
f" for module set {module_set_name}.\n"
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
'See testing log for output from genRCM.py.\n'
|
||||
'See testing log for output from genRAIJU.\n'
|
||||
'Skipping remaining steps for module set '
|
||||
f"{module_set_name}\n")
|
||||
continue
|
||||
@@ -668,7 +665,6 @@ def main():
|
||||
base_pbs_options['job_priority'] = os.environ['DERECHO_TESTING_PRIORITY']
|
||||
base_pbs_options['modules'] = module_names
|
||||
base_pbs_options['kaijuhome'] = KAIJUHOME
|
||||
base_pbs_options['kaipyhome'] = KAIPYHOME
|
||||
base_pbs_options['tmpdir'] = os.environ['TMPDIR']
|
||||
base_pbs_options['slack_bot_token'] = os.environ['SLACK_BOT_TOKEN']
|
||||
base_pbs_options['mage_test_root'] = os.environ['MAGE_TEST_ROOT']
|
||||
|
||||
@@ -19,11 +19,11 @@ module load {{ module }}
|
||||
echo 'The currently loaded modules are:'
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
export CONDARC="{{ condarc }}"
|
||||
export CONDA_ENVS_PATH="{{ conda_envs_path }}"
|
||||
mage_miniconda3="{{ mage_test_root }}/miniconda3"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
@@ -47,10 +47,6 @@ echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
echo "The kaiju software is located at ${KAIJUHOME}."
|
||||
|
||||
echo 'Setting up kaipy environment.'
|
||||
source {{ kaipy_private_root }}/kaipy/scripts/setupEnvironment.sh
|
||||
echo "The kaipy software is located at ${KAIPYHOME}."
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
|
||||
381
testingScripts/run_mage_tests.sh
Executable file
@@ -0,0 +1,381 @@
|
||||
#!/usr/bin/bash
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# IMPORTANT NOTES:
|
||||
|
||||
# This bash script was designed to run from a cron job on the 'cron' host at
|
||||
# NCAR, using ssh to execute this script on a derecho login node. See
|
||||
# kaiju/testingScripts/crontab for an example of how to invoke this script.
|
||||
|
||||
# SSH must be configured so that ssh from cron to derecho does not require a
|
||||
# password.
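# A minimal sketch of one way to set that up (key name is illustrative, not
# necessarily the key used here):
#   ssh-keygen -t ed25519 -N '' -f ~/.ssh/id_ed25519_kaiju_testing
#   ssh-copy-id -i ~/.ssh/id_ed25519_kaiju_testing.pub derecho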
|
||||
|
||||
# ############################################################################
|
||||
|
||||
echo '***********************************************************************'
|
||||
echo "Starting $0 at `date` on `hostname`."
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Force this script to exit on any failure.
|
||||
set -e
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# These *should* be the only variables you have to change when moving the
|
||||
# testing environment around or changing python environments.
|
||||
|
||||
# Root of kaiju testing environment - *everything* goes under here, except
|
||||
# for the outputs from the individual test runs.
|
||||
export MAGE_TEST_ROOT='/glade/campaign/hao/msphere/automated_kaiju_tests'
|
||||
|
||||
# Root of kaiju test results tree.
|
||||
export MAGE_TEST_RESULTS_ROOT='/glade/derecho/scratch/ewinter/mage_testing'
|
||||
|
||||
# Location of the miniconda installation used for testing.
|
||||
export MAGE_MINICONDA='/glade/u/home/ewinter/miniconda3'
|
||||
|
||||
# Setup command for conda.
|
||||
__conda_setup="$('/glade/u/home/ewinter/miniconda3/bin/conda' 'shell.bash' 'hook' 2> /dev/null)"
|
||||
|
||||
# conda environment for testing
|
||||
export CONDA_ENVIRONMENT='kaiju-3.12-testing'
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Other exported environment variables
|
||||
|
||||
# Path to directory containing the dateime-stamped directories for individual
|
||||
# sets of test runs.
|
||||
export MAGE_TEST_RUNS_ROOT="${MAGE_TEST_RESULTS_ROOT}/test_runs"
|
||||
|
||||
# PBS account to use for running tests on derecho
|
||||
export DERECHO_TESTING_ACCOUNT='P28100045'
|
||||
|
||||
# PBS queue to use for running tests on derecho
|
||||
export DERECHO_TESTING_QUEUE='main'
|
||||
|
||||
# PBS priority to use for running tests on derecho
|
||||
export DERECHO_TESTING_PRIORITY='economy'
|
||||
|
||||
# Set the token for sending messages to Slack.
|
||||
export SLACK_BOT_TOKEN=`cat $HOME/.ssh/slack.txt`
|
||||
|
||||
# IMPORTANT: Set this environment variable to force the python print()
|
||||
# function to automatically flush its output in all of the testing scripts.
|
||||
# This will ensure that output from the testing scripts is logged in the order
|
||||
# that it is created.
|
||||
export PYTHONUNBUFFERED='TRUE'
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
# Non-exported variables used by this script
|
||||
|
||||
# Path to the SSH key to use for running the tests. The key must have no
|
||||
# passphrase. This is needed to allow passwordless access to BitBucket.
|
||||
ssh_key_for_testing='/glade/u/home/ewinter/.ssh/id_rsa_kaiju_testing'
|
||||
|
||||
# Setup script for CDF code
|
||||
cdf_setup_script="${MAGE_TEST_ROOT}/local/cdf/3.9.0/bin/definitions.B"
|
||||
|
||||
# Address of kaiju-private repository on BitBucket.
|
||||
kaiju_repository='git@bitbucket.org:aplkaiju/kaiju-private.git'
|
||||
|
||||
# Name of local directory containing clone of repository.
|
||||
local_kaiju_name='kaiju'
|
||||
|
||||
# Default kaiju code branch to test if no branch, commit, or tag is specified.
|
||||
default_branch_to_test='development'
|
||||
|
||||
# Output string to separate results from different tests.
|
||||
test_output_separator='------------------------------------------------------'
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Define the command-line help function.
|
||||
Help()
|
||||
{
|
||||
# Display Help
|
||||
echo 'Control script for running MAGE tests via cron and ssh.'
|
||||
echo
|
||||
echo "Syntax: run_mage_tests.sh [-b branch] [-c commit] [-d] [-h] [-v] 'test1[,test2,...]'"
|
||||
echo 'options:'
|
||||
echo 'b branch Run tests using this branch'
|
||||
echo 'c commit Run tests using this commit (or tag)'
|
||||
echo 'd Use debug mode.'
|
||||
echo 'h Print this help message.'
|
||||
echo 'v Use verbose mode.'
|
||||
echo
|
||||
echo 'If no branch or commit is specified, the latest commit on the development branch will be used for the tests.'
|
||||
echo "Each test can have its own options, e.g. 'buildTest.py -d,unitTest.py -lv'"
|
||||
}
|
||||
|
||||
# Process command-line options.
|
||||
branch=''
|
||||
commit=''
|
||||
debug=false
|
||||
verbose=false
|
||||
while getopts ':b:c:dhv' option; do
|
||||
case $option in
|
||||
b) # Test a specific branch
|
||||
branch=$OPTARG;;
|
||||
c) # Test a specific commit or tag
|
||||
commit=$OPTARG;;
|
||||
d) # debug mode
|
||||
debug=true;;
|
||||
h) # display Help
|
||||
Help
|
||||
exit;;
|
||||
v) # verbose mode
|
||||
verbose=true;;
|
||||
\?) # Invalid option
|
||||
echo 'Error: Invalid option'
|
||||
exit 1;;
|
||||
esac
|
||||
done
|
||||
|
||||
if $debug; then
|
||||
echo "branch=${branch}"
|
||||
echo "commit=${commit}"
|
||||
echo "debug=${debug}"
|
||||
echo "help=${help}"
|
||||
echo "verbose=${verbose}"
|
||||
fi
|
||||
|
||||
# Fetch the branch or commit to test. If neither specified, use the default.
|
||||
if [[ -n $branch && -n $commit ]]; then
|
||||
echo 'Cannot specify branch and commit together!'
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z $branch && -z $commit ]]; then
|
||||
branch=$default_branch_to_test
|
||||
fi
|
||||
|
||||
# At this point, either branch is specified, or commit/tag is specified.
|
||||
# There should be no case where both are unspecified, or both are specified.
|
||||
if [[ -n $branch ]]; then
|
||||
export BRANCH_OR_COMMIT=$branch
|
||||
else
|
||||
export BRANCH_OR_COMMIT=$commit
|
||||
fi
|
||||
if $debug; then
|
||||
echo "BRANCH_OR_COMMIT=${BRANCH_OR_COMMIT}"
|
||||
fi
|
||||
|
||||
# Fetch the list of tests to run.
|
||||
tests_to_run_str=$BASH_ARGV
|
||||
if $debug; then
|
||||
echo "tests_to_run_str=${tests_to_run_str}"
|
||||
fi
|
||||
|
||||
# Split the test list string into an array.
|
||||
# Tests are separated by comma, no spaces.
|
||||
IFS="," tests_to_run=($tests_to_run_str)
|
||||
if $debug; then
|
||||
echo 'The test commands to run are:'
|
||||
for test_cmd in "${tests_to_run[@]}"
|
||||
do
|
||||
echo $test_cmd
|
||||
done
|
||||
fi
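# Illustrative example (hypothetical test list): with
#   tests_to_run_str='buildTest.py -d,unitTest.py -lv'
# the array holds two elements, 'buildTest.py -d' and 'unitTest.py -lv'.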
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Load the SSH private key for testing. This is needed for BitBucket access.
|
||||
# Note that this key does not have a passphrase, so it can easily be used
|
||||
# from a cron job.
|
||||
if $verbose; then
|
||||
echo 'Loading SSH key into key agent.'
|
||||
fi
|
||||
eval `ssh-agent -s`
|
||||
ssh-add $ssh_key_for_testing
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Set up the module system (needed when running from cron, harmless
|
||||
# otherwise).
|
||||
if $verbose; then
|
||||
echo 'Setting up module system.'
|
||||
fi
|
||||
source /etc/profile.d/z00_modules.sh
|
||||
|
||||
# List at-start modules.
|
||||
# The 2>&1 (no spaces!) is needed since the 'module' command sends
|
||||
# output to stderr by default, and so it is lost when stdout is sent
|
||||
# back to the user.
|
||||
if $verbose; then
|
||||
echo 'At start, the loaded modules are:'
|
||||
module list 2>&1
|
||||
fi
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Activate the conda installation for MAGE testing.
|
||||
if $verbose; then
|
||||
echo "Setting up conda environment for MAGE testing ${CONDA_ENVIRONMENT}."
|
||||
fi
|
||||
|
||||
# This code is based on the code that the miniconda installer puts into
|
||||
# ~/.bashrc or ~/.bash_profile when a user installs miniconda.
|
||||
# This code is needed since $HOME/.bashrc is not run.
|
||||
if [ $? -eq 0 ]; then
|
||||
eval "$__conda_setup"
|
||||
else
|
||||
if [ -f "${MAGE_MINICONDA}/etc/profile.d/conda.sh" ]; then
|
||||
. "${MAGE_MINICONDA}/etc/profile.d/conda.sh"
|
||||
else
|
||||
export PATH="${MAGE_MINICONDA}/bin:$PATH"
|
||||
fi
|
||||
fi
|
||||
unset __conda_setup
|
||||
conda activate $CONDA_ENVIRONMENT
|
||||
if $verbose; then
|
||||
echo "conda environment is `echo $CONDA_PREFIX`"
|
||||
fi
|
||||
|
||||
if $verbose; then
|
||||
echo 'The installed version of kaipy in this environment is:'
|
||||
conda list kaipy
|
||||
fi
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Make the CDF library available (needed by the satellite comparison
|
||||
# python scripts).
|
||||
if $verbose; then
|
||||
echo 'Sourcing CDF setup script.'
|
||||
fi
|
||||
source $cdf_setup_script
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Create the ISO 8601 datetime stamp for this set of tests.
|
||||
testing_datetime=`date --iso-8601=seconds`
|
||||
if $debug; then
|
||||
echo "testing_datetime=${testing_datetime}"
|
||||
fi
|
||||
|
||||
# <HACK>
|
||||
# The make tool cannot handle file or directory names that contain a colon
|
||||
# (':'). Process the ISO 8601 date time string from the ISO 8601 form:
|
||||
# YYYY-MM-DDTHH:mm:ss[+-]hh:mm
|
||||
# into the more compact form:
|
||||
# YYYYMMDD_HHMMSS
|
||||
# The sed commands to do this are, in order:
|
||||
# s/.\{6\}$// - Remove the time zone offset (last 6 characters)
|
||||
# s/-//g - Delete all '-'.
|
||||
# s/\://g - Delete all ':'.
|
||||
# s/T/_/ - Convert 'T' separating date and time to underscore.
|
||||
testing_datetime=`echo $testing_datetime | sed -e 's/.\{6\}$//' -e 's/-//g' -e 's/\://g' -e 's/T/_/'`
|
||||
if $debug; then
|
||||
echo "testing_datetime=${testing_datetime}"
|
||||
fi
|
||||
# </HACK>
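# Worked example of the pipeline above (hypothetical timestamp):
#   2025-01-05T00:15:30-07:00  ->  20250105_001530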
|
||||
|
||||
# Create a directory to hold all of the tests for this set of tests.
|
||||
test_set_dir="${testing_datetime}-${BRANCH_OR_COMMIT}"
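# e.g. (illustrative): 20250105_001530-development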
|
||||
if $debug; then
|
||||
echo "test_set_dir=${test_set_dir}"
|
||||
fi
|
||||
export MAGE_TEST_SET_ROOT="${MAGE_TEST_RUNS_ROOT}/${test_set_dir}"
|
||||
if $verbose; then
|
||||
echo "Creating directory for this set of tests at ${MAGE_TEST_SET_ROOT}."
|
||||
fi
|
||||
mkdir $MAGE_TEST_SET_ROOT
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Move to the directory for this set of tests.
|
||||
cd $MAGE_TEST_SET_ROOT
|
||||
|
||||
# Clone the kaiju-private repository, call it kaiju locally.
|
||||
if $verbose; then
|
||||
echo "Cloning repository ${kaiju_repository}."
|
||||
fi
|
||||
git clone $kaiju_repository $local_kaiju_name
|
||||
|
||||
# Move into the repository clone.
|
||||
cd $local_kaiju_name
|
||||
|
||||
# If a branch was requested, switch to that branch. Otherwise, if a commit
|
||||
# or tag was requested, check out that commit or tag.
|
||||
if $verbose; then
|
||||
echo "Checking out branch/commit/tag ${BRANCH_OR_COMMIT}."
|
||||
fi
|
||||
git checkout $BRANCH_OR_COMMIT
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# <HACK>
|
||||
# Back up existing test code, since we need to use test code that is under
|
||||
# development.
|
||||
mkdir TEST_CODE_BACKUP
|
||||
mv testingScripts TEST_CODE_BACKUP/
|
||||
mv tests TEST_CODE_BACKUP/
|
||||
|
||||
# Copy latest test code.
|
||||
new_test_code_root="${MAGE_TEST_ROOT}/kaiju-private"
|
||||
if $verbose; then
|
||||
echo "Copying updated test files from ${new_test_code_root}."
|
||||
fi
|
||||
cp -rp $new_test_code_root/testingScripts ./testingScripts
|
||||
cp -rp $new_test_code_root/tests ./tests
|
||||
# </HACK>
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Set up the kaiju environment using the just-checked-out code.
|
||||
kaiju_setup_script="${MAGE_TEST_SET_ROOT}/${local_kaiju_name}/scripts/setupEnvironment.sh"
|
||||
if $verbose; then
|
||||
echo "Sourcing kaiju setup script ${kaiju_setup_script}."
|
||||
fi
|
||||
source $kaiju_setup_script
|
||||
if $verbose; then
|
||||
echo "KAIJUHOME is ${KAIJUHOME}."
|
||||
fi
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# List at-start environment variables.
|
||||
if $debug; then
|
||||
echo 'At start, the environment variables are:'
|
||||
printenv
|
||||
fi
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Compute the location of the test scripts.
|
||||
kaiju_test_scripts_dir="${KAIJUHOME}/testingScripts"
|
||||
if $debug; then
|
||||
echo "kaiju_test_scripts_dir=${kaiju_test_scripts_dir}"
|
||||
fi
|
||||
|
||||
# Run each test.
|
||||
for mage_test in ${tests_to_run[@]}
|
||||
do
|
||||
echo $test_output_separator
|
||||
if $verbose; then
|
||||
echo "Moving to ${MAGE_TEST_SET_ROOT}."
|
||||
fi
|
||||
cd $MAGE_TEST_SET_ROOT
|
||||
cmd="python ${kaiju_test_scripts_dir}/${mage_test}"
|
||||
if $verbose; then
|
||||
echo "Running test '${cmd}' at `date` on `hostname`."
|
||||
fi
|
||||
eval "${cmd}"
|
||||
done
|
||||
echo $test_output_separator
|
||||
|
||||
# ############################################################################
|
||||
|
||||
# Shut down the SSH agent.
|
||||
if $verbose; then
|
||||
echo "Shutting down SSH key agent."
|
||||
fi
|
||||
ssh-agent -k
|
||||
|
||||
# ############################################################################
|
||||
|
||||
echo "Ending $0 at `date` on `hostname`."
|
||||
echo '***********************************************************************'
|
||||
@@ -145,7 +145,7 @@ UNIT_TEST_DATA_INPUT_FILES = [
|
||||
'bcwind.h5',
|
||||
'geo_mpi.xml',
|
||||
'lfmD.h5',
|
||||
'rcmconfig.h5',
|
||||
'raijuconfig.h5',
|
||||
]
|
||||
|
||||
# Names of PBS scripts to create from templates.
|
||||
@@ -425,17 +425,17 @@ def main():
|
||||
print(f"Copying {from_path} to {to_path}.")
|
||||
shutil.copyfile(from_path, to_path)
|
||||
else:
|
||||
cmd = "cda2wind.py -t0 2016-08-09T09:00:00 -t1 2016-08-09T11:00:00"
|
||||
cmd = "cda2wind -t0 2016-08-09T09:00:00 -t1 2016-08-09T11:00:00"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
cmd = "genLFM.py"
|
||||
cmd = "genLFM"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
text=True, capture_output=True)
|
||||
cmd = "genRAIJU.py"
|
||||
cmd = "genRAIJU"
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
cproc = subprocess.run(cmd, shell=True, check=True,
|
||||
|
||||
@@ -20,9 +20,7 @@ echo 'The currently loaded modules are:'
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
export CONDARC="{{ condarc }}"
|
||||
export CONDA_ENVS_PATH="{{ conda_envs_path }}"
|
||||
mage_miniconda3="{{ mage_test_root }}/miniconda3"
|
||||
mage_miniconda3="${HOME}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
if [ $? -eq 0 ]; then
|
||||
@@ -42,10 +40,6 @@ echo 'Setting up MAGE environment.'
|
||||
source {{ kaijuhome }}/scripts/setupEnvironment.sh
|
||||
echo "The kaiju software is located at ${KAIJUHOME}."
|
||||
|
||||
echo 'Setting up kaipy environment.'
|
||||
source {{ kaipy_private_root }}/kaipy/scripts/setupEnvironment.sh
|
||||
echo "The kaipy software is located at ${KAIPYHOME}."
|
||||
|
||||
echo 'Setting environment variables.'
|
||||
export TMPDIR={{ tmpdir }}
|
||||
export SLACK_BOT_TOKEN={{ slack_bot_token }}
|
||||
@@ -83,10 +77,10 @@ echo 'Generating the solar wind boundary condition file.'
|
||||
{{ cda2wind_cmd }}
|
||||
echo "The solar wind boundary condition file is `ls bcwind.h5`."
|
||||
|
||||
# Generate the RCM configuration file.
|
||||
echo 'Generating the RCM configuration file.'
|
||||
{{ genRCM_cmd }}
|
||||
echo "The RCM configuration file is `ls rcmconfig.h5`."
|
||||
# Generate the raiju configuration file.
|
||||
echo 'Generating the raiju configuration file.'
|
||||
{{ genRAIJU_cmd }}
|
||||
echo "The RAIJU configuration file is `ls raijuconfig.h5`."
|
||||
|
||||
# Run the model.
|
||||
MPICOMMAND="{{ mpiexec_cmd }}"
|
||||
|
||||
@@ -142,15 +142,15 @@ def weekly_dash(args: dict):
|
||||
make_cmd = "make voltron_mpi.x"
|
||||
|
||||
# Create the command to generate the LFM grid.
|
||||
genLFM_cmd = "genLFM.py -gid Q"
|
||||
genLFM_cmd = "genLFM -gid Q"
|
||||
|
||||
# Create the command to generate the solar wind boundary condition file.
|
||||
cda2wind_cmd = (
|
||||
"cda2wind.py -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
"cda2wind -t0 2016-08-09T02:00:00 -t1 2016-08-09T12:00:00"
|
||||
)
|
||||
|
||||
# Create the command to generate the RCM configuration.
|
||||
genRCM_cmd = "genRCM.py"
|
||||
# Create the command to generate the raiju configuration.
|
||||
genRAIJU_cmd = "genRAIJU"
|
||||
|
||||
# Create the command for launching an MPI program.
|
||||
mpiexec_cmd = f"mpiexec {KAIJUHOME}/scripts/preproc/pinCpuCores.sh"
|
||||
@@ -178,13 +178,10 @@ def weekly_dash(args: dict):
|
||||
pbs_options["job_priority"] = os.environ["DERECHO_TESTING_PRIORITY"]
|
||||
pbs_options["walltime"] = "08:00:00"
|
||||
pbs_options["modules"] = module_names
|
||||
pbs_options["condarc"] = os.environ["CONDARC"]
|
||||
pbs_options["conda_envs_path"] = os.environ["CONDA_ENVS_PATH"]
|
||||
pbs_options["conda_environment"] = os.environ["CONDA_ENVIRONMENT"]
|
||||
pbs_options["mage_test_root"] = os.environ["MAGE_TEST_ROOT"]
|
||||
pbs_options["mage_test_set_root"] = os.environ["MAGE_TEST_SET_ROOT"]
|
||||
pbs_options["kaijuhome"] = KAIJUHOME
|
||||
pbs_options["kaipy_private_root"] = os.environ["KAIPY_PRIVATE_ROOT"]
|
||||
pbs_options["tmpdir"] = os.environ["TMPDIR"]
|
||||
pbs_options["slack_bot_token"] = os.environ["SLACK_BOT_TOKEN"]
|
||||
pbs_options["branch_or_commit"] = os.environ["BRANCH_OR_COMMIT"]
|
||||
@@ -192,7 +189,7 @@ def weekly_dash(args: dict):
|
||||
pbs_options["make_cmd"] = make_cmd
|
||||
pbs_options["genLFM_cmd"] = genLFM_cmd
|
||||
pbs_options["cda2wind_cmd"] = cda2wind_cmd
|
||||
pbs_options["genRCM_cmd"] = genRCM_cmd
|
||||
pbs_options["genRAIJU_cmd"] = genRAIJU_cmd
|
||||
pbs_options["mpiexec_cmd"] = mpiexec_cmd
|
||||
pbs_options["voltron_cmd"] = voltron_cmd
|
||||
|
||||
|
||||
@@ -121,9 +121,6 @@ REMIX_NORTH_QUICKLOOK_MASTER = os.path.join(
|
||||
REMIX_SOUTH_QUICKLOOK_MASTER = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_MASTER, 'remix_s.png'
|
||||
)
|
||||
RCM_QUICKLOOK_MASTER = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_MASTER, 'qkrcmpic.png'
|
||||
)
|
||||
|
||||
# Compute the paths to the quicklook plots for the development branch.
|
||||
MAGNETOSPHERE_QUICKLOOK_DEVELOPMENT = os.path.join(
|
||||
@@ -135,9 +132,6 @@ REMIX_NORTH_QUICKLOOK_DEVELOPMENT = os.path.join(
|
||||
REMIX_SOUTH_QUICKLOOK_DEVELOPMENT = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_DEVELOPMENT, 'remix_s.png'
|
||||
)
|
||||
RCM_QUICKLOOK_DEVELOPMENT = os.path.join(
|
||||
REFERENCE_RESULTS_DIRECTORY_DEVELOPMENT, 'qkrcmpic.png'
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
@@ -908,27 +902,6 @@ def main():
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Make the RCM quick-look plot.
|
||||
if verbose:
|
||||
print(f"Creating RCM quicklook plot for {os.getcwd()}.")
|
||||
|
||||
# Create the plot.
|
||||
cmd = 'rcmpic.py'
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
_ = subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: Unable to create RCM quicklook plot.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f'See log for output.\n',
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Create merged images for the quicklook plots.
|
||||
|
||||
# Merge magnetosphere quicklooks.
|
||||
@@ -988,25 +961,6 @@ def main():
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# Merge RCM quicklooks.
|
||||
cmd = (
|
||||
f"convert {RCM_QUICKLOOK_MASTER}"
|
||||
f" {RCM_QUICKLOOK_DEVELOPMENT}"
|
||||
' qkrcmpic.png -append combined_qkrcmpic.png'
|
||||
)
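# ImageMagick's -append stacks the listed images vertically, so the combined
# image shows the master, development, and current-run RCM quicklooks from
# top to bottom.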
|
||||
if debug:
|
||||
print(f"cmd = {cmd}")
|
||||
try:
|
||||
cproc = subprocess.run(cmd, shell=True, check=True)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(
|
||||
'ERROR: Unable to combine RCM quicklook plots.\n'
|
||||
f"e.cmd = {e.cmd}\n"
|
||||
f"e.returncode = {e.returncode}\n"
|
||||
f'See log for output.\n',
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# List the files to post and their comments.
|
||||
@@ -1017,11 +971,9 @@ def main():
|
||||
'qkmsphpic.png',
|
||||
'remix_n.png',
|
||||
'remix_s.png',
|
||||
'qkrcmpic.png',
|
||||
'combined_msphpic.png',
|
||||
'combined_remix_n.png',
|
||||
'combined_remix_s.png',
|
||||
'combined_qkrcmpic.png'
|
||||
]
|
||||
comments_to_post = [
|
||||
'Real-Time Performance\n\n',
|
||||
@@ -1030,11 +982,9 @@ def main():
|
||||
'Magnetosphere Quicklook Plots\n\n',
|
||||
'REMIX (north) Quicklook Plots\n\n',
|
||||
'REMIX (south) Quicklook Plots\n\n',
|
||||
'RCM Quicklook Plots\n\n',
|
||||
'Magnetosphere Quicklook Comparison Plots\n\n',
|
||||
'REMIX (north) Quicklook Comparison Plots\n\n',
|
||||
'REMIX (south) Quicklook Comparison Plots\n\n',
|
||||
'RCM Quicklook Comparison Plots\n\n'
|
||||
]
|
||||
|
||||
# If loud mode is on, post results to Slack.
|
||||
|
||||
@@ -19,11 +19,9 @@ module load {{ module }}
|
||||
module list
|
||||
|
||||
echo 'Loading python environment.'
|
||||
mage_test_root=$HOME
|
||||
if [ -d "${mage_test_root}/miniconda3" ]; then
|
||||
echo 'Loading local miniconda3'
|
||||
mage_test_root='{{ mage_test_root }}'
|
||||
export CONDARC="${mage_test_root}/.condarc"
|
||||
export CONDA_ENVS_PATH="${mage_test_root}/.conda"
|
||||
mage_miniconda3="${mage_test_root}/miniconda3"
|
||||
mage_conda="${mage_miniconda3}/bin/conda"
|
||||
__conda_setup="$($mage_conda 'shell.bash' 'hook' 2> /dev/null)"
|
||||
|
||||
@@ -9,6 +9,8 @@ module testVoltGridGen
|
||||
implicit none
|
||||
|
||||
character(len=strLen) :: xmlName = 'voltGridTests.xml'
|
||||
integer, parameter :: gamRes = 64
|
||||
!! genVoltShellGrid expects a gamera resolution to determine defaults for its own grid
|
||||
|
||||
contains
|
||||
|
||||
@@ -36,7 +38,7 @@ module testVoltGridGen
|
||||
call xmlInp%Set_Val(Nt, "grid/Nt", -1) ! -1 so things blow up if xml isn't set properly
|
||||
call xmlInp%Set_Val(Np, "grid/Np", -1) ! -1 so things blow up if xml isn't set properly
|
||||
|
||||
call genVoltShellGrid(vApp, xmlInp)
|
||||
call genVoltShellGrid(vApp, xmlInp, gamRes)
|
||||
|
||||
@assertEqual(vApp%shGrid%Nt, 2*Nt, "Wrong amount of theta cells")
|
||||
@assertEqual(vApp%shGrid%Np, Np , "Wrong amount of phi cells")
|
||||
@@ -62,7 +64,7 @@ module testVoltGridGen
|
||||
call xmlInp%Set_Val(Nt, "grid/Nt", -1) ! -1 so things blow up if xml isn't set properly
|
||||
call xmlInp%Set_Val(Np, "grid/Np", -1) ! -1 so things blow up if xml isn't set properly
|
||||
|
||||
call genVoltShellGrid(vApp, xmlInp)
|
||||
call genVoltShellGrid(vApp, xmlInp, gamRes)
|
||||
|
||||
@assertEqual(2*Nt, vApp%shGrid%Nt, "Wrong amount of theta cells")
|
||||
@assertEqual( Np, vApp%shGrid%Np, "Wrong amount of phi cells")
|
||||
|
||||