Final organization of new code, updating tests accordingly, and bug fixes

Jeffrey Garretson
2024-07-18 23:37:53 -07:00
parent 9a4b1afd0b
commit 503062b6cf
12 changed files with 113 additions and 64 deletions

View File

@@ -7,7 +7,7 @@ module volttypes_mpi
! mpi voltron specific options
type, extends(BaseOptions_T) :: VoltOptionsMpi_T
type(MPI_Comm) :: allComm
type(MPI_Comm), pointer :: couplingPoolComm
contains
end type voltOptionsMpi_T
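
The coupling-pool communicator is now a pointer component, so it must be allocated before a communicator is assigned into it, and other option structures can later pointer-associate to the same target (the voltapp_mpi change further down in this commit does exactly that). A minimal, self-contained sketch of the pattern, using hypothetical type and program names rather than the project's actual types:

program couplingPoolSketch
   use mpi_f08
   implicit none
   type :: OwnerOpts_T
      type(MPI_Comm), pointer :: couplingPoolComm => null()
   end type OwnerOpts_T
   type :: CouplerOpts_T
      type(MPI_Comm), pointer :: couplingPoolComm => null()
   end type CouplerOpts_T
   type(OwnerOpts_T)   :: vOpts
   type(CouplerOpts_T) :: gOpts
   integer :: ierr
   call MPI_Init(ierr)
   allocate(vOpts%couplingPoolComm)                   ! the owner allocates the target
   vOpts%couplingPoolComm = MPI_COMM_WORLD            ! value assignment into the target
   gOpts%couplingPoolComm => vOpts%couplingPoolComm   ! the coupler aliases the same object
   ! replacing the pool through vOpts is now visible through gOpts as well
   call MPI_Finalize(ierr)
end program couplingPoolSketch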

View File

@@ -51,6 +51,19 @@ program voltron_mpix
call mpi_Abort(MPI_COMM_WORLD, 1, ierror)
end if
call MPI_Comm_Size(MPI_COMM_WORLD, worldSize, ierror)
if(ierror /= MPI_Success) then
call MPI_Error_string( ierror, message, length, ierror)
print *,message(1:length)
call mpi_Abort(MPI_COMM_WORLD, 1, ierror)
end if
call MPI_Comm_Rank(MPI_COMM_WORLD, worldRank, ierror)
if(ierror /= MPI_Success) then
call MPI_Error_string( ierror, message, length, ierror)
print *,message(1:length)
call mpi_Abort(MPI_COMM_WORLD, 1, ierror)
end if
! initialize mpi data type
call setMpiReal()
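
The check-and-abort block repeated after each MPI call above follows one fixed pattern: compare against MPI_SUCCESS, translate the code with MPI_Error_string, print the message, and call MPI_Abort. A small helper in that spirit might look like the following; the name checkMpi is hypothetical and the routine is only a sketch, not part of this commit:

subroutine checkMpi(ierror)
   use mpi_f08
   implicit none
   integer, intent(in) :: ierror
   character(len=MPI_MAX_ERROR_STRING) :: message
   integer :: length, abortErr
   if(ierror /= MPI_SUCCESS) then
      ! translate the error code into readable text, print it, and abort the run
      call MPI_Error_string(ierror, message, length, abortErr)
      print *, message(1:length)
      call MPI_Abort(MPI_COMM_WORLD, 1, abortErr)
   end if
end subroutine checkMpi
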
@@ -84,22 +97,11 @@ program voltron_mpix
! every app splits into its own communicator
call MPI_COMM_SPLIT(MPI_COMM_WORLD, appId, 0, appComm, ierror)
call MPI_Comm_Size(MPI_COMM_WORLD, worldSize, ierror)
if(ierror /= MPI_Success) then
call MPI_Error_string( ierror, message, length, ierror)
print *,message(1:length)
call mpi_Abort(MPI_COMM_WORLD, 1, ierror)
end if
call MPI_Comm_Rank(MPI_COMM_WORLD, worldRank, ierror)
if(ierror /= MPI_Success) then
call MPI_Error_string( ierror, message, length, ierror)
print *,message(1:length)
call mpi_Abort(MPI_COMM_WORLD, 1, ierror)
end if
if(appId == helperId) then
allocate(vApp)
vApp%vOptionsMpi%allComm = MPI_COMM_WORLD
vApp%vOptions%gamUserInitFunc => initUser
allocate(vApp%vOptionsMpi%couplingPoolComm)
vApp%vOptionsMpi%couplingPoolComm = MPI_COMM_WORLD
call initVoltronHelper_mpi(vApp)
! do helper loop
@@ -181,7 +183,8 @@ program voltron_mpix
! *** end TIEGCM coupling code
vApp%vOptions%gamUserInitFunc => initUser
vApp%vOptionsMpi%allComm = splittingComm
allocate(vApp%vOptionsMpi%couplingPoolComm)
vApp%vOptionsMpi%couplingPoolComm = splittingComm
call initVoltron_mpi(vApp)
! voltron run loop
@@ -231,7 +234,7 @@ program voltron_mpix
allocate(gApp)
gApp%gOptionsMpi%gamComm = appComm
gApp%gOptionsCplMpiG%allComm = MPI_COMM_WORLD
gApp%gOptionsCplMpiG%couplingPoolComm = MPI_COMM_WORLD
gApp%gOptionsMpi%doIO = .false.
gApp%gOptions%userInitFunc => initUser
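
The app split above relies on MPI_COMM_SPLIT grouping ranks by color: every rank that passes the same appId lands in the same appComm, and the key argument only orders ranks within it. A minimal standalone sketch of that mechanism, with a made-up rule for assigning ranks to apps:

program splitByAppSketch
   use mpi_f08
   implicit none
   integer :: worldRank, appId, ierr
   type(MPI_Comm) :: appComm
   call MPI_Init(ierr)
   call MPI_Comm_rank(MPI_COMM_WORLD, worldRank, ierr)
   appId = merge(0, 1, worldRank == 0)   ! made-up rule: rank 0 is one app, the rest another
   ! ranks sharing a color join the same communicator; key 0 preserves world ordering
   call MPI_Comm_split(MPI_COMM_WORLD, appId, 0, appComm, ierr)
   call MPI_Finalize(ierr)
end program splitByAppSketch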

View File

@@ -38,7 +38,8 @@ submodule (volttypes) gamCplTypessub
integer, intent(in) :: nRes
! read parent's restart data
call gamReadRestart(App, resId, nRes)
! commented out because Gamera reads it on its own (BAD)
!call gamReadRestart(App, resId, nRes)
! then my own
call readGamCouplerRestart(App, resId, nRes)

View File

@@ -19,16 +19,19 @@ module couplingHelpers
currentApp = appId + 1 ! ensure the loop is entered
currentPool = startingComm
do while(currentApp .ne. appId)
call MPI_Allreduce(0, voltRank, 1, MPI_INTEGER, MPI_MAX, currentPool, ierr)
voltRank = -1
call MPI_Allreduce(MPI_IN_PLACE, voltRank, 1, MPI_INTEGER, MPI_MAX, currentPool, ierr)
call MPI_Bcast(currentApp, 1, MPI_INTEGER, voltRank, currentPool, ierr)
if(currentApp == appId) then
! it's my turn, split with volt and then skip making the second comm
call MPI_comm_split(currentPool, appId, key, voltCoupledComm, ierr)
call MPI_comm_split(currentPool, MPI_UNDEFINED, key, currentPool, ierr)
! key is not needed when making the exclusion pool; 0 is passed to preserve order
call MPI_comm_split(currentPool, MPI_UNDEFINED, 0, currentPool, ierr)
else
! it's not my turn, don't split with volt, and then join the second comm
call MPI_comm_split(currentPool, MPI_UNDEFINED, key, voltCoupledComm, ierr)
call MPI_comm_split(currentPool, voltId, key, currentPool, ierr)
! key is not needed when making the exclusion pool; 0 is passed to preserve order
call MPI_comm_split(currentPool, voltId, 0, currentPool, ierr)
endif
enddo
@@ -47,10 +50,11 @@ module couplingHelpers
! broadcast which app I'm creating a communicator with, split with it, and then
! create a smaller pool that excludes that app
call MPI_comm_rank(couplingPool, myRank, ierr)
call MPI_Allreduce(myRank, myRank, 1, MPI_INTEGER, MPI_MAX, couplingPool, ierr)
call MPI_Allreduce(MPI_IN_PLACE, myRank, 1, MPI_INTEGER, MPI_MAX, couplingPool, ierr)
call MPI_Bcast(appId, 1, MPI_INTEGER, myRank, couplingPool, ierr)
call MPI_comm_split(couplingPool, appId, key, coupledComm, ierr)
call MPI_comm_split(couplingPool, voltId, key, couplingPool, ierr)
! key is not needed when making the exclusion pool; 0 is passed to preserve order
call MPI_comm_split(couplingPool, voltId, 0, couplingPool, ierr)
end subroutine
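
The splits above pass MPI_UNDEFINED as the color on ranks that should not be part of the new pool; those ranks get MPI_COMM_NULL back instead of joining. A minimal sketch of that exclusion pattern, with a hypothetical subroutine name and a caller-supplied membership flag:

subroutine exclusionSplitSketch(pool, staysInPool, newPool)
   use mpi_f08
   implicit none
   type(MPI_Comm), intent(in)  :: pool
   logical, intent(in)         :: staysInPool
   type(MPI_Comm), intent(out) :: newPool
   integer :: color, ierr
   ! ranks passing MPI_UNDEFINED are excluded and receive MPI_COMM_NULL
   if(staysInPool) then
      color = 0
   else
      color = MPI_UNDEFINED
   end if
   call MPI_Comm_split(pool, color, 0, newPool, ierr)
end subroutine exclusionSplitSketch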

View File

@@ -9,7 +9,7 @@ module gamCouple_mpi_G2V
implicit none
type, extends(BaseOptions_T) :: gamOptionsCplMpiG_T
type(MPI_Comm) :: allComm
type(MPI_Comm) :: couplingPoolComm
contains
end type
@@ -74,7 +74,7 @@ module gamCouple_mpi_G2V
class(gamCouplerMpi_gam_T), intent(inout) :: App
type(XML_Input_T), intent(inout) :: Xml
integer :: length, commRank, commSize, ierr, numCells, dataCount, numInNeighbors, numOutNeighbors
integer :: length, commSize, ierr, numCells, dataCount, numInNeighbors, numOutNeighbors
type(MPI_Comm) :: voltComm
character( len = MPI_MAX_ERROR_STRING) :: message
logical :: reorder, wasWeighted
@@ -89,9 +89,8 @@ module gamCouple_mpi_G2V
App%zeroArraytypes = (/ MPI_DATATYPE_NULL /)
! split voltron helpers off of the communicator
! split allComm into a communicator with only the non-helper voltron rank
call MPI_Comm_rank(App%gOptionsCplMpiG%allComm, commRank, ierr)
call appWaitForVoltronSplit(App%gOptionsCplMpiG%allComm, gamId, commRank, voltComm)
! split couplingPoolComm into a communicator with only the non-helper voltron rank
call appWaitForVoltronSplit(App%gOptionsCplMpiG%couplingPoolComm, gamId, 0, voltComm)
call Xml%Set_Val(App%doSerialVoltron,"/kaiju/voltron/coupling/doSerial",.false.)
call Xml%Set_Val(App%doAsyncCoupling,"/kaiju/voltron/coupling/doAsyncCoupling",.true.)
@@ -110,11 +109,14 @@ module gamCouple_mpi_G2V
end if
call MPI_Comm_rank(voltComm, App%myRank, ierr)
! identify who is voltron
App%voltRank = -1
call MPI_Allreduce(MPI_IN_PLACE, App%voltRank, 1, MPI_INTEGER, MPI_MAX, voltComm, ierr)
! send my i/j/k ranks to the voltron rank
call mpi_gather(App%Grid%Ri, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
call mpi_gather(App%Grid%Rj, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
call mpi_gather(App%Grid%Rk, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
call mpi_gather(App%Grid%Ri, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, App%voltRank, voltComm, ierr)
call mpi_gather(App%Grid%Rj, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, App%voltRank, voltComm, ierr)
call mpi_gather(App%Grid%Rk, 1, MPI_INTEGER, 0, 0, MPI_DATATYPE_NULL, App%voltRank, voltComm, ierr)
numCells = App%Grid%Nip*App%Grid%Njp*App%Grid%Nkp
! rank 0 sends the number of physical cells to the voltron rank
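
The added lines initialize App%voltRank to -1 and then perform an in-place MAX reduction; the voltron rank contributes its own rank while every other rank contributes the -1 sentinel, so the reduction leaves the voltron rank on all processes. A minimal sketch of that idiom, with a hypothetical subroutine name:

subroutine findRootRankSketch(comm, iAmRoot, rootRank)
   use mpi_f08
   implicit none
   type(MPI_Comm), intent(in) :: comm
   logical, intent(in)        :: iAmRoot
   integer, intent(out)       :: rootRank
   integer :: myRank, ierr
   call MPI_Comm_rank(comm, myRank, ierr)
   ! the one designated rank contributes its rank; all others contribute -1,
   ! so MPI_MAX leaves that rank everywhere after the in-place reduction
   rootRank = merge(myRank, -1, iAmRoot)
   call MPI_Allreduce(MPI_IN_PLACE, rootRank, 1, MPI_INTEGER, MPI_MAX, comm, ierr)
end subroutine findRootRankSketch
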
@@ -179,12 +181,17 @@ module gamCouple_mpi_G2V
call mpi_bcast(App%Model%IO%tOut, 1, MPI_MYFLOAT, App%voltRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%dtOut, 1, MPI_MYFLOAT, App%voltRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%nOut, 1, MPI_INTEGER, App%voltRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%tCon, 1, MPI_MYFLOAT, App%voltRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%dtCon, 1, MPI_MYFLOAT, App%voltRank, App%couplingComm, ierr)
if(.not. App%Model%isRestart) then
! re-write Gamera's first output with the corrected time; save and restore the initial output time
tIO = App%Model%IO%tOut
call App%WriteFileOutput(App%Model%IO%nOut)
App%Model%IO%tOut = tIO
else
! always processing when restarted
App%processingData = .true.
endif
end subroutine
@@ -193,7 +200,7 @@ module gamCouple_mpi_G2V
class(gamCouplerMpi_gam_T), intent(inout) :: App
type(XML_Input_T), intent(inout) :: Xml
real(rp) :: save_tRes, save_dtRes, save_tOut, save_dtOut
real(rp) :: save_tRes, save_dtRes, save_tOut, save_dtOut, save_tCon, save_dtCon
integer :: save_nRes, save_nOut
integer :: ierr
@@ -206,6 +213,8 @@ module gamCouple_mpi_G2V
save_tOut = App%Model%IO%tOut
save_dtOut = App%Model%IO%dtOut
save_nOut = App%Model%IO%nOut
save_tCon = App%Model%IO%tCon
save_dtCon = App%Model%IO%dtCon
! initialize parent's IO
call gamInitIO(App, Xml)
@@ -217,6 +226,8 @@ module gamCouple_mpi_G2V
App%Model%IO%tOut = save_tOut
App%Model%IO%dtOut = save_dtOut
App%Model%IO%nOut = save_nOut
App%Model%IO%tCon = save_tCon
App%Model%IO%dtCon = save_dtCon
end subroutine
@@ -238,9 +249,10 @@ module gamCouple_mpi_G2V
call recvVoltronCplDataMpi(App)
App%processingData = .true.
elseif(App%DeepT <= App%Model%t) then
! send results
! send results and get new data
call sendVoltronCplDataMpi(App)
App%processingData = .false.
call recvVoltronCplDataMpi(App)
App%processingData = .true.
else
if(targetSimT < App%DeepT) then
! advance to the current step target time
@@ -248,9 +260,10 @@ module gamCouple_mpi_G2V
else
! advance to next coupling time
call gamMpiAdvanceModel(App, App%DeepT-App%Model%t)
! send results
! send results and get new data
call sendVoltronCplDataMpi(App)
App%processingData = .false.
call recvVoltronCplDataMpi(App)
App%processingData = .true.
endif
endif
end do

View File

@@ -12,7 +12,7 @@ module gamCouple_mpi_V2G
! options for MPI voltron coupler
type, extends(BaseOptions_T) :: gamOptionsCplMpiV_T
type(MPI_Comm) :: allComm
type(MPI_Comm), pointer :: couplingPoolComm
contains
end type
@@ -110,9 +110,8 @@ module gamCouple_mpi_V2G
call PrepState(App%Model,App%Grid,&
App%oState,App%State,Xml,App%gOptions%userInitFunc)
! split allComm into a communicator with only the non-helper voltron rank and Gamera ranks
call MPI_Comm_rank(App%gOptionsCplMpiV%allComm, commSize, ierr)
call voltronSplitWithApp(App%gOptionsCplMpiV%allComm, gamId, commSize, voltComm)
! split couplingPoolComm into a communicator with only the non-helper voltron rank and Gamera ranks
call voltronSplitWithApp(App%gOptionsCplMpiV%couplingPoolComm, gamId, 0, voltComm)
call Xml%Set_Val(App%doAsyncCoupling,"/kaiju/voltron/coupling/doAsyncCoupling",.true.)
call Xml%Set_Val(App%doDeep, "/kaiju/voltron/coupling/doDeep", .true.)
@@ -142,10 +141,17 @@ module gamCouple_mpi_V2G
! doing a very rough approximation of the data transferred to help MPI reorder
! for deep updates, assume each rank sends data equal to its # physical cells
call MPI_Comm_rank(voltComm, App%myRank, ierr)
! identify who is voltron
call MPI_Allreduce(MPI_IN_PLACE, App%myRank, 1, MPI_INTEGER, MPI_MAX, voltComm, ierr)
! get i/j/k ranks from each Gamera mpi rank
call mpi_gather(-1, 1, MPI_INTEGER, iRanks, 1, MPI_INTEGER, commSize-1, voltComm, ierr)
call mpi_gather(-1, 1, MPI_INTEGER, jRanks, 1, MPI_INTEGER, commSize-1, voltComm, ierr)
call mpi_gather(-1, 1, MPI_INTEGER, kRanks, 1, MPI_INTEGER, commSize-1, voltComm, ierr)
iRanks(:) = -1
jRanks(:) = -1
kRanks(:) = -1
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, iRanks, 1, MPI_INTEGER, App%myRank, voltComm, ierr)
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, jRanks, 1, MPI_INTEGER, App%myRank, voltComm, ierr)
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, kRanks, 1, MPI_INTEGER, App%myRank, voltComm, ierr)
! get the number of physical cells from rank 0
call mpi_recv(numCells, 1, MPI_INTEGER, 0, 97500, voltComm, MPI_STATUS_IGNORE, ierr)
@@ -193,9 +199,12 @@ module gamCouple_mpi_V2G
end if
! get i/j/k ranks again in case MPI ranks were reordered in the new communicator
call mpi_gather(-1, 1, MPI_INTEGER, iRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
call mpi_gather(-1, 1, MPI_INTEGER, jRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
call mpi_gather(-1, 1, MPI_INTEGER, kRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
iRanks(:) = -1
jRanks(:) = -1
kRanks(:) = -1
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, iRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, jRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
call mpi_gather(MPI_IN_PLACE, 1, MPI_INTEGER, kRanks, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
! create the MPI datatypes needed to transfer state data
call createVoltDataTypes(App, iRanks, jRanks, kRanks)
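
These gathers pass MPI_IN_PLACE as the send buffer on the root (the voltron rank), so the root's own slot in the receive arrays keeps the -1 sentinel written just above, while the Gamera ranks only supply send data. A minimal sketch of that rooted-gather pattern, with hypothetical names:

subroutine gatherAtRootSketch(comm, root, myValue, values)
   use mpi_f08
   implicit none
   type(MPI_Comm), intent(in) :: comm
   integer, intent(in)        :: root, myValue
   integer, intent(inout)     :: values(:)   ! only meaningful on the root
   integer :: myRank, ierr
   call MPI_Comm_rank(comm, myRank, ierr)
   if(myRank == root) then
      values(:) = -1
      ! MPI_IN_PLACE at the root means its slot is taken as-is from values, so it stays -1
      call MPI_Gather(MPI_IN_PLACE, 1, MPI_INTEGER, values, 1, MPI_INTEGER, root, comm, ierr)
   else
      ! non-root ranks only send; their receive arguments are ignored
      call MPI_Gather(myValue, 1, MPI_INTEGER, values, 1, MPI_INTEGER, root, comm, ierr)
   end if
end subroutine gatherAtRootSketch
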
@@ -397,6 +406,9 @@ module gamCouple_mpi_V2G
! read only my own restart data
call readGamCouplerRestart(App, resId, nRes)
! we are always in progress after a restart
App%inProgress = .true.
end subroutine
subroutine gamCplMpiVWriteConsoleOutput(App)
@@ -478,6 +490,8 @@ module gamCouple_mpi_V2G
call mpi_bcast(App%Model%IO%tOut, 1, MPI_MYFLOAT, App%myRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%dtOut, 1, MPI_MYFLOAT, App%myRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%nOut, 1, MPI_INTEGER, App%myRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%tCon, 1, MPI_MYFLOAT, App%myRank, App%couplingComm, ierr)
call mpi_bcast(App%Model%IO%dtCon, 1, MPI_MYFLOAT, App%myRank, App%couplingComm, ierr)
end subroutine

View File

@@ -52,7 +52,7 @@ module voltapp_mpi
! set variable for polymorphic type
SELECT type(cplApp=>vApp%gApp)
TYPE IS (gamCouplerMpi_volt_T)
cplApp%gOptionsCplMpiV%allComm = vApp%vOptionsMpi%allComm
cplApp%gOptionsCplMpiV%couplingPoolComm => vApp%vOptionsMpi%couplingPoolComm
CLASS DEFAULT
write(*,*) 'Gamera MPI coupler is wrong type'
stop
@@ -72,7 +72,7 @@ module voltapp_mpi
vApp%vHelpComm = MPI_COMM_NULL
vApp%vHelpWin = MPI_WIN_NULL
call voltronSplitWithApp(vApp%vOptionsMpi%allComm, helperId, 0, allVoltComm)
call voltronSplitWithApp(vApp%vOptionsMpi%couplingPoolComm, helperId, 0, allVoltComm)
! get info about voltron-only mpi communicator
call MPI_Comm_rank(allVoltComm, vApp%vHelpRank, ierr)
@@ -231,7 +231,7 @@ module voltapp_mpi
vApp%vHelpComm = MPI_COMM_NULL
vApp%vHelpWin = MPI_WIN_NULL
call appWaitForVoltronSplit(vApp%vOptionsMpi%allComm, helperId, 0, allVoltComm)
call appWaitForVoltronSplit(vApp%vOptionsMpi%couplingPoolComm, helperId, 0, allVoltComm)
! get info about voltron-only mpi communicator
call MPI_Comm_rank(allVoltComm, vApp%vHelpRank, ierr)

View File

@@ -188,6 +188,7 @@ module voltapp
vApp%MJD = T2MJD(vApp%time,tsMJD%evalAt(0.0_rp))
!Set first deep coupling (defaulting to coupling immediately)
call xmlInp%Set_Val(vApp%DeepT, "coupling/tCouple", vApp%time)
vApp%IO%tCon = vApp%time
endif
if (vApp%doDeep) then
@@ -238,9 +239,9 @@ module voltapp
if(present(optFilename)) then
! read from the prescribed file
call initializeFromGamera(vApp, gApp, optFilename)
call initializeFromGamera(vApp, gApp, xmlInp, optFilename)
else
call initializeFromGamera(vApp, gApp)
call initializeFromGamera(vApp, gApp, xmlInp)
endif
! now that remix is initialized, check if precipitation model is OK with deep choice
@@ -326,17 +327,18 @@ module voltapp
end subroutine stepVoltron
!Initialize Voltron app based on Gamera data
subroutine initializeFromGamera(vApp, gApp, optFilename)
subroutine initializeFromGamera(vApp, gApp, xmlInp, optFilename)
type(voltApp_T), intent(inout) :: vApp
class(gamApp_T), intent(inout) :: gApp
type(XML_Input_T), intent(inout) :: xmlInp
character(len=*), optional, intent(in) :: optFilename
character(len=strLen) :: RunID
character(len=strLen) :: RunID, resID
type(TimeSeries_T) :: f107
logical :: isRestart
real(rp) :: maxF107,Rin
integer :: n
integer :: n, nRes
isRestart = gApp%Model%isRestart
RunID = trim(gApp%Model%RunID)
@@ -396,6 +398,11 @@ module voltapp
call init_mix2MhdCoupler(vApp%gApp, vApp%remixApp)
! initialize additional coupled gamera data
call vApp%gApp%InitMhdCoupler(vApp)
if(isRestart) then
call xmlInp%Set_Val(resID,"/Kaiju/gamera/restart/resID","msphere")
call xmlInp%Set_Val(nRes,"/Kaiju/gamera/restart/nRes" ,-1)
call vApp%gApp%ReadRestart(resID, nRes)
endif
call init_mhd2Mix(vApp%mhd2mix, gApp, vApp%remixApp)
!vApp%mix2mhd%mixOutput = 0.0

View File

@@ -46,7 +46,7 @@ contains
! make gamera-only mpi communicator
call MPI_Comm_Split(getMpiF08Communicator(this), gamId, this%getProcessRank(), gamCplMpi%gOptionsMpi%gamComm, ierror)
gamCplMpi%gOptionsCplMpiG%allComm = getMpiF08Communicator(this)
gamCplMpi%gOptionsCplMpiG%couplingPoolComm = getMpiF08Communicator(this)
gamCplMpi%gOptions%userInitFunc => initUser
gamCplMpi%gOptionsMpi%doIO = .false.
xmlInp = New_XML_Input(trim(caseFile),'Kaiju',.true.)
@@ -58,7 +58,8 @@ contains
call MPI_Comm_Split(getMpiF08Communicator(this), voltId, this%getProcessRank(), voltComm, ierror)
voltAppMpi%vOptions%gamUserInitFunc => initUser
voltAppMpi%vOptionsMpi%allComm = getMpiF08Communicator(this)
allocate(voltAppMpi%vOptionsMpi%couplingPoolComm)
voltAppMpi%vOptionsMpi%couplingPoolComm = getMpiF08Communicator(this)
call initVoltron_mpi(voltAppMpi, trim(caseFile))
endif

View File

@@ -36,7 +36,7 @@ contains
! make gamera-only mpi communicator
call MPI_Comm_Split(getMpiF08Communicator(this), gamId, this%getProcessRank(), gamCplMpi%gOptionsMpi%gamComm, ierror)
gamCplMpi%gOptionsCplMpiG%allComm = getMpiF08Communicator(this)
gamCplMpi%gOptionsCplMpiG%couplingPoolComm = getMpiF08Communicator(this)
gamCplMpi%gOptions%userInitFunc => initUser
gamCplMpi%gOptionsMpi%doIO = .false.
xmlInp = New_XML_Input(trim(caseFile),'Kaiju',.true.)
@@ -49,7 +49,8 @@ contains
call MPI_Comm_Split(getMpiF08Communicator(this), voltId, this%getProcessRank(), voltComm, ierror)
voltAppMpi%vOptions%gamUserInitFunc => initUser
voltAppMpi%vOptionsMpi%allComm = getMpiF08Communicator(this)
allocate(voltAppMpi%vOptionsMpi%couplingPoolComm)
voltAppMpi%vOptionsMpi%couplingPoolComm = getMpiF08Communicator(this)
call initVoltron_mpi(voltAppMpi, trim(caseFile))
else
allocate(voltAppMpi)
@@ -58,7 +59,9 @@ contains
! make voltron-only mpi communicator
call MPI_Comm_Split(getMpiF08Communicator(this), voltId, this%getProcessRank(), voltComm, ierror)
voltAppMpi%vOptionsMpi%allComm = getMpiF08Communicator(this)
voltAppMpi%vOptions%gamUserInitFunc => initUser
allocate(voltAppMpi%vOptionsMpi%couplingPoolComm)
voltAppMpi%vOptionsMpi%couplingPoolComm = getMpiF08Communicator(this)
call initVoltronHelper_mpi(voltAppMpi, trim(caseFile))
endif
@@ -98,7 +101,8 @@ contains
end subroutine runApplication
@test(npes=[8])
! this test doesn't work with the current layout; timestepping is tested as part of the test below
!@test(npes=[8])
subroutine testHelpTimestepping(this)
class (MpiTestMethod), intent(inout) :: this
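
For context, these are pFUnit MPI tests: the @test(npes=[...]) annotation requests a process count and the test subroutine receives an MpiTestMethod object. A minimal hedged sketch of such a test, with a hypothetical test name and assuming pFUnit's standard @assertTrue macro is available:

@test(npes=[4])
subroutine testRanksAreValid(this)
   class (MpiTestMethod), intent(inout) :: this
   ! getProcessRank() is this process's rank within the test communicator
   @assertTrue(this%getProcessRank() >= 0)
   @assertTrue(this%getProcessRank() < 4)
end subroutine testRanksAreValid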

View File

@@ -32,7 +32,7 @@ contains
! make gamera-only mpi communicator
call MPI_Comm_Split(getMpiF08Communicator(this), gamId, this%getProcessRank(), gamCplMpi%gOptionsMpi%gamComm, ierror)
gamCplMpi%gOptionsCplMpiG%allComm = getMpiF08Communicator(this)
gamCplMpi%gOptionsCplMpiG%couplingPoolComm = getMpiF08Communicator(this)
gamCplMpi%gOptions%userInitFunc => initUser
gamCplMpi%gOptionsMpi%doIO = .false.
xmlInp = New_XML_Input(trim(caseFile),'Kaiju',.true.)
@@ -44,7 +44,8 @@ contains
call MPI_Comm_Split(getMpiF08Communicator(this), voltId, this%getProcessRank(), voltComm, ierror)
voltAppMpi%vOptions%gamUserInitFunc => initUser
voltAppMpi%vOptionsMpi%allComm = getMpiF08Communicator(this)
allocate(voltAppMpi%vOptionsMpi%couplingPoolComm)
voltAppMpi%vOptionsMpi%couplingPoolComm = getMpiF08Communicator(this)
call initVoltron_mpi(voltAppMpi, trim(caseFile))
endif

View File

@@ -31,7 +31,7 @@ contains
! make gamera-only mpi communicator
call MPI_Comm_Split(getMpiF08Communicator(this), gamId, this%getProcessRank(), gamCplMpi%gOptionsMpi%gamComm, ierror)
gamCplMpi%gOptionsCplMpiG%allComm = getMpiF08Communicator(this)
gamCplMpi%gOptionsCplMpiG%couplingPoolComm = getMpiF08Communicator(this)
gamCplMpi%gOptions%userInitFunc => initUser
gamCplMpi%gOptionsMpi%doIO = .false.
xmlInp = New_XML_Input(trim(caseFile),'Kaiju',.true.)
@@ -43,7 +43,8 @@ contains
call MPI_Comm_Split(getMpiF08Communicator(this), voltId, this%getProcessRank(), voltComm, ierror)
voltAppMpi%vOptions%gamUserInitFunc => initUser
voltAppMpi%vOptionsMpi%allComm = getMpiF08Communicator(this)
allocate(voltAppMpi%vOptionsMpi%couplingPoolComm)
voltAppMpi%vOptionsMpi%couplingPoolComm = getMpiF08Communicator(this)
call initVoltron_mpi(voltAppMpi, trim(caseFile))
endif