diff --git a/cmake/compilers.cmake b/cmake/compilers.cmake
index aa1c554c..8f068e61 100644
--- a/cmake/compilers.cmake
+++ b/cmake/compilers.cmake
@@ -13,7 +13,21 @@ set(CMAKE_REQUIRED_FLAGS ${OpenMP_Fortran_FLAGS})
 set(CMAKE_REQUIRED_LIBRARIES ${OpenMP_Fortran_LIBRARIES})
 
 if (ENABLE_MPI)
-  find_package(MPI REQUIRED COMPONENTS Fortran)
+  # MPI detection is implementation-dependent
+  # try to find a Fortran 2008-specific wrapper first
+  set(MPI_Fortran_COMPILER mpif08)
+  find_package(MPI COMPONENTS Fortran QUIET)
+  if(NOT MPI_FOUND OR NOT MPI_Fortran_HAVE_F08_MODULE)
+    # fall back to the default MPI search
+    unset(MPI_Fortran_COMPILER)
+    find_package(MPI REQUIRED COMPONENTS Fortran)
+  else()
+    message("-- Found MPI")
+  endif()
+
+  if(NOT MPI_Fortran_HAVE_F08_MODULE)
+    message(FATAL_ERROR "MPI Library does not support F08 interface")
+  endif()
 endif()
 
 #-------------
@@ -104,13 +118,16 @@ if(ENABLE_OMP)
   string(APPEND CMAKE_Fortran_FLAGS " ${OpenMP_Fortran_FLAGS}")
 endif()
 if(ENABLE_MPI)
-  add_definitions(${MPI_Fortran_COMPILE_FLAGS})
-  include_directories(${MPI_Fortran_INCLUDE_PATH})
+  add_compile_options(${MPI_Fortran_COMPILE_OPTIONS})
+  add_definitions(${MPI_Fortran_COMPILE_DEFINITIONS})
+  include_directories(${MPI_Fortran_INCLUDE_DIRS})
   link_directories(${MPI_Fortran_LIBRARIES})
-  if(CMAKE_Fortran_COMPILER_ID MATCHES Intel)
-    string(APPEND CMAKE_Fortran_FLAGS " -mt_mpi")
-  endif()
-  # no matching flag for GNU
+
+  if(MPI_Fortran_COMPILER MATCHES mpiifort)
+    # using the Intel MPI Library
+    string(APPEND CMAKE_Fortran_FLAGS " -mt_mpi")
+  endif()
+
   set(CMAKE_Fortran_COMPILER ${MPI_Fortran_COMPILER})
   # we changed compiler, link HDF5 libraries
   link_libraries(${HDF5_Fortran_LIBRARIES} ${HDF5_Fortran_HL_LIBRARIES})
diff --git a/src/base/mpi/mpidefs.F90 b/src/base/mpi/mpidefs.F90
index 0f72052d..b3bb4078 100644
--- a/src/base/mpi/mpidefs.F90
+++ b/src/base/mpi/mpidefs.F90
@@ -1,11 +1,11 @@
 module mpidefs
 
-  use mpi
+  use mpi_f08
   use kdefs, ONLY: sp,dp,rp
 
   implicit none
 
-  integer, public :: MPI_MYFLOAT
+  type(MPI_Datatype), public :: MPI_MYFLOAT
 
 #ifdef MPI_BASE_ADDR_SIZE
   integer, parameter :: MPI_BASE_MYADDR = MPI_BASE_ADDR_SIZE
@@ -30,7 +30,7 @@ contains
   ! helper functions to print info about custom MPI datatypes
   subroutine simplePrintDataType(datatype)
-    integer, intent(in) :: datatype
+    type(MPI_Datatype), intent(in) :: datatype
 
     logical :: typeUsed
 
     typeUsed = printDataType(datatype)
@@ -38,28 +38,20 @@ contains
   recursive function printDataType(datatype) result(retVal)
-    integer, intent(in) :: datatype
+    type(MPI_Datatype), intent(in) :: datatype
     logical :: retVal
 
     integer :: numInts, numAdds, numDTs, combiner, ierr, i
-    integer, dimension(:), allocatable :: arrayInts, arrayDTs
+    integer, dimension(:), allocatable :: arrayInts
+    type(MPI_Datatype), dimension(:), allocatable :: arrayDTs
     integer(kind=MPI_BASE_MYADDR), dimension(:), allocatable :: arrayAdds
+    character(len=MPI_MAX_OBJECT_NAME) :: typeName
 
     call mpi_type_get_envelope(datatype, numInts, numAdds, numDTs, combiner, ierr)
 
     SELECT CASE(combiner)
     CASE (MPI_COMBINER_NAMED)
-      SELECT CASE (datatype)
-      CASE (MPI_INT)
-        write (*,*) 'Datatype is named: MPI_INT'
-      CASE (MPI_FLOAT)
-        write (*,*) 'Datatype is named: MPI_FLOAT'
-      CASE (MPI_DOUBLE)
-        write (*,*) 'Datatype is named: MPI_DOUBLE'
-      CASE (MPI_DOUBLE_PRECISION)
-        write (*,*) 'Datatype is named: MPI_DOUBLE_PRECISION'
-      CASE DEFAULT
-        write (*,*) 'Unhandled base named datatype in printDataType'
-      ENDSELECT
+      call mpi_type_get_name(datatype, typeName, i, ierr)
+      write (*,*) 'Datatype is named: ', trim(typeName)
       retVal = .false.
       RETURN
 
     CASE (MPI_COMBINER_STRUCT)
diff --git a/src/drivers/voltron_mpix.F90 b/src/drivers/voltron_mpix.F90
index b58f5190..3281fec0 100644
--- a/src/drivers/voltron_mpix.F90
+++ b/src/drivers/voltron_mpix.F90
@@ -8,7 +8,7 @@ program voltron_mpix
   use output
   use voltio
   use uservoltic
-  use mpi
+  use mpi_f08
   use xml_input
 
   implicit none
@@ -25,7 +25,8 @@ program voltron_mpix
 
   procedure(StateIC_T), pointer :: userInitFunc => initUser
 
-  integer :: ierror, length, provided, worldSize, worldRank, gamComm, voltComm, numHelpers
+  integer :: ierror, length, provided, worldSize, worldRank, numHelpers
+  type(MPI_Comm) :: gamComm, voltComm
   integer :: required=MPI_THREAD_MULTIPLE
   character( len = MPI_MAX_ERROR_STRING) :: message
   character(len=strLen) :: inpXML, helpersBuf
diff --git a/src/gamera/mpi/gamapp_mpi.F90 b/src/gamera/mpi/gamapp_mpi.F90
index ee4fc284..eb2b1981 100644
--- a/src/gamera/mpi/gamapp_mpi.F90
+++ b/src/gamera/mpi/gamapp_mpi.F90
@@ -8,22 +8,26 @@ module gamapp_mpi
   use gamapp
   use bcs_mpi
   use mpidefs
-  use mpi
+  use mpi_f08
 
   implicit none
 
   type, extends(GamApp_T) :: gamAppMpi_T
-    integer :: gamMpiComm = MPI_COMM_NULL
+    type(MPI_Comm) :: gamMpiComm
     integer, dimension(:), allocatable :: sendRanks, recvRanks
 
     ! Gas Data Transfer Variables
-    integer, dimension(:), allocatable :: sendCountsGas, sendTypesGas
-    integer, dimension(:), allocatable :: recvCountsGas, recvTypesGas
+    integer, dimension(:), allocatable :: sendCountsGas
+    type(MPI_Datatype), dimension(:), allocatable :: sendTypesGas
+    integer, dimension(:), allocatable :: recvCountsGas
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesGas
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: sendDisplsGas, recvDisplsGas
 
     ! Magnetic Flux Data Transfer Variables
-    integer, dimension(:), allocatable :: sendCountsMagFlux, sendTypesMagFlux
-    integer, dimension(:), allocatable :: recvCountsMagFlux, recvTypesMagFlux
+    integer, dimension(:), allocatable :: sendCountsMagFlux
+    type(MPI_Datatype), dimension(:), allocatable :: sendTypesMagFlux
+    integer, dimension(:), allocatable :: recvCountsMagFlux
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesMagFlux
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: sendDisplsMagFlux, recvDisplsMagFlux
 
     ! Debugging flags
@@ -37,7 +41,7 @@ module gamapp_mpi
   subroutine initGamera_mpi(gamAppMpi, userInitFunc, gamComm, optFilename, doIO)
     type(gamAppMpi_T), intent(inout) :: gamAppMpi
     procedure(StateIC_T), pointer, intent(in) :: userInitFunc
-    integer, intent(in) :: gamComm
+    type(MPI_Comm), intent(in) :: gamComm
     character(len=*), optional, intent(in) :: optFilename
     logical, optional, intent(in) :: doIO
 
@@ -117,7 +121,7 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(inout) :: gamAppMpi
     type(XML_Input_T), intent(inout) :: xmlInp
     procedure(StateIC_T), pointer, intent(in) :: userInitFunc
-    integer, intent(in) :: gamComm
+    type(MPI_Comm), intent(in) :: gamComm
     real(rp), optional, intent(in) :: endTime
 
     integer :: numNeighbors, ierr, length, commSize, rank, ic, jc, kc, rn
@@ -661,7 +665,7 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(inout) :: gamAppMpi
 
     integer :: ierr, length
-    integer :: gasReq, mfReq
+    type(MPI_Request) :: gasReq, mfReq
     character(len=strLen) :: message
 
     ! arrays for calculating mag flux face error, if applicable
@@ -727,8 +731,8 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(inout) :: gamAppMpi
     logical, intent(in) :: periodicI, periodicJ, periodicK
 
-    integer :: iData,jData,kData,rankIndex,dType,offset,dataSize,ierr
-    integer :: dtGas4,dtGas5
+    integer :: iData,jData,kData,rankIndex,offset,dataSize,ierr
+    type(MPI_Datatype) :: dType,dtGas4,dtGas5
 
     associate(Grid=>gamAppMpi%Grid,Model=>gamAppMpi%Model)
 
@@ -879,7 +883,8 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: recvFromRank,iData,jData,kData
     logical, intent(in) :: periodicI, periodicJ, periodicK
-    integer, intent(out) :: dType, offset
+    type(MPI_Datatype), intent(out) :: dType
+    integer, intent(out) :: offset
     logical, intent(in) :: doFace, doEdge, doCorner
 
     integer :: tgtRank, calcOffset, ierr, dataSize
@@ -974,7 +979,8 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: sendToRank,iData,jData,kData
     logical, intent(in) :: periodicI, periodicJ, periodicK
-    integer, intent(out) :: dType, offset
+    type(MPI_Datatype), intent(out) :: dType
+    integer, intent(out) :: offset
     logical, intent(in) :: doFace, doEdge, doCorner
 
     integer :: myRank, tempRank, sendToI, sendToJ, sendToK
@@ -1108,10 +1114,11 @@ module gamapp_mpi
                              doFace,doEdge,doCorner)
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: iData, jData, kData
-    integer, intent(out) :: dType
+    type(MPI_Datatype), intent(out) :: dType
     logical, intent(in) :: doFace, doEdge, doCorner
 
-    integer dType1D,dType2D,dType3D,dataSize,ierr,dataSum
+    type(MPI_Datatype) :: dType1D,dType2D,dType3D
+    integer :: dataSize,ierr,dataSum
 
     associate(Grid=>gamAppMpi%Grid,Model=>gamAppMpi%Model)
 
@@ -1176,7 +1183,8 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: recvFromRank,iData,jData,kData
     logical, intent(in) :: periodicI, periodicJ, periodicK
-    integer, intent(out) :: dType, offset
+    type(MPI_Datatype), intent(out) :: dType
+    integer, intent(out) :: offset
     logical, intent(in) :: doFace, doEdge, doCorner
 
     integer :: tgtRank, calcOffset, ierr, dataSize
@@ -1272,7 +1280,8 @@ module gamapp_mpi
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: sendToRank,iData,jData,kData
     logical, intent(in) :: periodicI, periodicJ, periodicK
-    integer, intent(out) :: dType, offset
+    type(MPI_Datatype), intent(out) :: dType
+    integer, intent(out) :: offset
     logical, intent(in) :: doFace, doEdge, doCorner
 
     integer :: myRank, tempRank, sendToI, sendToJ, sendToK
@@ -1405,10 +1414,10 @@ module gamapp_mpi
   subroutine calcDatatypeFC(gamAppMpi,iData,jData,kData,dType,doFace,doEdge,doCorner)
     type(gamAppMpi_T), intent(in) :: gamAppMpi
     integer, intent(in) :: iData, jData, kData
-    integer, intent(out) :: dType
+    type(MPI_Datatype), intent(out) :: dType
     logical, intent(in) :: doFace, doEdge, doCorner
 
-    integer :: dType1DI,dType1DJ,dType1DK,dType2DI,dType2DJ,dType2DK,dType3DI,dType3DJ,dType3DK
+    type(MPI_Datatype) :: dType1DI,dType1DJ,dType1DK,dType2DI,dType2DJ,dType2DK,dType3DI,dType3DJ,dType3DK
     integer :: offsetI, offsetJ, offsetK, dataSum, ierr, dataSize
     logical :: anyMaxDim,sendSharedFace
     integer :: countArray(3)
@@ -1551,8 +1560,9 @@ module gamapp_mpi
   end subroutine calcDatatypeFC
 
   subroutine appendDatatype(appendType,dType,offset)
-    integer, intent(inout) :: appendType
-    integer, intent(in) :: dType, offset
+    type(MPI_Datatype), intent(inout) :: appendType
+    type(MPI_Datatype), intent(in) :: dType
+    integer, intent(in) :: offset
 
     integer :: ierr
     integer(kind=MPI_BASE_MYADDR) :: tempOffsets(2)
diff --git a/src/voltron/mpi/gam2VoltComm_mpi.F90 b/src/voltron/mpi/gam2VoltComm_mpi.F90
index 6330bcbe..513f6a24 100644
--- a/src/voltron/mpi/gam2VoltComm_mpi.F90
+++ b/src/voltron/mpi/gam2VoltComm_mpi.F90
@@ -4,13 +4,13 @@ module gam2VoltComm_mpi
   use gamapp_mpi
   use uservoltic
   use mpidefs
-  use mpi
+  use mpi_f08
   use, intrinsic :: ieee_arithmetic, only: IEEE_VALUE, IEEE_SIGNALING_NAN, IEEE_QUIET_NAN
 
   implicit none
 
   type :: gam2VoltCommMpi_T
-    integer :: voltMpiComm = MPI_COMM_NULL
+    type(MPI_Comm) :: voltMpiComm
     integer :: myRank, voltRank
     logical :: doSerialVoltron = .false., doAsyncShallow = .true.
     logical :: firstShallowUpdate = .true., firstDeepUpdate = .true.
@@ -20,29 +20,37 @@ module gam2VoltComm_mpi
     logical :: doDeep
 
     ! array of all zeroes to simplify various send/receive calls
-    integer, dimension(1) :: zeroArrayCounts = (/ 0 /), zeroArrayTypes = (/ MPI_INT /)
+    integer, dimension(1) :: zeroArrayCounts = (/ 0 /)
+    type(MPI_Datatype), dimension(1) :: zeroArrayTypes ! this can't be initialized here with mpi_f08, must be later
     integer(kind=MPI_AN_MYADDR), dimension(1) :: zeroArrayDispls = (/ 0 /)
 
     ! SHALLOW COUPLING VARIABLES
-    integer, dimension(1) :: sendCountsGasShallow, sendTypesGasShallow
+    integer, dimension(1) :: sendCountsGasShallow
+    type(MPI_Datatype), dimension(1) :: sendTypesGasShallow
     integer(kind=MPI_AN_MYADDR), dimension(1) :: sendDisplsGasShallow
-    integer, dimension(1) :: sendCountsBxyzShallow, sendTypesBxyzShallow
+    integer, dimension(1) :: sendCountsBxyzShallow
+    type(MPI_Datatype), dimension(1) :: sendTypesBxyzShallow
     integer(kind=MPI_AN_MYADDR), dimension(1) :: sendDisplsBxyzShallow
-    integer, dimension(1) :: recvCountsIneijkShallow, recvTypesIneijkShallow
+    integer, dimension(1) :: recvCountsIneijkShallow
+    type(MPI_Datatype), dimension(1) :: recvTypesIneijkShallow
     integer(kind=MPI_AN_MYADDR), dimension(1) :: recvDisplsIneijkShallow
-    integer, dimension(1) :: recvCountsInexyzShallow, recvTypesInexyzShallow
+    integer, dimension(1) :: recvCountsInexyzShallow
+    type(MPI_Datatype), dimension(1) :: recvTypesInexyzShallow
     integer(kind=MPI_AN_MYADDR), dimension(1) :: recvDisplsInexyzShallow
 
     ! SHALLOW ASYNCHRONOUS VARIABLES
-    integer :: shallowGasSendReq=MPI_REQUEST_NULL, shallowBxyzSendReq=MPI_REQUEST_NULL, shallowTimeBcastReq=MPI_REQUEST_NULL
+    type(MPI_Request) :: shallowGasSendReq, shallowBxyzSendReq, shallowTimeBcastReq
     real(rp), dimension(:,:,:,:,:), allocatable :: gasBuffer
     real(rp), dimension(:,:,:,:), allocatable :: bxyzBuffer
 
     ! DEEP COUPLING VARIABLES
-    integer, dimension(1) :: sendCountsGasDeep, sendTypesGasDeep
+    integer, dimension(1) :: sendCountsGasDeep
+    type(MPI_Datatype), dimension(1) :: sendTypesGasDeep
     integer(kind=MPI_AN_MYADDR), dimension(1) :: sendDisplsGasDeep
-    integer, dimension(1) :: sendCountsBxyzDeep, sendTypesBxyzDeep
+    integer, dimension(1) :: sendCountsBxyzDeep
+    type(MPI_Datatype), dimension(1) :: sendTypesBxyzDeep
     integer(kind=MPI_AN_MYADDR), dimension(1) :: sendDisplsBxyzDeep
-    integer, dimension(1) :: recvCountsGas0Deep, recvTypesGas0Deep
+    integer, dimension(1) :: recvCountsGas0Deep
+    type(MPI_Datatype), dimension(1) :: recvTypesGas0Deep
     integer(kind=MPI_AN_MYADDR), dimension(1) :: recvDisplsGas0Deep
   end type gam2VoltCommMpi_T
@@ -53,17 +61,21 @@ module gam2VoltComm_mpi
   subroutine initGam2Volt(g2vComm, gApp, allComm, optFilename, doIO)
     type(gam2VoltCommMpi_T), intent(inout) :: g2vComm
     type(gamAppMpi_T), intent(inout) :: gApp
-    integer, intent(in) :: allComm
+    type(MPI_Comm), intent(in) :: allComm
     character(len=*), optional, intent(in) :: optFilename
    logical, optional, intent(in) :: doIO
 
-    integer :: length, commSize, ierr, numCells, dataCount, numInNeighbors, numOutNeighbors, voltComm
+    integer :: length, commSize, ierr, numCells, dataCount, numInNeighbors, numOutNeighbors
+    type(MPI_Comm) :: voltComm
     character( len = MPI_MAX_ERROR_STRING) :: message
     logical :: reorder, wasWeighted, doIOX
     character(len=strLen) :: inpXML
     type(XML_Input_T) :: xmlInp
     integer, dimension(1) :: rankArray, weightArray
 
+    ! initialize the zeroArrayTypes array
+    g2vComm%zeroArrayTypes = (/ MPI_INT /)
+
     ! split voltron helpers off of the communicator
     ! split allComm into a communicator with only the non-helper voltron rank
     call MPI_Comm_rank(allComm, commSize, ierr)
@@ -108,9 +120,9 @@ module gam2VoltComm_mpi
     call MPI_Comm_rank(voltComm, g2vComm%myRank, ierr)
 
     ! send my i/j/k ranks to the voltron rank
-    call mpi_gather(gApp%Grid%Ri, 1, MPI_INT, 0, 0, 0, commSize-1, voltComm, ierr)
-    call mpi_gather(gApp%Grid%Rj, 1, MPI_INT, 0, 0, 0, commSize-1, voltComm, ierr)
-    call mpi_gather(gApp%Grid%Rk, 1, MPI_INT, 0, 0, 0, commSize-1, voltComm, ierr)
+    call mpi_gather(gApp%Grid%Ri, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
+    call mpi_gather(gApp%Grid%Rj, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
+    call mpi_gather(gApp%Grid%Rk, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, commSize-1, voltComm, ierr)
 
     numCells = gApp%Grid%Nip*gApp%Grid%Njp*gApp%Grid%Nkp
     ! rank 0 send the number of physical cells to voltron rank
@@ -154,9 +166,9 @@ module gam2VoltComm_mpi
     g2vComm%voltRank = rankArray(1)
 
     ! send i/j/k ranks again since my rank may have changed in the new communicator
-    call mpi_gather(gApp%Grid%Ri, 1, MPI_INT, 0, 0, 0, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
-    call mpi_gather(gApp%Grid%Rj, 1, MPI_INT, 0, 0, 0, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
-    call mpi_gather(gApp%Grid%Rk, 1, MPI_INT, 0, 0, 0, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
+    call mpi_gather(gApp%Grid%Ri, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
+    call mpi_gather(gApp%Grid%Rj, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
+    call mpi_gather(gApp%Grid%Rk, 1, MPI_INT, 0, 0, MPI_DATATYPE_NULL, g2vComm%voltRank, g2vComm%voltMpiComm, ierr)
 
     ! Send restart number so that voltron can ensure they have the same number
     ! only the rank with Ri/Rj/Rk==0 should send the value to voltron
@@ -650,10 +662,10 @@ module gam2VoltComm_mpi
     type(gamAppMpi_T), intent(in) :: gApp
 
     integer :: ierr, dataSize, sendDataOffset, recvDataOffset
-    integer :: iJP, iJPjP, iJPjPkP, iJPjPkP4Gas, iJpjPkP5Gas, iJP3, iPSI, iPSI1
-    integer :: Bxyz2, Bxyz3, Bxyz4, Eijk2, EIjk3, Eijk4, Exyz2, Exyz3, Exyz4
-    integer :: iP,iPjP,iPjPkP,iPjPkP4Gas,iPjPkP4Bxyz,iPjPkP5Gas
-    integer :: iPG2,iPG2jPG2,iPG2jPG2kPG2,iPG2jPG2kPG24Gas,iPG2jPG2kPG25Gas
+    type(MPI_Datatype) :: iJP, iJPjP, iJPjPkP, iJPjPkP4Gas, iJpjPkP5Gas, iJP3, iPSI, iPSI1
+    type(MPI_Datatype) :: Bxyz2, Bxyz3, Bxyz4, Eijk2, EIjk3, Eijk4, Exyz2, Exyz3, Exyz4
+    type(MPI_Datatype) :: iP,iPjP,iPjPkP,iPjPkP4Gas,iPjPkP4Bxyz,iPjPkP5Gas
+    type(MPI_Datatype) :: iPG2,iPG2jPG2,iPG2jPG2kPG2,iPG2jPG2kPG24Gas,iPG2jPG2kPG25Gas
 
     associate(Grid=>gApp%Grid,Model=>gApp%Model, &
               JpSt=>g2vComm%JpSt,JpSh=>g2vComm%JpSh, &
diff --git a/src/voltron/mpi/voltapp_mpi.F90 b/src/voltron/mpi/voltapp_mpi.F90
index 4f80bb5e..107b6cd5 100644
--- a/src/voltron/mpi/voltapp_mpi.F90
+++ b/src/voltron/mpi/voltapp_mpi.F90
@@ -5,7 +5,7 @@ module voltapp_mpi
   use voltmpitypes
   use gamapp_mpi
   use gamapp
-  use mpi
+  use mpi_f08
   use ebsquish, only : SquishBlocksRemain, DoSquishBlock
   use, intrinsic :: ieee_arithmetic, only: IEEE_VALUE, IEEE_SIGNALING_NAN, IEEE_QUIET_NAN
   use volthelpers_mpi
@@ -71,14 +71,15 @@ module voltapp_mpi
   subroutine initVoltron_mpi(vApp, userInitFunc, helperComm, allComm, optFilename)
     type(voltAppMpi_T), intent(inout) :: vApp
     procedure(StateIC_T), pointer, intent(in) :: userInitFunc
-    integer, intent(in) :: helperComm
-    integer, intent(in) :: allComm
+    type(MPI_Comm), intent(in) :: helperComm
+    type(MPI_Comm), intent(in) :: allComm
     character(len=*), optional, intent(in) :: optFilename
 
     character(len=strLen) :: inpXML
     type(XML_Input_T) :: xmlInp
     integer :: commSize, ierr, numCells, length, ic, numInNeighbors, numOutNeighbors
-    integer :: voltComm, nHelpers, gamNRES
+    type(MPI_Comm) :: voltComm
+    integer :: nHelpers, gamNRES
     character( len = MPI_MAX_ERROR_STRING) :: message
     logical :: reorder, wasWeighted
     integer, allocatable, dimension(:) :: neighborRanks, inData, outData
@@ -456,7 +457,8 @@ module voltapp_mpi
     type(voltAppMpi_T), intent(inout) :: vApp
     real(rp), intent(in) :: time
 
-    integer :: ierr, asyncShallowBcastReq
+    integer :: ierr
+    type(MPI_Request) :: asyncShallowBcastReq
 
     if(vApp%firstDeepUpdate .and. vApp%firstShallowUpdate) then
       call firstDeep(vApp)
@@ -1055,13 +1057,14 @@ module voltapp_mpi
     integer, dimension(1:SIZE(vApp%recvRanks)+1), intent(in) :: iRanks, jRanks, kRanks
 
     integer :: ierr, NiRanks, NjRanks, NkRanks, NipT, NjpT, NkpT, dataSize
-    integer :: r, rRank, recvDataOffset, recvDatatype
-    integer :: iJP, iJPjP, iJPjPkP, iJPjPkP4Gas, iJPjPkP5Gas, iJPjPkP4Bxyz, iJPjPkP5Bxyz
-    integer :: iJP3, iJP3jP, iJP3jPG, iJP3jPG2, iJP3jPkPG2, iJP3jPGkPG2, iJP3jPG2kPG2
-    integer :: iJP3jPkPG24Bxyz, iJP3jPGkPG24Bxyz, iJP3jPG2kPG24Bxyz
-    integer :: sRank,sendDataOffset,iPSI,iPSI1,Exyz2,Eijk2,Exyz3,Eijk3,Exyz4,Eijk4
-    integer :: iP,iPjP,iPjPkP,iPjPkP4Bxyz,iPjPkP4Gas,iPjPkP5Gas
-    integer :: iPG2,iPG2jPG2,iPG2jPG2kPG2,iPG2jPG2kPG24Gas,iPG2jPG2kPG25Gas
+    integer :: r, rRank, recvDataOffset, sRank, sendDataOffset
+    type(MPI_Datatype) :: recvDatatype
+    type(MPI_Datatype) :: iJP, iJPjP, iJPjPkP, iJPjPkP4Gas, iJPjPkP5Gas, iJPjPkP4Bxyz, iJPjPkP5Bxyz
+    type(MPI_Datatype) :: iJP3, iJP3jP, iJP3jPG, iJP3jPG2, iJP3jPkPG2, iJP3jPGkPG2, iJP3jPG2kPG2
+    type(MPI_Datatype) :: iJP3jPkPG24Bxyz, iJP3jPGkPG24Bxyz, iJP3jPG2kPG24Bxyz
+    type(MPI_Datatype) :: iPSI,iPSI1,Exyz2,Eijk2,Exyz3,Eijk3,Exyz4,Eijk4
+    type(MPI_Datatype) :: iP,iPjP,iPjPkP,iPjPkP4Bxyz,iPjPkP4Gas,iPjPkP5Gas
+    type(MPI_Datatype) :: iPG2,iPG2jPG2,iPG2jPG2kPG2,iPG2jPG2kPG24Gas,iPG2jPG2kPG25Gas
 
     associate(Grid=>vApp%gAppLocal%Grid,Model=>vApp%gAppLocal%Model, &
               JpSt=>vApp%mhd2mix%JStart,JpSh=>vApp%mhd2mix%JShells, &
@@ -1304,7 +1307,9 @@ module voltapp_mpi
     type(voltAppMpi_T), intent(inout) :: vApp
     logical, intent(out) :: helperQuit ! should the helper quit
 
-    integer :: ierr, helpType, helpReq = MPI_REQUEST_NULL
+    integer :: ierr, helpType
+    type(MPI_Request) :: helpReq
+
     helperQuit = .false. ! don't quit normally
 
     ! assumed to only be in this function if helpers are enabled
diff --git a/src/voltron/mpi/volthelpers_mpi.F90 b/src/voltron/mpi/volthelpers_mpi.F90
index 53944177..a47ee7d1 100644
--- a/src/voltron/mpi/volthelpers_mpi.F90
+++ b/src/voltron/mpi/volthelpers_mpi.F90
@@ -2,7 +2,7 @@ module volthelpers_mpi
 
   use voltmpitypes
-  use mpi
+  use mpi_f08
   use ebsquish, only : SquishBlocksRemain, DoSquishBlock
   use, intrinsic :: ieee_arithmetic, only: IEEE_VALUE, IEEE_SIGNALING_NAN, IEEE_QUIET_NAN
 
@@ -95,7 +95,7 @@ module volthelpers_mpi
   subroutine sendChimpStateData(ebState, vHelpComm)
     type(ebState_T), intent(in) :: ebState
-    integer, intent(in) :: vHelpComm
+    type(MPI_Comm), intent(in) :: vHelpComm
 
     integer :: ierr, length
     character( len = MPI_MAX_ERROR_STRING) :: message
 
@@ -156,7 +156,7 @@ module volthelpers_mpi
   subroutine recvChimpStateData(ebState, vHelpComm)
     type(ebState_T), intent(inout) :: ebState
-    integer, intent(in) :: vHelpComm
+    type(MPI_Comm), intent(in) :: vHelpComm
 
     integer :: ierr, length
     character( len = MPI_MAX_ERROR_STRING) :: message
 
@@ -318,7 +318,8 @@ module volthelpers_mpi
     type(voltAppMpi_T), intent(in) :: vApp
     integer, intent(in) :: rType
 
-    integer :: ierr, helpReq = MPI_REQUEST_NULL
+    integer :: ierr
+    type(MPI_Request) :: helpReq
 
     ! async to match waiting helper nodes
     call mpi_Ibcast(rType, 1, MPI_INT, 0, vApp%vHelpComm, helpReq, ierr)
@@ -399,7 +400,8 @@ module volthelpers_mpi
   subroutine vhReqSquishEnd(vApp)
     type(voltAppMpi_T), intent(inout) :: vApp
 
-    integer :: ierr,length,i,firstBlock,ks,ke, oldSizes(4), newSizes(4), offsets(4), newtype
+    integer :: ierr,length,i,firstBlock,ks,ke, oldSizes(4), newSizes(4), offsets(4)
+    type(MPI_Datatype) :: newtype
     character( len = MPI_MAX_ERROR_STRING) :: message
 
     call Tic("VHReqSquishE")
@@ -481,7 +483,8 @@ module volthelpers_mpi
   subroutine vhHandleSquishEnd(vApp)
     type(voltAppMpi_T), intent(inout) :: vApp
 
-    integer :: ierr,length,ks,ke, oldSizes(4), newSizes(4), offsets(4), newtype
+    integer :: ierr,length,ks,ke, oldSizes(4), newSizes(4), offsets(4)
+    type(MPI_Datatype) :: newtype
     character( len = MPI_MAX_ERROR_STRING) :: message
 
     oldSizes = shape(vApp%chmp2mhd%xyzSquish)
diff --git a/src/voltron/mpi/voltmpitypes.F90 b/src/voltron/mpi/voltmpitypes.F90
index 5600e668..7354c0f8 100644
--- a/src/voltron/mpi/voltmpitypes.F90
+++ b/src/voltron/mpi/voltmpitypes.F90
@@ -2,58 +2,65 @@ module voltmpitypes
 
   use voltapp
-  use mpi
+  use mpi_f08
   use mpidefs
 
   implicit none
 
   type, extends(voltApp_T) :: voltAppMpi_T
     ! voltron to helpers comms variables
-    integer :: vHelpComm = MPI_COMM_NULL
+    type(MPI_Comm) :: vHelpComm
     integer :: vHelpRank
-    integer :: vHelpWin = MPI_WIN_NULL
+    type(MPI_Win) :: vHelpWin
     integer, dimension(:), allocatable :: vHelpIdle
     logical :: amHelper = .false., useHelpers = .false.
     logical :: doSquishHelp = .false., masterSquish = .false.
 
     ! voltron to gamera comms variables
-    integer :: voltMpiComm = MPI_COMM_NULL
+    type(MPI_Comm) :: voltMpiComm
     integer :: myRank
     type(gamApp_T) :: gAppLocal
     logical :: doSerialVoltron = .false., doAsyncShallow = .true.
     logical :: firstShallowUpdate = .true., firstDeepUpdate = .true., firstStepUpdate = .true.
 
     ! array of all zeroes to simplify various send/receive calls
-    integer, dimension(:), allocatable :: zeroArrayCounts, zeroArrayTypes
+    integer, dimension(:), allocatable :: zeroArrayCounts
+    type(MPI_Datatype), dimension(:), allocatable :: zeroArrayTypes
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: zeroArrayDispls
 
     ! list of gamera ranks to communicate with
     integer, dimension(:), allocatable :: sendRanks, recvRanks
 
     ! STEP VOLTRON VARIABLES
-    integer :: timeReq=MPI_REQUEST_NULL, timeStepReq=MPI_REQUEST_NULL
+    type(MPI_Request) :: timeReq, timeStepReq
     real(rp) :: timeBuffer
     integer :: timeStepBuffer
 
     ! SHALLOW COUPLING VARIABLES
-    integer, dimension(:), allocatable :: recvCountsGasShallow, recvTypesGasShallow
+    integer, dimension(:), allocatable :: recvCountsGasShallow
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesGasShallow
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: recvDisplsGasShallow
-    integer, dimension(:), allocatable :: recvCountsBxyzShallow, recvTypesBxyzShallow
+    integer, dimension(:), allocatable :: recvCountsBxyzShallow
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesBxyzShallow
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: recvDisplsBxyzShallow
-    integer, dimension(:), allocatable :: sendCountsIneijkShallow, sendTypesIneijkShallow
+    integer, dimension(:), allocatable :: sendCountsIneijkShallow
+    type(MPI_Datatype), dimension(:), allocatable :: sendTypesIneijkShallow
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: sendDisplsIneijkShallow
-    integer, dimension(:), allocatable :: sendCountsInexyzShallow, sendTypesInexyzShallow
+    integer, dimension(:), allocatable :: sendCountsInexyzShallow
+    type(MPI_Datatype), dimension(:), allocatable :: sendTypesInexyzShallow
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: sendDisplsInexyzShallow
 
     ! SHALLOW ASYNCHRONOUS VARIABLES
-    integer :: shallowIneijkSendReq=MPI_REQUEST_NULL, shallowInexyzSendReq=MPI_REQUEST_NULL
-    integer :: asyncShallowBcastReq=MPI_REQUEST_NULL
+    type(MPI_Request) :: shallowIneijkSendReq, shallowInexyzSendReq, asyncShallowBcastReq
 
     ! DEEP COUPLING VARIABLES
-    integer, dimension(:), allocatable :: recvCountsGasDeep, recvTypesGasDeep
+    integer, dimension(:), allocatable :: recvCountsGasDeep
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesGasDeep
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: recvDisplsGasDeep
-    integer, dimension(:), allocatable :: recvCountsBxyzDeep, recvTypesBxyzDeep
+    integer, dimension(:), allocatable :: recvCountsBxyzDeep
+    type(MPI_Datatype), dimension(:), allocatable :: recvTypesBxyzDeep
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: recvDisplsBxyzDeep
-    integer, dimension(:), allocatable :: sendCountsGas0Deep, sendTypesGas0Deep
+    integer, dimension(:), allocatable :: sendCountsGas0Deep
+    type(MPI_Datatype), dimension(:), allocatable :: sendTypesGas0Deep
     integer(kind=MPI_AN_MYADDR), dimension(:), allocatable :: sendDisplsGas0Deep
 
     logical :: deepProcessingInProgress = .false.
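Reviewer note (not part of the patch): the recurring change above is that, with the mpi_f08 bindings, MPI handles become opaque derived types (type(MPI_Comm), type(MPI_Datatype), type(MPI_Request), type(MPI_Win)) rather than default integers, and the module overloads == and /= for them. A minimal, self-contained sketch of the f08 style, with illustrative names only:

  program f08_handles_demo
    use mpi_f08   ! handles are derived types rather than integers
    implicit none
    type(MPI_Comm)    :: comm   ! was "integer :: comm" under "use mpi"
    type(MPI_Request) :: req
    integer :: rank, ierr

    call MPI_Init(ierr)
    comm = MPI_COMM_WORLD        ! handle constants are assigned at run time
    req  = MPI_REQUEST_NULL
    call MPI_Comm_rank(comm, rank, ierr)
    ! == and /= are overloaded for handle types by mpi_f08
    if (comm /= MPI_COMM_NULL) write (*,*) 'running on rank ', rank
    call MPI_Finalize(ierr)
  end program f08_handles_demo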
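Related reviewer note (not part of the patch): several declaration-time initializers are dropped above (gamMpiComm = MPI_COMM_NULL, shallowGasSendReq=MPI_REQUEST_NULL, zeroArrayTypes = (/ MPI_INT /), and so on). The patch's own comment gives the reason: with mpi_f08 these handle constants are derived-type values rather than plain integer parameters, so they are not used as default initializers here, and the setup moves to run time (see the new g2vComm%zeroArrayTypes assignment in initGam2Volt). A sketch of that pattern, assuming a hypothetical commState_T type and initCommState routine:

  module comm_state
    use mpi_f08
    implicit none
    type :: commState_T
      ! under "use mpi" this could have read: integer :: req = MPI_REQUEST_NULL
      type(MPI_Request) :: req                        ! no default initializer
      type(MPI_Datatype), dimension(1) :: zeroTypes   ! filled in at run time
    end type commState_T
  contains
    subroutine initCommState(cs)
      type(commState_T), intent(inout) :: cs
      ! assign the handle constants once, e.g. right after MPI_Init
      cs%req       = MPI_REQUEST_NULL
      cs%zeroTypes = (/ MPI_INTEGER /)
    end subroutine initCommState
  end module comm_state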
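Reviewer note on the printDataType change (not part of the patch): the hard-coded SELECT CASE over named datatypes is replaced with MPI_Type_get_name, which returns the predefined name for any named datatype and accepts the f08 handle directly. A standalone sketch, with illustrative routine and variable names:

  subroutine reportNamedType(datatype)
    use mpi_f08
    implicit none
    type(MPI_Datatype), intent(in) :: datatype
    character(len=MPI_MAX_OBJECT_NAME) :: typeName
    integer :: nameLen, ierr

    ! returns e.g. 'MPI_INTEGER' or 'MPI_DOUBLE_PRECISION' for predefined types
    call MPI_Type_get_name(datatype, typeName, nameLen, ierr)
    write (*,*) 'Datatype is named: ', trim(typeName)
  end subroutine reportNamedType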
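Reviewer note on the mpi_gather calls (not part of the patch): the literal 0 previously passed as the receive datatype no longer type-checks, because the recvtype dummy argument is type(MPI_Datatype) under mpi_f08; since the receive arguments are ignored on non-root ranks anyway, the patch passes MPI_DATATYPE_NULL as a placeholder. A hedged sketch of the same pattern, with illustrative names (gatherToRoot, dummyBuf):

  subroutine gatherToRoot(myVal, root, comm, allVals)
    use mpi_f08
    implicit none
    integer, intent(in) :: myVal, root
    type(MPI_Comm), intent(in) :: comm
    integer, intent(inout) :: allVals(:)   ! only referenced on the root rank
    integer :: rank, ierr, dummyBuf

    call MPI_Comm_rank(comm, rank, ierr)
    if (rank == root) then
      call MPI_Gather(myVal, 1, MPI_INTEGER, allVals, 1, MPI_INTEGER, root, comm, ierr)
    else
      ! recvbuf/recvcount/recvtype are ignored away from the root, but recvtype must
      ! still be a type(MPI_Datatype) value, hence MPI_DATATYPE_NULL rather than 0
      call MPI_Gather(myVal, 1, MPI_INTEGER, dummyBuf, 0, MPI_DATATYPE_NULL, root, comm, ierr)
    end if
  end subroutine gatherToRoot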