Use mpp_broadcast instead of MPI_Bcast at the request of NOAA-GFDL
Dan Kokron authored and committed May 23, 2023
1 parent 1426af8 commit c7313bf
Showing 3 changed files with 28 additions and 31 deletions.
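For context, the sketch below outlines the pattern this commit moves to: register the broadcast group as an FMS pelist once at initialization, then at run time save the current pelist, switch to the broadcast pelist, call mpp_broadcast, and restore the original pelist. It assumes only the mpp_mod interfaces exercised in the diff below (mpp_declare_pelist, mpp_get_current_pelist, mpp_set_current_pelist, mpp_broadcast); the program and variable names are illustrative and not part of the commit.

! Minimal sketch: broadcast a field from one sending rank to a group of ranks
! using an FMS pelist instead of a hand-built MPI communicator.
program pelist_broadcast_sketch
  use mpp_mod, only: mpp_init, mpp_exit, mpp_pe, mpp_npes, &
                     mpp_get_current_pelist, mpp_set_current_pelist, &
                     mpp_declare_pelist, mpp_broadcast
  implicit none

  real    :: g_dat(8)                     ! illustrative payload
  integer :: sending_proc                 ! rank that owns the valid data
  integer, allocatable :: bcast_ranks(:)  ! sender plus receivers
  integer, allocatable :: global_pelist(:)
  integer :: p

  call mpp_init()

  ! Illustrative setup: rank 0 sends to every rank. In the real code the
  ! sender is a parent-grid rank and the receivers are the nest's pelist.
  sending_proc = 0
  allocate(bcast_ranks(0:mpp_npes()-1))
  bcast_ranks = (/ (p, p = 0, mpp_npes()-1) /)

  ! Init-time step (fv_control_init in the commit): register the pelist once.
  call mpp_declare_pelist(bcast_ranks)

  ! Run-time step (fill_nested_grid_cpl in the commit): remember the current
  ! pelist, switch to the broadcast pelist, broadcast, then restore.
  allocate(global_pelist(mpp_npes()))
  call mpp_get_current_pelist(global_pelist)

  if (mpp_pe() == sending_proc) g_dat = 42.0
  if (any(mpp_pe() == bcast_ranks)) then
     call mpp_set_current_pelist(bcast_ranks)
     call mpp_broadcast(g_dat, size(g_dat), sending_proc, bcast_ranks)
  endif
  call mpp_set_current_pelist(global_pelist)

  deallocate(global_pelist, bcast_ranks)
  call mpp_exit()
end program pelist_broadcast_sketch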
16 changes: 11 additions & 5 deletions driver/fvGFS/atmosphere.F90
@@ -165,7 +165,7 @@ module atmosphere_mod
mpp_npes, mpp_pe, mpp_chksum, &
mpp_get_current_pelist, &
mpp_set_current_pelist, &
mpp_sync, mpp_sync_self, mpp_send, mpp_recv
mpp_sync, mpp_sync_self, mpp_send, mpp_recv, mpp_broadcast
use mpp_parameter_mod, only: EUPDATE, WUPDATE, SUPDATE, NUPDATE
use mpp_domains_mod, only: CENTER, CORNER, NORTH, EAST, WEST, SOUTH
use mpp_domains_mod, only: domain2d, mpp_update_domains, mpp_global_field
@@ -2429,12 +2429,12 @@ end subroutine atmosphere_fill_nest_cpl
!! coupling variables from its parent grid
!>@details Fill parent2nest_2d on the nested grid with values from its parent.
subroutine fill_nested_grid_cpl(this_grid, proc_in)
use mpi
integer, intent(in) :: this_grid
logical, intent(in), optional :: proc_in

real, allocatable :: g_dat(:,:,:)
integer :: p, ierr
integer, allocatable :: global_pelist(:)
integer :: p
integer :: isd_p, ied_p, jsd_p, jed_p
integer :: isg, ieg, jsg, jeg
integer :: isc, iec, jsc, jec
@@ -2459,17 +2459,23 @@ subroutine fill_nested_grid_cpl(this_grid, proc_in)
Atm(this_grid)%parent_grid%parent2nest_2d(isd_p:ied_p,jsd_p:jed_p), &
g_dat(isg:,jsg:,1), position=CENTER)
endif

allocate(global_pelist(mpp_npes()))
call mpp_get_current_pelist(global_pelist)
if(any(mpp_pe() == Atm(this_grid)%Bcast_ranks)) then
call MPI_Bcast(g_dat, size(g_dat), MPI_REAL, 0, Atm(this_grid)%Bcast_comm, ierr) ! root==0 because sending rank (Atm(this_grid)%sending) is rank zero in Bcast_comm
call mpp_set_current_pelist(Atm(this_grid)%Bcast_ranks)
call mpp_broadcast(g_dat, size(g_dat),Atm(this_grid)%sending_proc, Atm(this_grid)%Bcast_ranks)
endif
call mpp_set_current_pelist(global_pelist)

call timing_off('COMM_TOTAL')
if (process) then
call fill_nested_grid(Atm(this_grid)%parent2nest_2d, g_dat(isg:,jsg:,1), &
Atm(this_grid)%neststruct%ind_h, Atm(this_grid)%neststruct%wt_h, &
0, 0, isg, ieg, jsg, jeg, Atm(this_grid)%bd)
endif

deallocate(g_dat)
deallocate(global_pelist)

end subroutine fill_nested_grid_cpl

3 changes: 1 addition & 2 deletions model/fv_arrays.F90
@@ -1303,7 +1303,7 @@ module fv_arrays_mod
integer, allocatable, dimension(:) :: pelist

! These are set in fv_control_init() and used in fill_nested_grid_cpl()
! to replace numerous p2p MPI transfers with a single MPI_Bcast
! to replace numerous p2p MPI transfers with a single mpp_broadcast()
integer, allocatable :: Bcast_ranks(:)
integer :: Bcast_comm, sending_proc

@@ -2070,7 +2070,6 @@ subroutine deallocate_fv_atmos_type(Atm)
endif
#endif
if(allocated(Atm%Bcast_ranks)) deallocate(Atm%Bcast_ranks)
if(Atm%Bcast_comm /= MPI_COMM_NULL) call MPI_Comm_free(Atm%Bcast_comm,ierr)
end if

if (Atm%flagstruct%grid_type < 4) then
40 changes: 16 additions & 24 deletions model/fv_control.F90
@@ -188,10 +188,6 @@ module fv_control_mod

subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split, &
nml_filename_in, skip_nml_read_in)
use mpi
use esmf
integer :: fcst_mpi_comm, color, ierr, rc
type(ESMF_VM) :: vm

type(fv_atmos_type), allocatable, intent(inout), target :: Atm(:)
real, intent(in) :: dt_atmos
@@ -456,10 +452,6 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split,
allocate(global_pelist(npes))
call mpp_get_current_pelist(global_pelist, commID=global_commID) ! for commID

! Need the fcst_mpi_comm in order to construct the communicators used by MPI_Bcast in fill_nested_grid_cpl()
call ESMF_VMGetCurrent(vm=vm,rc=rc)
call ESMF_VMGet(vm=vm, mpiCommunicator=fcst_mpi_comm, rc=rc)

allocate(grids_master_procs(ngrids))
pecounter = 0
allocate(grids_on_this_pe(ngrids))
@@ -681,22 +673,6 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split,
tile_id = mpp_get_tile_id(Atm(n)%domain)
Atm(n)%global_tile = tile_id(1) ! only meaningful locally
Atm(n)%npes_per_tile = size(Atm(n)%pelist)/Atm(n)%flagstruct%ntiles ! domain decomp doesn't set this globally

! Construct the MPI communicators that are used in fill_nested_grid_cpl()
if(n>1) then
Atm(n)%sending_proc = Atm(n)%parent_grid%pelist(1) + &
( Atm(n)%neststruct%parent_tile-tile_fine(Atm(n)%parent_grid%grid_number)+ &
Atm(n)%parent_grid%flagstruct%ntiles-1 )*Atm(n)%parent_grid%npes_per_tile
allocate(Atm(n)%Bcast_ranks(0:size(Atm(n)%pelist)))

Atm(n)%Bcast_ranks(0)=Atm(n)%sending_proc ! parent grid sending rank within the soon to be created Bcast_comm
Atm(n)%Bcast_ranks(1:size(Atm(n)%pelist))=Atm(n)%pelist ! Receivers

color=0
if(any(mpp_pe() == Atm(n)%Bcast_ranks)) color=1
call MPI_Comm_split(fcst_mpi_comm, color, mpp_pe(), Atm(n)%Bcast_comm, ierr)
if(ierr /= MPI_SUCCESS) print*,'fv_control_init: MPI_Comm_split',n,ierr
endif
enddo

! 6. Set up domain and Atm structure
@@ -757,6 +733,22 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split,
Atm(this_grid)%neststruct%parent_proc = ANY(Atm(this_grid)%neststruct%child_grids) !ANY(tile_coarse == Atm(this_grid)%global_tile)
Atm(this_grid)%neststruct%child_proc = ASSOCIATED(Atm(this_grid)%parent_grid) !this means a nested grid

call mpp_set_current_pelist( global_pelist )
do n=1,ngrids
! Declare the pelists that are used by mpp_broadcast() in fill_nested_grid_cpl()
if(n>1) then
Atm(n)%sending_proc = Atm(n)%parent_grid%pelist(1) + &
( Atm(n)%neststruct%parent_tile-tile_fine(Atm(n)%parent_grid%grid_number)+ &
Atm(n)%parent_grid%flagstruct%ntiles-1 )*Atm(n)%parent_grid%npes_per_tile
allocate(Atm(n)%Bcast_ranks(0:size(Atm(n)%pelist)))

Atm(n)%Bcast_ranks(0)=Atm(n)%sending_proc ! parent-grid sending rank, first entry of the broadcast pelist
Atm(n)%Bcast_ranks(1:size(Atm(n)%pelist))=Atm(n)%pelist ! Receivers
call mpp_declare_pelist(Atm(n)%Bcast_ranks)
endif
enddo
call mpp_set_current_pelist(Atm(this_grid)%pelist)

if (ngrids > 1) call setup_update_regions
if (Atm(this_grid)%neststruct%nestbctype > 1) then
call mpp_error(FATAL, 'nestbctype > 1 not yet implemented')
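As a hypothetical worked example of the sending_proc arithmetic in the new block above (the numbers are illustrative, not from any particular configuration): for a six-tile parent grid whose pelist starts at rank 0, with npes_per_tile = 24, tile_fine(parent grid) = 6, and a nest attached to parent_tile = 5, the offset is (5 - 6 + 6 - 1) * 24 = 96, so sending_proc = 0 + 96 = 96, the first parent rank on tile 5. That rank becomes Bcast_ranks(0), the nest's own pelist fills Bcast_ranks(1:), and mpp_broadcast is given that pelist as its sender and receivers.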
