
Commit

Get rid of mpp pelist scoping calls in fill_nested_grid_cpl(). This depends on NOAA-GFDL/FMS#1246 to be functional. More efficient 'if' test in fill_nested_grid_cpl()
Authored and committed by Dan Kokron, Jun 9, 2023
1 parent c7313bf commit 407792c
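
In outline, the commit swaps a broadcast wrapped in pelist re-scoping for a broadcast over a pelist declared once at setup. Below is a minimal sketch of the two patterns in a hypothetical `broadcast_patterns` subroutine; `ranks`, `root`, and `buf` are illustrative names, not identifiers from this commit, and the direct-broadcast form assumes an FMS library that satisfies the NOAA-GFDL/FMS#1246 dependency noted above.

```fortran
! Sketch only (not the committed code): the broadcast pattern being removed
! versus the one being adopted.  'ranks', 'root', and 'buf' are illustrative.
subroutine broadcast_patterns(ranks, root, buf)
  use mpp_mod, only: mpp_pe, mpp_npes, mpp_broadcast, &
                     mpp_set_current_pelist, mpp_get_current_pelist
  implicit none
  integer, intent(in)    :: ranks(:)     ! sender plus receiving PEs
  integer, intent(in)    :: root         ! sending PE (global rank)
  real,    intent(inout) :: buf(:,:,:)
  integer, allocatable   :: saved_pelist(:)

  ! Old pattern: save the current pelist, re-scope it to the broadcast group,
  ! broadcast, then restore the saved pelist -- on every call.
  allocate(saved_pelist(mpp_npes()))
  call mpp_get_current_pelist(saved_pelist)
  if (any(mpp_pe() == ranks)) then
     call mpp_set_current_pelist(ranks)
     call mpp_broadcast(buf, size(buf), root, ranks)
  end if
  call mpp_set_current_pelist(saved_pelist)
  deallocate(saved_pelist)

  ! New pattern: 'ranks' was declared once at setup (mpp_declare_pelist),
  ! so members broadcast over it directly with no pelist swap.  The committed
  ! code also uses a cheaper membership test here (see atmosphere.F90 below).
  if (any(mpp_pe() == ranks)) then
     call mpp_broadcast(buf, size(buf), ranks(1), ranks)
  end if
end subroutine broadcast_patterns
```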
Showing 3 changed files with 17 additions and 20 deletions.
13 changes: 4 additions & 9 deletions driver/fvGFS/atmosphere.F90
@@ -168,7 +168,7 @@ module atmosphere_mod
mpp_sync, mpp_sync_self, mpp_send, mpp_recv, mpp_broadcast
use mpp_parameter_mod, only: EUPDATE, WUPDATE, SUPDATE, NUPDATE
use mpp_domains_mod, only: CENTER, CORNER, NORTH, EAST, WEST, SOUTH
use mpp_domains_mod, only: domain2d, mpp_update_domains, mpp_global_field
use mpp_domains_mod, only: domain2d, mpp_update_domains, mpp_global_field, mpp_domain_is_tile_root_pe
use mpp_domains_mod, only: mpp_get_data_domain, mpp_get_compute_domain, mpp_get_global_domain
use xgrid_mod, only: grid_box_type
use field_manager_mod, only: MODEL_ATMOS
@@ -2433,7 +2433,6 @@ subroutine fill_nested_grid_cpl(this_grid, proc_in)
logical, intent(in), optional :: proc_in

real, allocatable :: g_dat(:,:,:)
integer, allocatable :: global_pelist(:)
integer :: p
integer :: isd_p, ied_p, jsd_p, jed_p
integer :: isg, ieg, jsg, jeg
@@ -2460,13 +2459,9 @@ subroutine fill_nested_grid_cpl(this_grid, proc_in)
g_dat(isg:,jsg:,1), position=CENTER)
endif

allocate(global_pelist(mpp_npes()))
call mpp_get_current_pelist(global_pelist)
if(any(mpp_pe() == Atm(this_grid)%Bcast_ranks)) then
call mpp_set_current_pelist(Atm(this_grid)%Bcast_ranks)
call mpp_broadcast(g_dat, size(g_dat),Atm(this_grid)%sending_proc, Atm(this_grid)%Bcast_ranks)
if(Atm(this_grid)%is_fine_pe .or. mpp_domain_is_tile_root_pe(Atm(this_grid)%parent_grid%domain)) then
call mpp_broadcast(g_dat, size(g_dat),Atm(this_grid)%Bcast_ranks(1), Atm(this_grid)%Bcast_ranks)
endif
call mpp_set_current_pelist(global_pelist)

call timing_off('COMM_TOTAL')
if (process) then
@@ -2475,7 +2470,7 @@ subroutine fill_nested_grid_cpl(this_grid, proc_in)
0, 0, isg, ieg, jsg, jeg, Atm(this_grid)%bd)
endif
deallocate(g_dat)
deallocate(global_pelist)
!deallocate(global_pelist)

end subroutine fill_nested_grid_cpl

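On the "more efficient 'if' test" in the hunk above: the old guard scanned the whole Bcast_ranks list with `any(mpp_pe() == ...)` on every call, while the new one combines the cached `is_fine_pe` flag with a single `mpp_domain_is_tile_root_pe()` query on the parent domain. A hedged sketch of the two guards follows; `ranks`, `is_fine`, and `parent_domain` are hypothetical stand-ins for the corresponding `Atm(this_grid)` components, and `mpp_domain_is_tile_root_pe` requires the FMS release carrying NOAA-GFDL/FMS#1246.

```fortran
! Illustrative sketch of the old and new guards ahead of the broadcast in
! fill_nested_grid_cpl().  Argument names are stand-ins, not committed code.
subroutine bcast_guards(ranks, is_fine, parent_domain, old_guard, new_guard)
  use mpp_mod,         only: mpp_pe
  use mpp_domains_mod, only: domain2d, mpp_domain_is_tile_root_pe
  implicit none
  integer,        intent(in)  :: ranks(:)       ! broadcast pelist
  logical,        intent(in)  :: is_fine        ! cached once in fv_control_init
  type(domain2d), intent(in)  :: parent_domain
  logical,        intent(out) :: old_guard, new_guard

  ! Old: scans the whole rank list on every call.
  old_guard = any(mpp_pe() == ranks)

  ! New: one cached logical plus one domain query, no list scan.
  new_guard = is_fine .or. mpp_domain_is_tile_root_pe(parent_domain)
end subroutine bcast_guards
```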
1 change: 1 addition & 0 deletions model/fv_arrays.F90
@@ -1306,6 +1306,7 @@ module fv_arrays_mod
! to replace numerous p2p MPI transfers with a single mpp_broadcast()
integer, allocatable :: Bcast_ranks(:)
integer :: Bcast_comm, sending_proc
logical :: is_fine_pe

type(fv_grid_bounds_type) :: bd

23 changes: 12 additions & 11 deletions model/fv_control.F90
@@ -144,7 +144,7 @@ module fv_control_mod
use test_cases_mod, only: read_namelist_test_case_nml
use fv_timing_mod, only: timing_on, timing_off, timing_init, timing_prt
use mpp_domains_mod, only: domain2D
use mpp_domains_mod, only: mpp_define_nest_domains, nest_domain_type, mpp_get_global_domain
use mpp_domains_mod, only: mpp_define_nest_domains, nest_domain_type, mpp_get_global_domain, mpp_is_nest_fine
use mpp_domains_mod, only: mpp_get_C2F_index, mpp_get_F2C_index
use mpp_domains_mod, only: CENTER, CORNER, NORTH, EAST, WEST, SOUTH
use mpp_mod, only: mpp_send, mpp_sync, mpp_transmit, mpp_set_current_pelist, &
@@ -733,21 +733,22 @@ subroutine fv_control_init(Atm, dt_atmos, this_grid, grids_on_this_pe, p_split,
Atm(this_grid)%neststruct%parent_proc = ANY(Atm(this_grid)%neststruct%child_grids) !ANY(tile_coarse == Atm(this_grid)%global_tile)
Atm(this_grid)%neststruct%child_proc = ASSOCIATED(Atm(this_grid)%parent_grid) !this means a nested grid

call mpp_set_current_pelist( global_pelist )
do n=1,ngrids
if(n>1) then
call mpp_set_current_pelist( global_pelist )
do n=2,ngrids
! Construct the MPI communicators that are used in fill_nested_grid_cpl()
if(n>1) then
Atm(n)%sending_proc = Atm(n)%parent_grid%pelist(1) + &
( Atm(n)%neststruct%parent_tile-tile_fine(Atm(n)%parent_grid%grid_number)+ &
Atm(n)%parent_grid%flagstruct%ntiles-1 )*Atm(n)%parent_grid%npes_per_tile
allocate(Atm(n)%Bcast_ranks(0:size(Atm(n)%pelist)))
allocate(Atm(n)%Bcast_ranks(1+size(Atm(n)%pelist)))

Atm(n)%Bcast_ranks(0)=Atm(n)%sending_proc ! parent grid sending rank within the soon to be created Bcast_comm
Atm(n)%Bcast_ranks(1:size(Atm(n)%pelist))=Atm(n)%pelist ! Receivers
call mpp_declare_pelist(Atm(n)%Bcast_ranks)
endif
enddo
call mpp_set_current_pelist(Atm(this_grid)%pelist)
Atm(n)%Bcast_ranks(1)=Atm(n)%sending_proc ! parent grid sending rank within the soon to be created Bcast_comm
Atm(n)%Bcast_ranks(2:(1+size(Atm(n)%pelist)))=Atm(n)%pelist ! Receivers
call mpp_declare_pelist(Atm(n)%Bcast_ranks(:))
Atm(n)%is_fine_pe = mpp_is_nest_fine(global_nest_domain, 1)
enddo
call mpp_set_current_pelist(Atm(this_grid)%pelist)
endif

if (ngrids > 1) call setup_update_regions
if (Atm(this_grid)%neststruct%nestbctype > 1) then
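To make the fv_control.F90 change above easier to follow: the per-nest broadcast group is now stored 1-based, with the parent grid's sending rank in element 1 and the nest's own pelist in elements 2 through size(pelist)+1, and each nested grid also caches `is_fine_pe` from `mpp_is_nest_fine()` for the new guard in atmosphere.F90. Below is a hedged sketch of the pelist assembly only, with `sender`, `nest_pelist`, and `ranks` as stand-ins for `Atm(n)%sending_proc`, `Atm(n)%pelist`, and `Atm(n)%Bcast_ranks`.

```fortran
! Sketch of how the per-nest broadcast pelist is assembled (illustrative
! names, not the committed code).  As in the commit, mpp_declare_pelist is
! called while the global pelist is current so every PE sees the new group.
subroutine build_bcast_pelist(sender, nest_pelist, ranks)
  use mpp_mod, only: mpp_declare_pelist
  implicit none
  integer, intent(in)               :: sender          ! parent-grid sending rank
  integer, intent(in)               :: nest_pelist(:)  ! receivers (nest PEs)
  integer, allocatable, intent(out) :: ranks(:)

  allocate(ranks(1 + size(nest_pelist)))
  ranks(1)                       = sender       ! element 1: the sender
  ranks(2:1 + size(nest_pelist)) = nest_pelist  ! elements 2..N+1: receivers
  call mpp_declare_pelist(ranks)
end subroutine build_bcast_pelist
```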
