diff --git a/cicecore/cicedyn/dynamics/ice_dyn_core1d.F90 b/cicecore/cicedyn/dynamics/ice_dyn_core1d.F90 index 6cbfbed11..95dfaf96a 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_core1d.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_core1d.F90 @@ -70,7 +70,7 @@ module ice_dyn_core1d ! arguments ------------------------------------------------------------------ subroutine stress_1d (ee, ne, se, lb, ub, & uvel, vvel, dxT, dyT, skipme, strength, & - hte, htn, htem1, htnm1, & + cxp, cyp, cxm, cym, dxhy, dyhx, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & @@ -94,10 +94,9 @@ subroutine stress_1d (ee, ne, se, lb, ub, & vvel , & ! y-component of velocity (m/s) dxT , & ! width of T-cell through the middle (m) dyT , & ! height of T-cell through the middle (m) - hte , & - htn , & - htem1 , & - htnm1 + cxp, cyp , & ! grid metrics + cxm, cym , & ! + dxhy, dyhx ! real (kind=dbl_kind), dimension(:), intent(inout), contiguous :: & stressp_1, stressp_2, stressp_3, stressp_4 , & ! sigma11+sigma22 @@ -165,7 +164,8 @@ subroutine stress_1d (ee, ne, se, lb, ub, & !$omp tmp_cxp, tmp_cyp, tmp_cxm, tmp_cym , & !$omp tmp_strength, tmp_DminTarea, tmparea , & !$omp tmp_dxhy, tmp_dyhx) & - !$omp shared(uvel,vvel,dxT,dyT,htn,hte,htnm1,htem1 , & + !$omp shared(uvel,vvel,dxT,dyT , & + !$omp cxp, cyp, cxm, cym, dxhy, dyhx , & !$omp str1,str2,str3,str4,str5,str6,str7,str8 , & !$omp stressp_1,stressp_2,stressp_3,stressp_4 , & !$omp stressm_1,stressm_2,stressm_3,stressm_4 , & @@ -188,15 +188,15 @@ subroutine stress_1d (ee, ne, se, lb, ub, & tmp_uvel_se = uvel(se(iw)) tmp_dxT = dxT(iw) tmp_dyT = dyT(iw) - tmp_cxp = c1p5 * htn(iw) - p5 * htnm1(iw) - tmp_cyp = c1p5 * hte(iw) - p5 * htem1(iw) - tmp_cxm = -(c1p5 * htnm1(iw) - p5 * htn(iw)) - tmp_cym = -(c1p5 * htem1(iw) - p5 * hte(iw)) + tmp_cxp = cxp(iw) + tmp_cyp = cyp(iw) + tmp_cxm = cxm(iw) + tmp_cym = cym(iw) tmp_strength = strength(iw) tmparea = dxT(iw) * dyT(iw) ! necessary to split calc of DminTarea. Otherwize not binary identical tmp_DminTarea = deltaminEVP * tmparea - tmp_dxhy = p5 * (hte(iw) - htem1(iw)) - tmp_dyhx = p5 * (htn(iw) - htnm1(iw)) + tmp_dxhy = dxhy(iw) + tmp_dyhx = dyhx(iw) !-------------------------------------------------------------------------- ! strain rates - NOTE these are actually strain rates * area (m^2/s) diff --git a/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 index 3ee5d2a53..a9d4aa2a1 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp.F90 @@ -121,7 +121,8 @@ module ice_dyn_evp subroutine init_evp use ice_blocks, only: nx_block, ny_block, nghost use ice_domain_size, only: max_blocks, nx_global, ny_global - use ice_grid, only: grid_ice, dyT, dxT, uarear, tmask, G_HTE, G_HTN + use ice_grid, only: grid_ice, dyT, dxT, uarear, tmask, & + cxp, cyp, cxm, cym, dxhy, dyhx use ice_calendar, only: dt_dyn use ice_dyn_shared, only: init_dyn_shared, evp_algorithm use ice_dyn_evp1d, only: dyn_evp1d_init @@ -136,7 +137,8 @@ subroutine init_evp if (evp_algorithm == "shared_mem_1d" ) then call dyn_evp1d_init(nx_global , ny_global, nx_block, ny_block, & max_blocks, nghost , dyT , dxT , & - uarear , tmask , G_HTE , G_HTN) + uarear , tmask , cxp , cyp , & + cxm , cym , dxhy , dyhx) endif allocate( uocnU (nx_block,ny_block,max_blocks), & ! 
i ocean current (m/s) diff --git a/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 b/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 index 50694b42c..dcab4818f 100644 --- a/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 +++ b/cicecore/cicedyn/dynamics/ice_dyn_evp1d.F90 @@ -35,7 +35,6 @@ module ice_dyn_evp1d ! indexes integer(kind=int_kind), allocatable, dimension(:,:) :: iwidx logical(kind=log_kind), allocatable, dimension(:) :: skipTcell,skipUcell - real (kind=dbl_kind), allocatable, dimension(:) :: HTE,HTN, HTEm1,HTNm1 integer(kind=int_kind), allocatable, dimension(:) :: ee,ne,se,nw,sw,sse ! arrays for neighbour points integer(kind=int_kind), allocatable, dimension(:) :: indxti, indxtj, indxTij @@ -43,7 +42,7 @@ module ice_dyn_evp1d real(kind=dbl_kind) , allocatable, dimension(:) :: & cdn_ocn,aiu,uocn,vocn,waterxU,wateryU,forcexU,forceyU,umassdti,fmU,uarear, & strintxU,strintyU,uvel_init,vvel_init, & - strength, uvel, vvel, dxt, dyt, & + strength, uvel, vvel, dxt, dyt, cxp, cyp, cxm, cym, dxhy, dyhx, & stressp_1, stressp_2, stressp_3, stressp_4, stressm_1, stressm_2, & stressm_3, stressm_4, stress12_1, stress12_2, stress12_3, stress12_4, & str1, str2, str3, str4, str5, str6, str7, str8, Tbu, Cb @@ -68,26 +67,30 @@ module ice_dyn_evp1d ! This follows CICE naming ! In addition all water points are assumed to be active and allocated thereafter. subroutine dyn_evp1d_init(nx_global, ny_global, cnx_block, cny_block, & - cmax_block, cnghost , & - L_dyT, L_dxT, L_uarear, L_tmask, G_HTE, G_HTN) + cmax_block, cnghost, & + L_dyT, L_dxT, L_uarear, L_tmask, & + L_cxp, L_cyp, L_cxm, L_cym, L_dxhy, L_dyhx) + use ice_communicate, only : master_task + use ice_gather_scatter, only : gather_global_ext + use ice_domain, only : distrb_info ! Note that TMask is ocean/land implicit none integer(kind=int_kind), intent(in) :: & nx_global, ny_global, cnx_block, cny_block, cmax_block, cnghost real(kind=dbl_kind) , dimension(:,:,:), intent(in) :: & - L_dyT, L_dxT, L_uarear + L_dyT, L_dxT, L_uarear, L_cxp, L_cyp, L_cxm, L_cym, L_dxhy, L_dyhx logical(kind=log_kind), dimension(:,:,:), intent(in) :: & L_tmask ! variables for Global arrays (domain size) - real(kind=dbl_kind) , dimension(:,:) , intent(in) :: G_HTE,G_HTN - real(kind=dbl_kind) , dimension(:,:) , allocatable :: G_dyT, G_dxT, G_uarear - logical(kind=log_kind), dimension(:,:) , allocatable :: G_tmask + real(kind=dbl_kind) , dimension(:,:), allocatable :: & + G_dyT, G_dxT, G_uarear, G_cxp, G_cyp, G_cxm, G_cym, G_dxhy, G_dyhx + logical(kind=log_kind), dimension(:,:), allocatable :: G_tmask + integer(kind=int_kind) :: ios, ierr character(len=*), parameter :: subname = '(dyn_evp1d_init)' - integer(kind=int_kind) :: ios, ierr nx_block=cnx_block ny_block=cny_block @@ -98,14 +101,26 @@ subroutine dyn_evp1d_init(nx_global, ny_global, cnx_block, cny_block, & ny=ny_global+2*nghost allocate(G_dyT(nx,ny),G_dxT(nx,ny),G_uarear(nx,ny),G_tmask(nx,ny),stat=ierr) + if (ierr/=0) then + call abort_ice(subname//' ERROR: allocating', file=__FILE__, line=__LINE__) + endif + allocate(G_cxp(nx,ny),G_cyp(nx,ny),G_cxm(nx,ny),G_cym(nx,ny),G_dxhy(nx,ny),G_dyhx(nx,ny),stat=ierr) if (ierr/=0) then call abort_ice(subname//' ERROR: allocating', file=__FILE__, line=__LINE__) endif - ! gather from blks to global - call gather_static(L_uarear, L_dxT, L_dyT, L_Tmask, & - G_uarear, G_dxT, G_dyT, G_Tmask) + ! 
copy from distributed I_* to G_* + call gather_global_ext(G_uarear, L_uarear, master_task, distrb_info) + call gather_global_ext(G_dxT , L_dxT , master_task, distrb_info) + call gather_global_ext(G_dyT , L_dyT , master_task, distrb_info) + call gather_global_ext(G_Tmask , L_Tmask , master_task, distrb_info) + call gather_global_ext(G_cxp , L_cxp , master_task, distrb_info) + call gather_global_ext(G_cyp , L_cyp , master_task, distrb_info) + call gather_global_ext(G_cxm , L_cxm , master_task, distrb_info) + call gather_global_ext(G_cym , L_cym , master_task, distrb_info) + call gather_global_ext(G_dxhy , L_dxhy , master_task, distrb_info) + call gather_global_ext(G_dyhx , L_dyhx , master_task, distrb_info) ! calculate number of water points (T and U). Only needed for the static version ! Tmask in ocean/ice @@ -116,10 +131,14 @@ subroutine dyn_evp1d_init(nx_global, ny_global, cnx_block, cny_block, & call calc_navel(nActive, navel) call evp1d_alloc_static_navel(navel) call numainit(1,nActive,navel) - call convert_2d_1d_init(nActive,G_HTE, G_HTN, G_uarear, G_dxT, G_dyT) + call convert_2d_1d_init(nActive, G_uarear, G_dxT, G_dyT, & + G_cxp, G_cyp, G_cxm, G_cym, G_dxhy, G_dyhx) call evp1d_alloc_static_halo() endif + deallocate(G_dyT,G_dxT,G_uarear,G_tmask) + deallocate(G_cxp, G_cyp, G_cxm, G_cym, G_dxhy, G_dyhx) + end subroutine dyn_evp1d_init !============================================================================= @@ -223,7 +242,8 @@ subroutine dyn_evp1d_run(L_stressp_1 , L_stressp_2 , L_stressp_3 , L_stressp_4 , call ice_timer_start(timer_evp1dcore) #ifdef _OPENMP_TARGET !$omp target data map(to: ee, ne, se, nw, sw, sse, skipUcell, skipTcell,& - !$omp strength, dxT, dyT, HTE,HTN,HTEm1, HTNm1, & + !$omp strength, dxT, dyT, & + !$omp cxp, cyp, cxm, cym, dxhy, dyhx, & !$omp forcexU, forceyU, umassdti, fmU, uarear, & !$omp uvel_init, vvel_init, Tbu, Cb, & !$omp str1, str2, str3, str4, str5, str6, str7, str8, & @@ -247,7 +267,7 @@ subroutine dyn_evp1d_run(L_stressp_1 , L_stressp_2 , L_stressp_3 , L_stressp_4 , do ksub = 1,ndte ! subcycling call stress_1d (ee, ne, se, 1, nActive, & uvel, vvel, dxT, dyT, skipTcell, strength, & - HTE, HTN, HTEm1, HTNm1, & + cxp, cyp, cxm, cym, dxhy, dyhx, & stressp_1, stressp_2, stressp_3, stressp_4, & stressm_1, stressm_2, stressm_3, stressm_4, & stress12_1, stress12_2, stress12_3, stress12_4, & @@ -369,12 +389,14 @@ subroutine evp1d_alloc_static_na(na0) call abort_ice(subname//' ERROR: allocating', file=__FILE__, line=__LINE__) endif - allocate( HTE (1:na0), & - HTN (1:na0), & - HTEm1 (1:na0), & - HTNm1 (1:na0), & - dxt (1:na0), & + allocate( dxt (1:na0), & dyt (1:na0), & + cxp (1:na0), & + cyp (1:na0), & + cxm (1:na0), & + cym (1:na0), & + dxhy (1:na0), & + dyhx (1:na0), & strength (1:na0), & stressp_1 (1:na0), & stressp_2 (1:na0), & @@ -595,29 +617,6 @@ subroutine union(x, y, xdim, ydim, xy, nxy) nxy = k - 1 end subroutine union - !============================================================================= - subroutine gather_static(L_uarear, L_dxT, L_dyT,L_Tmask, G_uarear,G_dxT,G_dyT, G_Tmask) - ! In standalone distrb_info is an integer. 
Not needed anyway - use ice_communicate, only : master_task - use ice_gather_scatter, only : gather_global_ext - use ice_domain, only : distrb_info - implicit none - - real(kind=dbl_kind) , dimension(nx_block, ny_block, max_block), intent(in) :: & - L_uarear, L_dxT, L_dyT - - logical(kind=log_kind), dimension(nx_block, ny_block, max_block), intent(in) :: L_Tmask - real(kind=dbl_kind) , dimension(:, :), intent(out) :: G_uarear,G_dxT,G_dyT - logical(kind=log_kind), dimension(:, :), intent(out) :: G_Tmask - character(len=*), parameter :: subname = '(gather_static)' - - ! copy from distributed I_* to G_* - call gather_global_ext(G_uarear, L_uarear, master_task, distrb_info) - call gather_global_ext(G_dxT , L_dxT , master_task, distrb_info) - call gather_global_ext(G_dyT , L_dyT , master_task, distrb_info) - call gather_global_ext(G_Tmask , L_Tmask , master_task, distrb_info) - end subroutine gather_static - !============================================================================= subroutine gather_dyn(L_stressp_1 , L_stressp_2 , L_stressp_3 , L_stressp_4 , & L_stressm_1 , L_stressm_2 , L_stressm_3 , L_stressm_4 , & @@ -766,12 +765,13 @@ subroutine scatter_dyn(L_stressp_1 , L_stressp_2 , L_stressp_3 , L_stressp_4 , & end subroutine scatter_dyn !============================================================================= - subroutine convert_2d_1d_init(na0, G_HTE, G_HTN, G_uarear, G_dxT, G_dyT) + subroutine convert_2d_1d_init(na0, G_uarear, G_dxT, G_dyT, G_cxp, G_cyp, G_cxm, G_cym, G_dxhy, G_dyhx) implicit none integer(kind=int_kind), intent(in) :: na0 - real (kind=dbl_kind), dimension(:, :), intent(in) :: G_HTE, G_HTN, G_uarear, G_dxT, G_dyT + real (kind=dbl_kind), dimension(:, :), intent(in) :: & + G_uarear, G_dxT, G_dyT, G_cxp, G_cyp, G_cxm, G_cym, G_dxhy, G_dyhx ! local variables integer(kind=int_kind) :: iw, lo, up, j, i @@ -835,10 +835,12 @@ subroutine convert_2d_1d_init(na0, G_HTE, G_HTN, G_uarear, G_dxT, G_dyT) uarear(iw) = G_uarear(i, j) dxT(iw) = G_dxT(i, j) dyT(iw) = G_dyT(i, j) - HTE(iw) = G_HTE(i, j) - HTN(iw) = G_HTN(i, j) - HTEm1(iw) = G_HTE(i - 1, j) - HTNm1(iw) = G_HTN(i, j - 1) + cxp(iw) = G_cxp(i, j) + cyp(iw) = G_cyp(i, j) + cxm(iw) = G_cxm(i, j) + cym(iw) = G_cym(i, j) + dxhy(iw) = G_dxhy(i, j) + dyhx(iw) = G_dyhx(i, j) end do end subroutine convert_2d_1d_init @@ -1142,15 +1144,17 @@ subroutine numainit(lo,up,uu) aiu(iw)=c0 Cb(iw)=c0 cdn_ocn(iw)=c0 + cxp(iw)=c0 + cyp(iw)=c0 + cxm(iw)=c0 + cym(iw)=c0 + dxhy(iw)=c0 + dyhx(iw)=c0 dxt(iw)=c0 dyt(iw)=c0 fmU(iw)=c0 forcexU(iw)=c0 forceyU(iw)=c0 - HTE(iw)=c0 - HTEm1(iw)=c0 - HTN(iw)=c0 - HTNm1(iw)=c0 strength(iw)= c0 stress12_1(iw)=c0 stress12_2(iw)=c0 diff --git a/cicecore/cicedyn/general/ice_init.F90 b/cicecore/cicedyn/general/ice_init.F90 index 0519141fb..0c3fc142e 100644 --- a/cicecore/cicedyn/general/ice_init.F90 +++ b/cicecore/cicedyn/general/ice_init.F90 @@ -105,7 +105,7 @@ subroutine input_data grid_ocn, grid_ocn_thrm, grid_ocn_dynu, grid_ocn_dynv, & grid_atm, grid_atm_thrm, grid_atm_dynu, grid_atm_dynv, & dxrect, dyrect, dxscale, dyscale, scale_dxdy, & - lonrefrect, latrefrect, pgl_global_ext + lonrefrect, latrefrect use ice_dyn_shared, only: ndte, kdyn, revised_evp, yield_curve, & evp_algorithm, visc_method, & seabed_stress, seabed_stress_method, & @@ -375,7 +375,6 @@ subroutine input_data ndte = 120 ! subcycles per dynamics timestep: ndte=dt_dyn/dte evp_algorithm = 'standard_2d' ! EVP kernel (standard_2d=standard cice evp; shared_mem_1d=1d shared memory and no mpi elasticDamp = 0.36_dbl_kind ! 
coefficient for calculating the parameter E - pgl_global_ext = .false. ! if true, init primary grid lengths (global ext.) brlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared arlx = 300.0_dbl_kind ! revised_evp values. Otherwise overwritten in ice_dyn_shared revised_evp = .false. ! if true, use revised procedure for evp dynamics @@ -962,7 +961,6 @@ subroutine input_data call broadcast_scalar(ndte, master_task) call broadcast_scalar(evp_algorithm, master_task) call broadcast_scalar(elasticDamp, master_task) - call broadcast_scalar(pgl_global_ext, master_task) call broadcast_scalar(brlx, master_task) call broadcast_scalar(arlx, master_task) call broadcast_scalar(revised_evp, master_task) @@ -1840,11 +1838,10 @@ subroutine input_data tmpstr2 = ' : standard 2d EVP solver' elseif (evp_algorithm == 'shared_mem_1d') then tmpstr2 = ' : vectorized 1d EVP solver' - pgl_global_ext = .true. else tmpstr2 = ' : unknown value' endif - write(nu_diag,1031) ' evp_algorithm = ', trim(evp_algorithm),trim(tmpstr2) + write(nu_diag,1030) ' evp_algorithm = ', trim(evp_algorithm),trim(tmpstr2) write(nu_diag,1020) ' ndtd = ', ndtd, ' : number of dynamics/advection/ridging/steps per thermo timestep' write(nu_diag,1020) ' ndte = ', ndte, ' : number of EVP or EAP subcycles' endif @@ -2277,17 +2274,17 @@ subroutine input_data write(nu_diag,1010) ' use_smliq_pnd = ', use_smliq_pnd, trim(tmpstr2) if (snw_aging_table == 'test') then tmpstr2 = ' : Using 5x5x1 test matrix of internallly defined snow aging parameters' - write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + write(nu_diag,1030) ' snw_aging_table = ',trim(snw_aging_table),trim(tmpstr2) elseif (snw_aging_table == 'snicar') then tmpstr2 = ' : Reading 3D snow aging parameters from SNICAR file' - write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + write(nu_diag,1030) ' snw_aging_table = ',trim(snw_aging_table),trim(tmpstr2) write(nu_diag,1031) ' snw_filename = ',trim(snw_filename) write(nu_diag,1031) ' snw_tau_fname = ',trim(snw_tau_fname) write(nu_diag,1031) ' snw_kappa_fname = ',trim(snw_kappa_fname) write(nu_diag,1031) ' snw_drdt0_fname = ',trim(snw_drdt0_fname) elseif (snw_aging_table == 'file') then tmpstr2 = ' : Reading 1D and 3D snow aging dimensions and parameters from external file' - write(nu_diag,1030) ' snw_aging_table = ', trim(snw_aging_table),trim(tmpstr2) + write(nu_diag,1030) ' snw_aging_table = ',trim(snw_aging_table),trim(tmpstr2) write(nu_diag,1031) ' snw_filename = ',trim(snw_filename) write(nu_diag,1031) ' snw_rhos_fname = ',trim(snw_rhos_fname) write(nu_diag,1031) ' snw_Tgrd_fname = ',trim(snw_Tgrd_fname) diff --git a/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 index 2a7d68c11..9d0a1c8bd 100644 --- a/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/mpi/ice_boundary.F90 @@ -113,8 +113,7 @@ module ice_boundary ice_HaloUpdate, & ice_HaloUpdate_stress, & ice_HaloExtrapolate, & - ice_HaloDestroy, & - primary_grid_lengths_global_ext + ice_HaloDestroy interface ice_HaloUpdate ! generic interface module procedure ice_HaloUpdate2DR8, & @@ -7166,133 +7165,6 @@ subroutine ice_HaloDestroy(halo) endif end subroutine ice_HaloDestroy -!*********************************************************************** - - subroutine primary_grid_lengths_global_ext( & - ARRAY_O, ARRAY_I, ew_boundary_type, ns_boundary_type) - -! 
This subroutine adds ghost cells to global primary grid lengths array -! ARRAY_I and outputs result to array ARRAY_O - - use ice_constants, only: c0 - use ice_domain_size, only: nx_global, ny_global - - real (kind=dbl_kind), dimension(:,:), intent(in) :: & - ARRAY_I - - character (*), intent(in) :: & - ew_boundary_type, ns_boundary_type - - real (kind=dbl_kind), dimension(:,:), intent(out) :: & - ARRAY_O - -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (kind=int_kind) :: & - ii, io, ji, jo - - character(len=*), parameter :: & - subname = '(primary_grid_lengths_global_ext)' - -!----------------------------------------------------------------------- -! -! add ghost cells to global primary grid lengths array -! -!----------------------------------------------------------------------- - - if (trim(ns_boundary_type) == 'tripole' .or. & - trim(ns_boundary_type) == 'tripoleT') then - call abort_ice(subname//' ERROR: '//ns_boundary_type & - //' boundary type not implemented for configuration') - endif - - do jo = 1,ny_global+2*nghost - ji = -nghost + jo - - !*** Southern ghost cells - - if (ji < 1) then - select case (trim(ns_boundary_type)) - case ('cyclic') - ji = ji + ny_global - case ('open') - ji = nghost - jo + 1 - case ('closed') - ji = 0 - case default - call abort_ice( & - subname//' ERROR: unknown north-south boundary type') - end select - endif - - !*** Northern ghost cells - - if (ji > ny_global) then - select case (trim(ns_boundary_type)) - case ('cyclic') - ji = ji - ny_global - case ('open') - ji = 2 * ny_global - ji + 1 - case ('closed') - ji = 0 - case default - call abort_ice( & - subname//' ERROR: unknown north-south boundary type') - end select - endif - - do io = 1,nx_global+2*nghost - ii = -nghost + io - - !*** Western ghost cells - - if (ii < 1) then - select case (trim(ew_boundary_type)) - case ('cyclic') - ii = ii + nx_global - case ('open') - ii = nghost - io + 1 - case ('closed') - ii = 0 - case default - call abort_ice( & - subname//' ERROR: unknown east-west boundary type') - end select - endif - - !*** Eastern ghost cells - - if (ii > nx_global) then - select case (trim(ew_boundary_type)) - case ('cyclic') - ii = ii - nx_global - case ('open') - ii = 2 * nx_global - ii + 1 - case ('closed') - ii = 0 - case default - call abort_ice( & - subname//' ERROR: unknown east-west boundary type') - end select - endif - - if (ii == 0 .or. ji == 0) then - ARRAY_O(io, jo) = c0 - else - ARRAY_O(io, jo) = ARRAY_I(ii, ji) - endif - - enddo - enddo - -!----------------------------------------------------------------------- - - end subroutine primary_grid_lengths_global_ext - !*********************************************************************** end module ice_boundary diff --git a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 index faeaf3227..b9ac8fe33 100644 --- a/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 +++ b/cicecore/cicedyn/infrastructure/comm/serial/ice_boundary.F90 @@ -68,8 +68,7 @@ module ice_boundary ice_HaloUpdate, & ice_HaloUpdate_stress, & ice_HaloExtrapolate, & - ice_HaloDestroy, & - primary_grid_lengths_global_ext + ice_HaloDestroy interface ice_HaloUpdate ! 
generic interface module procedure ice_HaloUpdate2DR8, & @@ -4912,133 +4911,6 @@ subroutine ice_HaloDestroy(halo) end subroutine ice_HaloDestroy -!*********************************************************************** - - subroutine primary_grid_lengths_global_ext( & - ARRAY_O, ARRAY_I, ew_boundary_type, ns_boundary_type) - -! This subroutine adds ghost cells to global primary grid lengths array -! ARRAY_I and outputs result to array ARRAY_O - - use ice_constants, only: c0 - use ice_domain_size, only: nx_global, ny_global - - real (kind=dbl_kind), dimension(:,:), intent(in) :: & - ARRAY_I - - character (*), intent(in) :: & - ew_boundary_type, ns_boundary_type - - real (kind=dbl_kind), dimension(:,:), intent(out) :: & - ARRAY_O - -!----------------------------------------------------------------------- -! -! local variables -! -!----------------------------------------------------------------------- - - integer (kind=int_kind) :: & - ii, io, ji, jo - - character(len=*), parameter :: & - subname = '(primary_grid_lengths_global_ext)' - -!----------------------------------------------------------------------- -! -! add ghost cells to global primary grid lengths array -! -!----------------------------------------------------------------------- - - if (trim(ns_boundary_type) == 'tripole' .or. & - trim(ns_boundary_type) == 'tripoleT') then - call abort_ice(subname//' ERROR: '//ns_boundary_type & - //' boundary type not implemented for configuration') - endif - - do jo = 1,ny_global+2*nghost - ji = -nghost + jo - - !*** Southern ghost cells - - if (ji < 1) then - select case (trim(ns_boundary_type)) - case ('cyclic') - ji = ji + ny_global - case ('open') - ji = nghost - jo + 1 - case ('closed') - ji = 0 - case default - call abort_ice( & - subname//' ERROR: unknown north-south boundary type') - end select - endif - - !*** Northern ghost cells - - if (ji > ny_global) then - select case (trim(ns_boundary_type)) - case ('cyclic') - ji = ji - ny_global - case ('open') - ji = 2 * ny_global - ji + 1 - case ('closed') - ji = 0 - case default - call abort_ice( & - subname//' ERROR: unknown north-south boundary type') - end select - endif - - do io = 1,nx_global+2*nghost - ii = -nghost + io - - !*** Western ghost cells - - if (ii < 1) then - select case (trim(ew_boundary_type)) - case ('cyclic') - ii = ii + nx_global - case ('open') - ii = nghost - io + 1 - case ('closed') - ii = 0 - case default - call abort_ice( & - subname//' ERROR: unknown east-west boundary type') - end select - endif - - !*** Eastern ghost cells - - if (ii > nx_global) then - select case (trim(ew_boundary_type)) - case ('cyclic') - ii = ii - nx_global - case ('open') - ii = 2 * nx_global - ii + 1 - case ('closed') - ii = 0 - case default - call abort_ice( & - subname//' ERROR: unknown east-west boundary type') - end select - endif - - if (ii == 0 .or. 
ji == 0) then - ARRAY_O(io, jo) = c0 - else - ARRAY_O(io, jo) = ARRAY_I(ii, ji) - endif - - enddo - enddo - -!----------------------------------------------------------------------- - - end subroutine primary_grid_lengths_global_ext - !*********************************************************************** end module ice_boundary diff --git a/cicecore/cicedyn/infrastructure/ice_grid.F90 b/cicecore/cicedyn/infrastructure/ice_grid.F90 index 16dea4382..35ba026de 100644 --- a/cicecore/cicedyn/infrastructure/ice_grid.F90 +++ b/cicecore/cicedyn/infrastructure/ice_grid.F90 @@ -24,8 +24,7 @@ module ice_grid use ice_kinds_mod use ice_broadcast, only: broadcast_scalar, broadcast_array - use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate, & - primary_grid_lengths_global_ext + use ice_boundary, only: ice_HaloUpdate, ice_HaloExtrapolate use ice_communicate, only: my_task, master_task use ice_blocks, only: block, get_block, nx_block, ny_block, nghost use ice_domain_size, only: nx_global, ny_global, max_blocks @@ -106,10 +105,6 @@ module ice_grid ocn_gridcell_frac ! only relevant for lat-lon grids ! gridcell value of [1 - (land fraction)] (T-cell) - real (kind=dbl_kind), dimension (:,:), allocatable, public :: & - G_HTE , & ! length of eastern edge of T-cell (global ext.) - G_HTN ! length of northern edge of T-cell (global ext.) - real (kind=dbl_kind), dimension (:,:,:), allocatable, public :: & cyp , & ! 1.5*HTE(i,j)-0.5*HTW(i,j) = 1.5*HTE(i,j)-0.5*HTE(i-1,j) cxp , & ! 1.5*HTN(i,j)-0.5*HTS(i,j) = 1.5*HTN(i,j)-0.5*HTN(i,j-1) @@ -180,7 +175,6 @@ module ice_grid logical (kind=log_kind), public :: & use_bathymetry, & ! flag for reading in bathymetry_file - pgl_global_ext, & ! flag for init primary grid lengths (global ext.) scale_dxdy ! flag to apply scale factor to vary dx/dy in rectgrid logical (kind=log_kind), dimension (:,:,:), allocatable, public :: & @@ -300,14 +294,6 @@ subroutine alloc_grid if (ierr/=0) call abort_ice(subname//'ERROR: Out of memory') endif - if (pgl_global_ext) then - allocate( & - G_HTE(nx_global+2*nghost, ny_global+2*nghost), & ! length of eastern edge of T-cell (global ext.) - G_HTN(nx_global+2*nghost, ny_global+2*nghost), & ! length of northern edge of T-cell (global ext.) - stat=ierr) - if (ierr/=0) call abort_ice(subname//'ERROR: Out of memory') - endif - end subroutine alloc_grid !======================================================================= @@ -2016,10 +2002,6 @@ subroutine primary_grid_lengths_HTN(work_g) enddo enddo endif - if (pgl_global_ext) then - call primary_grid_lengths_global_ext( & - G_HTN, work_g, ew_boundary_type, ns_boundary_type) - endif call scatter_global(HTN, work_g, master_task, distrb_info, & field_loc_Nface, field_type_scalar) call scatter_global(dxU, work_g2, master_task, distrb_info, & @@ -2124,10 +2106,6 @@ subroutine primary_grid_lengths_HTE(work_g) enddo endif endif - if (pgl_global_ext) then - call primary_grid_lengths_global_ext( & - G_HTE, work_g, ew_boundary_type, ns_boundary_type) - endif call scatter_global(HTE, work_g, master_task, distrb_info, & field_loc_Eface, field_type_scalar) call scatter_global(dyU, work_g2, master_task, distrb_info, &
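Reviewer note: the hunks above are all parts of a single refactor. The 1-D EVP solver stops carrying HTE/HTN (and their i-1/j-1 shifted copies) and stops rebuilding the grid-metric factors inside stress_1d on every subcycle; instead, dyn_evp1d_init gathers the cxp, cyp, cxm, cym, dxhy and dyhx arrays that ice_grid already precomputes and converts them to the 1-D layout. With that, G_HTE/G_HTN, the pgl_global_ext flag and primary_grid_lengths_global_ext become dead code and are deleted. The Fortran sketch below is illustrative only (it is not part of the change and uses made-up grid values); it shows the identity the refactor relies on: the precomputed metrics reproduce the formulas removed from stress_1d.

! Illustrative sketch only, not CICE code; names mirror the 1-D kernel temporaries.
program metric_equivalence
   implicit none
   integer,   parameter :: dbl  = selected_real_kind(13)
   real(dbl), parameter :: p5   = 0.5_dbl, c1p5 = 1.5_dbl
   integer,   parameter :: n    = 4
   real(dbl), dimension(n) :: hte, htn, htem1, htnm1         ! toy edge lengths (arbitrary units)
   real(dbl), dimension(n) :: cxp, cyp, cxm, cym, dxhy, dyhx ! precomputed metrics
   integer :: iw

   call random_number(hte)  ; call random_number(htn)
   call random_number(htem1); call random_number(htnm1)

   ! computed once up front (ice_grid provides these; dyn_evp1d_init now gathers them)
   cxp  =   c1p5*htn   - p5*htnm1      ! 1.5*HTN(i,j) - 0.5*HTN(i,j-1)
   cyp  =   c1p5*hte   - p5*htem1      ! 1.5*HTE(i,j) - 0.5*HTE(i-1,j)
   cxm  = -(c1p5*htnm1 - p5*htn)
   cym  = -(c1p5*htem1 - p5*hte)
   dxhy =  p5*(hte - htem1)
   dyhx =  p5*(htn - htnm1)

   ! the per-subcycle recomputation removed from stress_1d, re-evaluated point by
   ! point; it should reproduce the precomputed values exactly
   do iw = 1, n
      if (cxp(iw)  /=   c1p5*htn(iw)   - p5*htnm1(iw))  stop 'cxp mismatch'
      if (cym(iw)  /= -(c1p5*htem1(iw) - p5*hte(iw)))   stop 'cym mismatch'
      if (dxhy(iw) /=   p5*(hte(iw) - htem1(iw)))       stop 'dxhy mismatch'
   end do
   write(*,*) 'precomputed metrics match the removed in-kernel formulas'
end program metric_equivalence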
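Design note: the 1-D solver now allocates six metric arrays (cxp, cyp, cxm, cym, dxhy, dyhx) where it previously allocated four edge-length arrays (HTE, HTN, HTEm1, HTNm1), a small memory increase in exchange for dropping the duplicate metric arithmetic from the subcycling loop and for sourcing the metrics from the same ice_grid arrays the 2-D solver uses.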