Commit 18586e7

Update fms_mixedmode to gfdl_atmos_cubed_sphere/dev/emc (NOAA-GFDL#61)

MinsukJi-NOAA authored Nov 10, 2021
1 parent 122c2c5 commit 18586e7
Showing 19 changed files with 1,621 additions and 133 deletions.
2 changes: 2 additions & 0 deletions CMakeLists.txt
@@ -59,6 +59,7 @@ endif()
list(APPEND model_srcs
model/a2b_edge.F90
model/multi_gases.F90
model/molecular_diffusion.F90
model/boundary.F90
model/dyn_core.F90
model/fv_arrays.F90
@@ -169,6 +170,7 @@ add_library(FV3::fv3 ALIAS fv3)

set_property(SOURCE model/nh_utils.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "${FAST}")
set_property(SOURCE model/fv_mapz.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "${FAST}")
set_property(SOURCE tools/fv_diagnostics.F90 APPEND_STRING PROPERTY COMPILE_FLAGS "-O0")

set_target_properties(fv3 PROPERTIES Fortran_MODULE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/include/fv3)
target_include_directories(fv3 INTERFACE $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include/fv3>
18 changes: 15 additions & 3 deletions driver/fvGFS/fv_nggps_diag.F90
@@ -122,9 +122,7 @@ module fv_nggps_diags_mod
logical :: use_wrtgridcomp_output=.false.
integer :: sphum, liq_wat, ice_wat !< GFDL physics
integer :: rainwat, snowwat, graupel
real :: vrange(2) = (/ -330., 330. /) !< winds
real :: wrange(2) = (/ -100., 100. /) !< vertical wind
real :: trange(2) = (/ 100., 350. /) !< temperature
real :: vrange(2), wrange(2), trange(2)
real :: skrange(2) = (/ -10000000.0, 10000000.0 /) !< dissipation estimate for SKEB

! file name
@@ -181,6 +179,20 @@ subroutine fv_nggps_diag_init(Atm, axes, Time)
kstt_tracer(:) = 0
kend_tracer(:) = 0

if ( Atm(n)%flagstruct%molecular_diffusion ) then
vrange = (/ -830., 830. /) !< winds for wam
wrange = (/ -300., 300. /) !< vertical wind for wam
trange = (/ 100., 3000. /) !< temperature for wam
else
vrange = (/ -330., 330. /) !< winds
wrange = (/ -100., 100. /) !< vertical wind
#ifdef MULTI_GASES
trange = (/ 100., 3000. /) !< temperature
#else
trange = (/ 100., 350. /) !< temperature
#endif
endif

if (Atm(n)%flagstruct%write_3d_diags) then
!-------------------
! A grid winds (lat-lon)
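Note: the ranges selected above matter because fv_nggps_diag_init hands them to the FMS diagnostics manager when registering output fields, so values outside the interval are treated as invalid in the output stream. A minimal registration sketch, assuming the standard FMS register_diag_field interface; the field id, names, and missing_value below are hypothetical and not part of this commit:

      ! Hypothetical sketch: range=trange forwards the valid bounds
      ! selected above to FMS diag_manager when the field is registered.
      id_tmp = register_diag_field('gfs_dyn', 'tmp', axes(1:3), Time, &
                                   'temperature', 'K', &
                                   missing_value=missing_value, range=trange)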
211 changes: 200 additions & 11 deletions model/dyn_core.F90
@@ -111,7 +111,9 @@ module dyn_core_mod
use fv_mp_mod, only: is_master
use fv_mp_mod, only: start_group_halo_update, complete_group_halo_update
use fv_mp_mod, only: group_halo_update_type
use sw_core_mod, only: c_sw, d_sw
use molecular_diffusion_mod, &
only: md_time, md_layers, md_consv_te, md_tadj_layers
use sw_core_mod, only: c_sw, d_sw, d_md
use a2b_edge_mod, only: a2b_ord2, a2b_ord4
use nh_core_mod, only: Riem_Solver3, Riem_Solver_C, update_dz_c, update_dz_d, nh_bc
use tp_core_mod, only: copy_corners
@@ -139,7 +141,7 @@ module dyn_core_mod
use test_cases_mod, only: test_case, case9_forcing1, case9_forcing2
#endif
#ifdef MULTI_GASES
use multi_gases_mod, only: virqd, vicvqd
use multi_gases_mod, only: virqd, vicpqd, vicvqd, virq, vicvq
#endif
use fv_regional_mod, only: dump_field, exch_uv, H_STAGGER, U_STAGGER, V_STAGGER
use fv_regional_mod, only: a_step, p_step, k_step, n_step
@@ -199,7 +201,7 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
real, intent(inout), dimension(bd%isd:bd%ied+1,bd%jsd:bd%jed ,npz):: v !< D grid meridional wind (m/s)
real, intent(inout) :: w( bd%isd:,bd%jsd:,1:) !< vertical vel. (m/s)
real, intent(inout) :: delz(bd%is:,bd%js:,1:) !< delta-height (m, negative)
real, intent(inout) :: cappa(bd%isd:,bd%jsd:,1:) !< moist kappa
real, intent(inout) :: cappa(bd%isd:bd%ied,bd%jsd:bd%jed,1:npz) !< moist kappa
#ifdef MULTI_GASES
real, intent(inout) :: kapad(bd%isd:bd%ied,bd%jsd:bd%jed,1:npz) !< multi_gases kappa
#endif
@@ -1222,6 +1224,7 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
! Prevent accumulation of rounding errors at overlapped domain edges:
call mpp_get_boundary(u, v, domain, ebuffery=ebuffer, &
nbufferx=nbuffer, gridtype=DGRID_NE )
call timing_off('COMM_TOTAL')
!$OMP parallel do default(none) shared(is,ie,js,je,npz,u,nbuffer,v,ebuffer)
do k=1,npz
do i=is,ie
@@ -1234,6 +1237,16 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,

endif

if ( flagstruct%molecular_diffusion .and. md_time ) then
! -----------------------------------------------------
! direct explicit molecular diffusion
! -----------------------------------------------------
call molecular_diffusion_run(u, v, w, delp, pt, pkz, cappa, q, bd, &
gridstruct, flagstruct, domain, i_pack, npx, npy, npz, nq, dt, it, akap, zvir, cv_air)
endif
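! (Note: md_time, md_layers, and md_consv_te come from molecular_diffusion_mod,
! imported at the top of this module; md_time gates whether this acoustic step
! applies the diffusion update, and md_layers bounds the k loop inside
! molecular_diffusion_run to the uppermost md_layers model layers.)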
! -------------------------------------------------

call timing_on('COMM_TOTAL')
#ifndef ROT3
if ( it/=n_split) &
call start_group_halo_update(i_pack(8), u, v, domain, gridtype=DGRID_NE)
@@ -1243,9 +1256,9 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
#ifdef SW_DYNAMICS
endif
#endif
if ( gridstruct%nested ) then
neststruct%nest_timestep = neststruct%nest_timestep + 1
endif

#ifdef SW_DYNAMICS
#else
@@ -1298,13 +1311,12 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
if (gridstruct%nested) then



#ifndef SW_DYNAMICS
if (.not. hydrostatic) then
call nested_grid_BC_apply_intT(w, &
0, 0, npx, npy, npz, bd, split_timestep_BC+1, real(n_split*flagstruct%k_split), &
neststruct%w_BC, bctype=neststruct%nestbctype )
end if
#endif SW_DYNAMICS
call nested_grid_BC_apply_intT(u, &
0, 1, npx, npy, npz, bd, split_timestep_BC+1, real(n_split*flagstruct%k_split), &
@@ -1313,9 +1325,9 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
1, 0, npx, npy, npz, bd, split_timestep_BC+1, real(n_split*flagstruct%k_split), &
neststruct%v_BC, bctype=neststruct%nestbctype )

end if

if (flagstruct%regional) then

#ifndef SW_DYNAMICS
if (.not. hydrostatic) then
@@ -1340,7 +1352,8 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_map, n_split, zvir, cp,
reg_bc_update_time )

call mpp_update_domains(u, v, domain, gridtype=DGRID_NE)

endif

if ( do_diag_debug_dyn ) then
call debug_column_dyn( pt, delp, delz, u, v, w, q, heat_source, cappa, akap, &
@@ -2784,5 +2797,181 @@ subroutine gz_bc(gz,delzBC,bd,npx,npy,npz,step,split)

end subroutine gz_bc

subroutine molecular_diffusion_run(u,v,w,delp,pt,pkz,cappa,q,bd, &
gridstruct,flagstruct,domain,i_pack,npx,npy,npz, &
nq,dt,it,akap,zvir,cv_air)
type(fv_grid_bounds_type), intent(IN) :: bd
real, intent(inout):: pkz(bd%is:bd%ie,bd%js:bd%je,npz)
real, intent(inout):: pt(bd%isd:bd%ied,bd%jsd:bd%jed,npz)
real, intent(inout):: delp(bd%isd:bd%ied,bd%jsd:bd%jed,npz)
real, intent(inout):: cappa(bd%isd:bd%ied,bd%jsd:bd%jed,npz)
real, intent(inout):: w(bd%isd:bd%ied,bd%jsd:bd%jed,npz)
real, intent(inout):: u(bd%isd:bd%ied,bd%jsd:bd%jed+1,npz)
real, intent(inout):: v(bd%isd:bd%ied+1,bd%jsd:bd%jed,npz)
real, intent(inout):: q( bd%isd:bd%ied,bd%jsd:bd%jed,npz, nq)

integer, intent(in):: npx,npy,npz,nq,it
real, intent(in)::akap,zvir,dt,cv_air
type(fv_grid_type), intent(INOUT), target :: gridstruct
type(fv_flags_type), intent(IN), target :: flagstruct
type(domain2d), intent(inout) :: domain
type(group_halo_update_type), intent(inout) :: i_pack(*)

real :: pkzf(bd%isd:bd%ied,bd%jsd:bd%jed,npz)
real, dimension (bd%isd:bd%ied,bd%jsd:bd%jed) :: p, t, e
integer :: i, j, k
integer :: is, ie, js, je
integer :: isd, ied, jsd, jed

is = bd%is
ie = bd%ie
js = bd%js
je = bd%je
isd = bd%isd
ied = bd%ied
jsd = bd%jsd
jed = bd%jed

! -----------------------------------------------------
! ------- update halo to prepare for diffusion -------
pkzf = 0.0
do k=1,npz
do j=js,je
pkzf(is:ie,j,k) = pkz(is:ie,j,k)
enddo
enddo

call timing_on('COMM_TOTAL')
call start_group_halo_update(i_pack(1),delp, domain, complete=.false.)
call start_group_halo_update(i_pack(1), pt, domain, complete=.true.)
call start_group_halo_update(i_pack(2),pkzf, domain)
call start_group_halo_update(i_pack(7), w, domain)
call start_group_halo_update(i_pack(8), u, v, domain, gridtype=DGRID_NE)
if ( nq > 0 ) then
call timing_on('COMM_TRACER')
call start_group_halo_update(i_pack(10), q, domain)
call timing_off('COMM_TRACER')
endif
#ifdef MOIST_CAPPA
call start_group_halo_update(i_pack(12), cappa, domain)
#endif

call complete_group_halo_update(i_pack(1), domain) ! delp, pt
call complete_group_halo_update(i_pack(2), domain) ! pkzf
call complete_group_halo_update(i_pack(7), domain) ! w
call complete_group_halo_update(i_pack(8), domain)
if ( nq>0 ) then
call timing_on('COMM_TRACER')
call complete_group_halo_update(i_pack(10), domain)
call timing_off('COMM_TRACER')
endif
#ifdef MOIST_CAPPA
call complete_group_halo_update(i_pack(12), domain)
#endif
call timing_off('COMM_TOTAL')
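! All halos the diffusion stencils read below (delp, pt, pkzf, w, u, v,
! the nq tracers, and cappa when MOIST_CAPPA is defined) are now current.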

if( flagstruct%nord>0 .and. (.not. (flagstruct%regional))) then
i=mod(it-1,2)+1 ! alternate to avoid bias
do k=1,npz
call copy_corners(pt(isd,jsd,k), npx, npy, i, gridstruct%nested, bd, &
gridstruct%sw_corner, gridstruct%se_corner, &
gridstruct%nw_corner, gridstruct%ne_corner)
call copy_corners(pkzf(isd,jsd,k), npx, npy, i, gridstruct%nested, bd, &
gridstruct%sw_corner, gridstruct%se_corner, &
gridstruct%nw_corner, gridstruct%ne_corner)
call copy_corners(cappa(isd,jsd,k), npx, npy, i, gridstruct%nested,bd, &
gridstruct%sw_corner, gridstruct%se_corner, &
gridstruct%nw_corner, gridstruct%ne_corner)
enddo
endif
call timing_on('d_md')

!$OMP parallel do default(none) shared(npz,flagstruct,gridstruct,bd, &
!$OMP it,dt,is,ie,js,je,isd,ied,jsd,jed, &
!$OMP pt,u,v,w,q,pkz,pkzf,cappa,akap,nq, &
!$OMP zvir,cv_air,md_layers,md_consv_te) &
!$OMP private(k,i,j,p,t,e)
! ----------------
do k=1, md_layers
! ----------------

! ------- prepare p and t for molecular diffusion coefficients

do j=jsd,jed
do i=isd,ied
t(i,j) = pt(i,j,k) * pkzf(i,j,k)
#ifdef MULTI_GASES
t(i,j) = t(i,j) / virq(q(i,j,k,:))
#else
t(i,j) = t(i,j) / (1+zvir*q(i,j,k,1))
#endif
#ifdef MOIST_CAPPA
p(i,j) = exp( log(pkzf(i,j,k)) / cappa(i,j,k) )
#else
#ifdef MULTI_GASES
p(i,j) = exp( log(pkzf(i,j,k)) / &
(akap*virqd(q(i,j,k,:))/vicpqd(q(i,j,k,:))) )
#else
p(i,j) = exp( log(pkzf(i,j,k)) / akap )
#endif
#endif
if( md_consv_te .gt. 0.0 ) &
e(i,j) = 0.5*(w(i,j,k)**2 + 0.5*gridstruct%rsin2(i,j)*( &
u(i,j,k)**2+u(i,j+1,k)**2 + v(i,j,k)**2+v(i+1,j,k)**2 -&
(u(i,j,k)+u(i,j+1,k))*(v(i,j,k)+v(i+1,j,k))* &
gridstruct%cosa_s(i,j)))
enddo
enddo

! compute molecular diffusion with implicit time and dimensional splits

call d_md( p(isd,jsd), t(isd,jsd), &
u(isd,jsd,k), v(isd,jsd,k), w(isd:,jsd:,k), q, &
it, nq, k, npz, dt, &
gridstruct, flagstruct, bd)

do j=js,je
do i=is,ie
if( md_consv_te .gt. 0.0 ) then
e(i,j) = e(i,j) - &
0.5*(w(i,j,k)**2 + 0.5*gridstruct%rsin2(i,j)*( &
u(i,j,k)**2+u(i,j+1,k)**2 + v(i,j,k)**2+v(i+1,j,k)**2 -&
(u(i,j,k)+u(i,j+1,k))*(v(i,j,k)+v(i+1,j,k))* &
gridstruct%cosa_s(i,j)))
#ifdef MOIST_CAPPA
t(i,j) = t(i,j) + e(i,j) / &
#ifdef MULTI_GASES
(rdgas* virq(q(i,j,k,:)) *(1./cappa(i,j,k)-1.))
#else
(rdgas*(1+zvir*q(i,j,k,1))*(1./cappa(i,j,k)-1.))
#endif
endif
pkz(i,j,k) = exp( log(p(i,j)) * cappa(i,j,k) )
#else
#ifdef MULTI_GASES
t(i,j) = t(i,j) + e(i,j) / (cv_air*vicvqd(q(i,j,k,:)))
endif
pkz(i,j,k) = exp( log(p(i,j)) * &
(akap*virqd(q(i,j,k,:))/vicpqd(q(i,j,k,:))) )
#else
t(i,j) = t(i,j) + e(i,j)/cv_air
endif
pkz(i,j,k) = exp( log(p(i,j)) * akap )
#endif
#endif
#ifdef MULTI_GASES
t(i,j) = t(i,j) * virq(q(i,j,k,:))
#else
t(i,j) = t(i,j) * (1+zvir*q(i,j,k,1))
#endif
pt(i,j,k) = t(i,j) / pkz(i,j,k)
enddo
enddo

! -------------------------------------------------
enddo ! k loop of 2d molecular diffusion
! -------------------------------------------------
return
end subroutine molecular_diffusion_run
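For reference, the thermodynamic bookkeeping in the k loop above, read off its simplest branch (no MOIST_CAPPA, no MULTI_GASES), with $\Pi$ = pkz, $\theta$ = pt, $\kappa$ = akap, $\delta$ = zvir, and $q$ the sphum tracer q(i,j,k,1):

$$T = \frac{\theta\,\Pi}{1+\delta q}, \qquad p = \Pi^{1/\kappa}.$$

When md_consv_te > 0, the kinetic energy removed by d_md,

$$\Delta e = e_{\mathrm{before}} - e_{\mathrm{after}}, \qquad
e = \tfrac12\Big[w^2 + \tfrac12\,r\big(u_j^2+u_{j+1}^2+v_i^2+v_{i+1}^2 - (u_j+u_{j+1})(v_i+v_{i+1})\cos\alpha\big)\Big],$$

with $r$ = rsin2 and $\cos\alpha$ = cosa_s, is returned as heat, $T \leftarrow T + \Delta e/c_v$ (cv_air in this branch), after which the layer values are rebuilt as pkz $= p^{\kappa}$ and pt $= T(1+\delta q)/\mathrm{pkz}$.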

end module dyn_core_mod
