Commit

Merge pull request NCAR#2 from climbfuji/update_gsd_develop_from_dev_emc
gsd/develop: update from NOAA-EMC dev/emc
DomHeinzeller authored May 20, 2020
2 parents e53a24b + 51fa166 commit 0421431
Showing 15 changed files with 16,699 additions and 698 deletions.
114 changes: 54 additions & 60 deletions driver/fvGFS/atmosphere.F90
@@ -112,7 +112,7 @@ module atmosphere_mod
! <tr>
! <td>mpp_mod</td>
! <td>mpp_error, stdout, FATAL, NOTE, input_nml_file, mpp_root_pe,
! mpp_npes, mpp_pe, mpp_chksum,mpp_get_current_pelist,
! mpp_npes, mpp_pe, mpp_chksum,mpp_get_current_pelist,
! mpp_set_current_pelist</td>
! </tr>
! <tr>
@@ -183,7 +183,7 @@ module atmosphere_mod
use fv_nggps_diags_mod, only: fv_nggps_diag_init, fv_nggps_diag, fv_nggps_tavg
use fv_restart_mod, only: fv_restart, fv_write_restart
use fv_timing_mod, only: timing_on, timing_off
use fv_mp_mod, only: switch_current_Atm
use fv_mp_mod, only: switch_current_Atm, is_master
use fv_sg_mod, only: fv_subgrid_z
use fv_update_phys_mod, only: fv_update_phys
use fv_nwp_nudge_mod, only: fv_nwp_nudge_init, fv_nwp_nudge_end, do_adiabatic_init
@@ -194,6 +194,7 @@ module atmosphere_mod
a_step, p_step, current_time_in_seconds

use mpp_domains_mod, only: mpp_get_data_domain, mpp_get_compute_domain
use fv_grid_utils_mod, only: g_sum

implicit none
private
@@ -269,30 +270,13 @@ module atmosphere_mod
!! and diagnostics.
subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
#ifdef CCPP
#ifdef STATIC
! For static builds, the ccpp_physics_{init,run,finalize} calls
! are not pointing to code in the CCPP framework, but to auto-generated
! ccpp_suite_cap and ccpp_group_*_cap modules behind a ccpp_static_api
use ccpp_api, only: ccpp_init
use ccpp_static_api, only: ccpp_physics_init
#else
use iso_c_binding, only: c_loc
use ccpp_api, only: ccpp_init, &
ccpp_physics_init, &
ccpp_field_add, &
ccpp_error
#endif
use CCPP_data, only: ccpp_suite, &
cdata => cdata_tile, &
CCPP_interstitial
#ifdef OPENMP
use omp_lib
#endif
#ifndef STATIC
! Begin include auto-generated list of modules for ccpp
#include "ccpp_modules_fast_physics.inc"
! End include auto-generated list of modules for ccpp
#endif
#endif
type (time_type), intent(in) :: Time_init, Time, Time_step
type(grid_box_type), intent(inout) :: Grid_box
@@ -442,15 +426,8 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
#ifdef CCPP
! Do CCPP fast physics initialization before call to adiabatic_init (since this calls fv_dynamics)

! Initialize the cdata structure
call ccpp_init(trim(ccpp_suite), cdata, ierr)
if (ierr/=0) then
cdata%errmsg = ' atmosphere_dynamics: error in ccpp_init: ' // trim(cdata%errmsg)
call mpp_error (FATAL, cdata%errmsg)
end if

! For fast physics running over the entire domain, block and thread
! number are not used; set to safe values
! For fast physics running over the entire domain, block
! and thread number are not used; set to safe values
cdata%blk_no = 1
cdata%thrd_no = 1

@@ -467,6 +444,7 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
Atm(mytile)%npz, Atm(mytile)%ng, &
dt_atmos, p_split, Atm(mytile)%flagstruct%k_split, &
zvir, Atm(mytile)%flagstruct%p_ref, Atm(mytile)%ak, Atm(mytile)%bk, &
liq_wat>0, ice_wat>0, rainwat>0, snowwat>0, graupel>0, &
cld_amt>0, kappa, Atm(mytile)%flagstruct%hydrostatic, &
Atm(mytile)%flagstruct%do_sat_adj, &
Atm(mytile)%delp, Atm(mytile)%delz, Atm(mytile)%gridstruct%area_64, &
@@ -486,18 +464,9 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
#endif
mpirank=mpp_pe(), mpiroot=mpp_root_pe())

#ifndef STATIC
! Populate cdata structure with fields required to run fast physics (auto-generated).
#include "ccpp_fields_fast_physics.inc"
#endif

if (Atm(mytile)%flagstruct%do_sat_adj) then
! Initialize fast physics
#ifdef STATIC
call ccpp_physics_init(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)
#else
call ccpp_physics_init(cdata, group_name="fast_physics", ierr=ierr)
#endif
if (ierr/=0) then
cdata%errmsg = ' atmosphere_dynamics: error in ccpp_physics_init for group fast_physics: ' // trim(cdata%errmsg)
call mpp_error (FATAL, cdata%errmsg)
@@ -508,7 +477,7 @@ subroutine atmosphere_init (Time_init, Time, Time_step, Grid_box, area)
! --- initiate the start for a restarted regional forecast
if ( Atm(mytile)%gridstruct%regional .and. Atm(mytile)%flagstruct%warm_start ) then

call start_regional_restart(Atm(1), &
call start_regional_restart(Atm(1), dt_atmos, &
isc, iec, jsc, jec, &
isd, ied, jsd, jed )
endif
@@ -714,15 +683,8 @@ end subroutine atmosphere_dynamics
!! FV3 dynamical core responsible for writing out a restart and final diagnostic state.
subroutine atmosphere_end (Time, Grid_box, restart_endfcst)
#ifdef CCPP
#ifdef STATIC
! For static builds, the ccpp_physics_{init,run,finalize} calls
! are not pointing to code in the CCPP framework, but to auto-generated
! ccpp_suite_cap and ccpp_group_*_cap modules behind a ccpp_static_api
use ccpp_static_api, only: ccpp_physics_finalize
use CCPP_data, only: ccpp_suite
#else
use ccpp_api, only: ccpp_physics_finalize
#endif
use CCPP_data, only: cdata => cdata_tile
#endif
type (time_type), intent(in) :: Time
@@ -734,11 +696,7 @@ subroutine atmosphere_end (Time, Grid_box, restart_endfcst)

if (Atm(mytile)%flagstruct%do_sat_adj) then
! Finalize fast physics
#ifdef STATIC
call ccpp_physics_finalize(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)
#else
call ccpp_physics_finalize(cdata, group_name="fast_physics", ierr=ierr)
#endif
if (ierr/=0) then
cdata%errmsg = ' atmosphere_dynamics: error in ccpp_physics_finalize for group fast_physics: ' // trim(cdata%errmsg)
call mpp_error (FATAL, cdata%errmsg)
@@ -1268,7 +1226,6 @@ subroutine atmosphere_get_bottom_layer (Atm_block, DYCORE_Data)
rrg = rdgas / grav

if (first_time) then
print *, 'calculating slp kr value'
! determine 0.8 sigma reference level
sigtop = Atm(mytile)%ak(1)/pstd_mks+Atm(mytile)%bk(1)
do k = 1, npz
@@ -1395,6 +1352,7 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc
integer :: i, j, ix, k, k1, n, w_diff, nt_dyn, iq
integer :: nb, blen, nwat, dnats, nq_adv
real(kind=kind_phys):: rcp, q0, qwat(nq), qt, rdt
real psum, qsum, psumb, qsumb, betad
Time_prev = Time
Time_next = Time + Time_step_atmos
rdt = 1.d0 / dt_atmos
@@ -1407,6 +1365,18 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc
if( nq<3 ) call mpp_error(FATAL, 'GFS phys must have 3 interactive tracers')

if (IAU_Data%in_interval) then
if (IAU_Data%drymassfixer) then
! global mean total pressure and water before IAU
psumb = g_sum(Atm(n)%domain,sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz),dim=3),&
isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1,reproduce=.true.)
qsumb = g_sum(Atm(n)%domain,&
sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(Atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1,reproduce=.true.)
if (is_master()) then
print *,'dry ps before IAU/physics',psumb+Atm(n)%ptop-qsumb
endif
endif

! IAU increments are in units of 1/sec

! add analysis increment to u,v,t tendencies
@@ -1469,11 +1439,11 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc
call fill_gfs(blen, npz, IPD_Data(nb)%Statein%prsi, IPD_Data(nb)%Stateout%gq0, 1.e-9_kind_phys)

do k = 1, npz
if(flip_vc) then
k1 = npz+1-k !reverse the k direction
else
k1 = k
endif
if(flip_vc) then
k1 = npz+1-k !reverse the k direction
else
k1 = k
endif
do ix = 1, blen
i = Atm_block%index(nb)%ii(ix)
j = Atm_block%index(nb)%jj(ix)
@@ -1515,11 +1485,11 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc
!--- See Note in statein...
do iq = nq+1, ncnst
do k = 1, npz
if(flip_vc) then
k1 = npz+1-k !reverse the k direction
else
k1 = k
endif
if(flip_vc) then
k1 = npz+1-k !reverse the k direction
else
k1 = k
endif
do ix = 1, blen
i = Atm_block%index(nb)%ii(ix)
j = Atm_block%index(nb)%jj(ix)
@@ -1530,6 +1500,29 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc

enddo ! nb-loop

! dry mass fixer in IAU interval following
! https://onlinelibrary.wiley.com/doi/full/10.1111/j.1600-0870.2007.00299.x
if (IAU_Data%in_interval .and. IAU_data%drymassfixer) then
! global mean total pressure
psum = g_sum(Atm(n)%domain,sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz),dim=3),&
isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1,reproduce=.true.)
! global mean total water (before adjustment)
qsum = g_sum(Atm(n)%domain,&
sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(Atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1,reproduce=.true.)
betad = (psum - (psumb - qsumb))/qsum
if (is_master()) then
print *,'dry ps after IAU/physics',psum+Atm(n)%ptop-qsum
endif
Atm(n)%q(:,:,:,1:nwat) = betad*Atm(n)%q(:,:,:,1:nwat)
!qsum = g_sum(Atm(n)%domain,&
! sum(Atm(n)%delp(isc:iec,jsc:jec,1:npz)*sum(Atm(n)%q(isc:iec,jsc:jec,1:npz,1:nwat),4),dim=3),&
! isc,iec,jsc,jec,Atm(n)%ng,Atm(n)%gridstruct%area_64,1)
!if (is_master()) then
! print *,'dry ps after iau_drymassfixer',psum+Atm(n)%ptop-qsum
!endif
endif

call timing_off('GFS_TENDENCIES')

w_diff = get_tracer_index (MODEL_ATMOS, 'w_diff' )
Expand Down Expand Up @@ -1593,6 +1586,7 @@ subroutine atmosphere_state_update (Time, IPD_Data, IAU_Data, Atm_block, flip_vc

call nullify_domain()
call timing_on('FV_DIAG')

call fv_diag(Atm(mytile:mytile), zvir, fv_time, Atm(mytile)%flagstruct%print_freq)
first_diag = .false.
call timing_off('FV_DIAG')
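Reading aid (not part of the commit): the new drymassfixer branch above is meant to preserve the global dry air mass across the IAU window. With $P$ the area-weighted global mean of the column pressure thickness $\sum_k \delta p$ and $Q$ the corresponding mean of the column-integrated water $\sum_k \delta p \sum_{w=1}^{\mathrm{nwat}} q_w$ (the g_sum calls), the dry surface pressure is approximately $P + p_{\mathrm{top}} - Q$, which is what the print statements report. Writing $P_b, Q_b$ for psumb, qsumb (taken before the IAU/physics update) and $P, Q$ for psum, qsum (taken after), the fixer rescales all water species by

$$ \beta_d \;=\; \frac{P - (P_b - Q_b)}{Q}, \qquad q_w \leftarrow \beta_d\, q_w \quad (w = 1,\dots,\mathrm{nwat}), $$

so that the adjusted dry pressure $P - \beta_d Q$ matches its pre-IAU value $P_b - Q_b$, consistent with the dry-mass-fixer reference linked in the code comment.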
2 changes: 1 addition & 1 deletion makefile
@@ -23,7 +23,7 @@ endif

LIBRARY = libfv3core.a

FFLAGS += -I$(FMS_DIR) -I../gfsphysics -I../ipd -I../io -I../namphysics
FFLAGS += -I$(FMS_DIR) -I../gfsphysics -I../ipd -I../io

SRCS_f =

6 changes: 3 additions & 3 deletions model/dyn_core.F90
@@ -364,10 +364,10 @@ subroutine dyn_core(npx, npy, npz, ng, sphum, nq, bdt, n_split, zvir, cp, akap,
allocate( dv(isd:ied+1,jsd:jed, npz) )
call init_ijk_mem(isd,ied+1, jsd,jed , npz, dv, 0.)
endif
!$OMP parallel do default(none) shared(is,ie,js,je,npz,diss_est)
!$OMP parallel do default(none) shared(isd,ied,jsd,jed,npz,diss_est)
do k=1,npz
do j=js,je
do i=is,ie
do j=jsd,jed
do i=isd,ied
diss_est(i,j,k) = 0.
enddo
enddo
5 changes: 5 additions & 0 deletions model/fv_arrays.F90
@@ -985,6 +985,7 @@ module fv_arrays_mod
!< The default value is 4 (recommended); fourth-order interpolation
!< is used unless c2l_ord = 2.

integer :: nrows_blend = 0 !< # of blending rows in the outer integration domain.

real(kind=R_GRID) :: dx_const = 1000. !< Specifies the (uniform) grid-cell-width in the x-direction
!< on a doubly-periodic grid (grid_type = 4) in meters.
@@ -1006,6 +1007,10 @@ module fv_arrays_mod

integer :: bc_update_interval = 3 !< Default setting for interval (hours) between external regional BC data files.

logical :: regional_bcs_from_gsi = .false. !< Default setting for using DA-updated BC files.

logical :: write_restart_with_bcs = .false. !< Default setting for writing restart files with boundary rows.

!>Convenience pointers
integer, pointer :: grid_number

15 changes: 14 additions & 1 deletion model/fv_control.F90
@@ -331,6 +331,9 @@ module fv_control_mod
logical, pointer :: nested, twowaynest
logical, pointer :: regional
integer, pointer :: bc_update_interval
integer, pointer :: nrows_blend
logical, pointer :: regional_bcs_from_gsi
logical, pointer :: write_restart_with_bcs
integer, pointer :: parent_tile, refinement, nestbctype, nestupdate, nsponge, ioffset, joffset
real, pointer :: s_weight, update_blend

@@ -670,7 +673,8 @@ subroutine run_setup(Atm, dt_atmos, grids_on_this_pe, p_split)
nested, twowaynest, parent_grid_num, parent_tile, nudge_qv, &
refinement, nestbctype, nestupdate, nsponge, s_weight, &
ioffset, joffset, check_negative, nudge_ic, halo_update_type, gfs_phil, agrid_vel_rst, &
do_uni_zfull, adj_mass_vmr, fac_n_spl, fhouri, regional, bc_update_interval
do_uni_zfull, adj_mass_vmr, fac_n_spl, fhouri, regional, bc_update_interval, &
regional_bcs_from_gsi, write_restart_with_bcs, nrows_blend

namelist /test_case_nml/test_case, bubble_do, alpha, nsolitons, soliton_Umax, soliton_size
#ifdef MULTI_GASES
@@ -892,6 +896,12 @@ subroutine run_setup(Atm, dt_atmos, grids_on_this_pe, p_split)
Atm(n)%neststruct%refinement = -1
end if

if (Atm(n)%flagstruct%regional) then
if ( consv_te > 0.) then
call mpp_error(FATAL, 'The global energy fixer cannot be used on a regional grid. consv_te must be set to 0.')
end if
end if

if (Atm(n)%neststruct%nested) then
if (Atm(n)%flagstruct%grid_type >= 4 .and. Atm(n)%parent_grid%flagstruct%grid_type >= 4) then
Atm(n)%flagstruct%dx_const = Atm(n)%parent_grid%flagstruct%dx_const / real(Atm(n)%neststruct%refinement)
@@ -1233,6 +1243,9 @@ subroutine setup_pointers(Atm)
target_lon => Atm%flagstruct%target_lon
regional => Atm%flagstruct%regional
bc_update_interval => Atm%flagstruct%bc_update_interval
nrows_blend => Atm%flagstruct%nrows_blend
regional_bcs_from_gsi => Atm%flagstruct%regional_bcs_from_gsi
write_restart_with_bcs => Atm%flagstruct%write_restart_with_bcs
reset_eta => Atm%flagstruct%reset_eta
p_fac => Atm%flagstruct%p_fac
a_imp => Atm%flagstruct%a_imp
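For orientation only (hypothetical, not part of the commit): the three new regional options are read from the same namelist as regional and bc_update_interval; that group is assumed here to be fv_core_nml, since its name sits outside the shown hunks. The values below are illustrative; the defaults added in fv_arrays.F90 are nrows_blend = 0 and .false. for both logicals.

    &fv_core_nml
        regional = .true.
        bc_update_interval = 3           ! hours between external regional BC data files
        nrows_blend = 10                 ! illustrative value; # of blending rows in the outer domain
        regional_bcs_from_gsi = .false.  ! use DA-updated BC files
        write_restart_with_bcs = .false. ! write restart files with boundary rows
    /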
25 changes: 5 additions & 20 deletions model/fv_mapz.F90
@@ -95,17 +95,10 @@ module fv_mapz_mod
#ifndef CCPP
use fv_cmp_mod, only: qs_init, fv_sat_adj
#else
#ifdef STATIC
! For static builds, the ccpp_physics_{init,run,finalize} calls
! are not pointing to code in the CCPP framework, but to auto-generated
! ccpp_suite_cap and ccpp_group_*_cap modules behind a ccpp_static_api
use ccpp_api, only: ccpp_initialized
use ccpp_static_api, only: ccpp_physics_run
use CCPP_data, only: ccpp_suite
#else
use ccpp_api, only: ccpp_initialized, ccpp_physics_run
#endif
use CCPP_data, only: cdata => cdata_tile, CCPP_interstitial
use CCPP_data, only: cdata => cdata_tile
use CCPP_data, only: CCPP_interstitial
#endif
#ifdef MULTI_GASES
use multi_gases_mod, only: virq, virqd, vicpqd, vicvqd, num_gas
@@ -259,7 +252,7 @@ subroutine Lagrangian_to_Eulerian(last_step, consv, ps, pe, delp, pkz, pk, &
endif

!$OMP parallel do default(none) shared(is,ie,js,je,km,pe,ptop,kord_tm,hydrostatic, &
!$OMP pt,pk,rg,peln,q,nwat,liq_wat,rainwat,ice_wat,snowwat, &
!$OMP pt,pk,rg,peln,q,nwat,liq_wat,rainwat,ice_wat,snowwat, &
!$OMP graupel,q_con,sphum,cappa,r_vir,rcp,k1k,delp, &
!$OMP delz,akap,pkz,te,u,v,ps, gridstruct, last_step, &
!$OMP ak,bk,nq,isd,ied,jsd,jed,kord_tr,fill, adiabatic, &
@@ -627,9 +620,7 @@ subroutine Lagrangian_to_Eulerian(last_step, consv, ps, pe, delp, pkz, pk, &
!$OMP ng,gridstruct,E_Flux,pdt,dtmp,reproduce_sum,q, &
!$OMP mdt,cld_amt,cappa,dtdt,out_dt,rrg,akap,do_sat_adj, &
!$OMP kord_tm,cdata,CCPP_interstitial) &
#ifdef STATIC
!$OMP shared(ccpp_suite) &
#endif
#ifdef MULTI_GASES
!$OMP shared(num_gas) &
#endif
Expand All @@ -643,9 +634,7 @@ subroutine Lagrangian_to_Eulerian(last_step, consv, ps, pe, delp, pkz, pk, &
!$OMP ng,gridstruct,E_Flux,pdt,dtmp,reproduce_sum,q, &
!$OMP mdt,cld_amt,cappa,dtdt,out_dt,rrg,akap,do_sat_adj, &
!$OMP fast_mp_consv,kord_tm,cdata, CCPP_interstitial) &
#ifdef STATIC
!$OMP shared(ccpp_suite) &
#endif
#ifdef MULTI_GASES
!$OMP shared(num_gas) &
#endif
@@ -808,15 +797,11 @@ subroutine Lagrangian_to_Eulerian(last_step, consv, ps, pe, delp, pkz, pk, &
if ( do_sat_adj ) then
call timing_on('sat_adj2')
#ifdef CCPP
if (ccpp_initialized(cdata)) then
#ifdef STATIC
if (cdata%initialized()) then
call ccpp_physics_run(cdata, suite_name=trim(ccpp_suite), group_name='fast_physics', ierr=ierr)
#else
call ccpp_physics_run(cdata, group_name='fast_physics', ierr=ierr)
#endif
if (ierr/=0) call mpp_error(FATAL, "Call to ccpp_physics_run for group 'fast_physics' failed")
else
call mpp_error (FATAL, 'Lagrangian_to_Eulerian: can not call CCPP fast physics because cdata not initialized')
call mpp_error (FATAL, 'Lagrangian_to_Eulerian: can not call CCPP fast physics because CCPP not initialized')
endif
#else
!$OMP do
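Condensed for reference (assembled from the hunks above, not new code in this commit): with the STATIC/dynamic split removed, the fast-physics group is driven entirely through ccpp_static_api, using the suite name and tile-level cdata from CCPP_data. Each call is guarded by do_sat_adj and by an ierr check, both omitted in this sketch.

    ! atmosphere_init, after creating CCPP_interstitial:
    call ccpp_physics_init(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)

    ! Lagrangian_to_Eulerian, per call:
    if (cdata%initialized()) then
       call ccpp_physics_run(cdata, suite_name=trim(ccpp_suite), group_name='fast_physics', ierr=ierr)
    endif

    ! atmosphere_end:
    call ccpp_physics_finalize(cdata, suite_name=trim(ccpp_suite), group_name="fast_physics", ierr=ierr)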
(Diffs for the remaining 9 of the 15 changed files are not shown here.)
