diff --git a/doc/ChangeLog b/doc/ChangeLog index 661f91fb0e..3e92f5b388 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,101 @@ =============================================================== +Tag name: ctsm5.1.dev047 +Originator(s): mvertens (Mariana Vertenstein) +Date: Thu Jul 8 12:03:31 MDT 2021 +One-line Summary: Start bounds at 1; remove references to MCT + +Purpose and description of changes +---------------------------------- + +Users: take note of (1), and in particular the caveat for users noted +below (in the "Notes of particular relevance for users"). + +(1) All global index arrays on a given processor now have a starting index of 1 + - bounds_proc for each subgrid level has a starting index of 1 for each level + - bounds_clump for each subgrid level has a starting index of 1 for just + the first clump on the processor - but all the other clumps on the + processor do not start at 1 - but rather are offset with the number of + gridcells, columns, ...etc on the preceding clumps + +(2) There are no longer any references to any mct data structures other than in the mct cap + - All references to gsmap have been removed from decompMod.F90 and + replaced with new global index arrays for the various subgrid levels + - decompInitMod has been refactored to calculate these global index + arrays using pure MPI rather than mct + - ncdio_pio.F90.in has been refactored to use the new global index + arrays rather than the gsmap data structures + - the data structure ldecomp is no longer needed + - the module spmdGathScatMod.F90 is no longer needed and has been removed + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] 
+ +[ ] clm5_1 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed or introduced +------------------------ +Issues fixed (include CTSM Issue #): +- Addresses the first part of ESCOMP/CTSM#293 (Rework threading to be + done at a higher level, simplifying array argument passing) + +Notes of particular relevance for users +--------------------------------------- +Caveats for users (e.g., need to interpolate initial conditions): +- Until now, if the model produced an error message with a gridcell or + subgrid index (e.g., "Error at p = 1234"), you could rerun the model + with write statements in conditionals like "if (p == 1234) then". With + the changes in this tag, this simple conditional is no longer + possible. Instead, you will need to also reference the processor + number that produces the error. + + If the error message is produced in the lnd log file, then you can + have a conditional like: + if (masterproc .and. p == 1234) then + + If the error message is produced in the cesm log file, then you can + have a conditional like: + if (iam == X .and. p == 1234) then + + where X is the processor number that produced the message. On some + machines (including cheyenne), this processor number is printed at the + start of each line in the cesm log file. In addition, some aborts will + now print "proc_id = X", and that value can be used. + + Soon we will update + https://escomp.github.io/ctsm-docs/versions/master/html/users_guide/trouble-shooting/trouble-shooting.html + to document the new recommended procedure. 
+ +Testing summary: +---------------- + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + cheyenne ---- ok + izumi ------- ok + +Answer changes +-------------- + +Changes answers relative to baseline: NO + +Other details +------------- +Pull Requests that document the changes (include PR ids): +https://github.com/ESCOMP/CTSM/pull/1420 + +=============================================================== +=============================================================== Tag name: ctsm5.1.dev046 Originator(s): rgknox (Ryan Knox,,,) Date: Fri Jul 2 14:36:44 MDT 2021 diff --git a/doc/ChangeSum b/doc/ChangeSum index f37a03661d..63b3d132e4 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.1.dev047 mvertens 07/08/2021 Start bounds at 1; remove references to MCT ctsm5.1.dev046 rgknox 07/02/2021 Updating external fates has to tag sci.1.46.2_api.16.1.0 ctsm5.1.dev045 slevis 06/29/2021 Include CWD in heterotrophic respiration ctsm5.1.dev044 mvertens 06/24/2021 New stream functionality when using NUOPC or LILAC diff --git a/src/cpl/lilac/lnd_comp_esmf.F90 b/src/cpl/lilac/lnd_comp_esmf.F90 index c01e454211..e9b9e7f407 100644 --- a/src/cpl/lilac/lnd_comp_esmf.F90 +++ b/src/cpl/lilac/lnd_comp_esmf.F90 @@ -24,7 +24,7 @@ module lnd_comp_esmf ! ctsm code use spmdMod , only : masterproc, spmd_init, mpicom - use decompMod , only : bounds_type, ldecomp, get_proc_bounds + use decompMod , only : bounds_type, get_proc_bounds use domainMod , only : ldomain use controlMod , only : control_setNL use clm_varorb , only : eccen, obliqr, lambm0, mvelpp @@ -342,7 +342,7 @@ subroutine lnd_init(comp, import_state, export_state, clock, rc) call ESMF_LogWrite(subname//"ctsm initialize1 done...", ESMF_LOGMSG_INFO) !---------------------- - ! 
Initialize decomposition (ldecomp) and domain (ldomain) types and generate land mesh + ! Initialize decomposition and domain (ldomain) types and generate land mesh !---------------------- ! TODO: generalize this so that a mask mesh is read in like for nuopc/cmeps ! For now set the meshfile_mask equal to the model_meshfile diff --git a/src/cpl/mct/FireDataBaseType.F90 b/src/cpl/mct/FireDataBaseType.F90 index 281073f9cb..2d95422fd7 100644 --- a/src/cpl/mct/FireDataBaseType.F90 +++ b/src/cpl/mct/FireDataBaseType.F90 @@ -7,19 +7,19 @@ module FireDataBaseType ! module for handling of fire data ! ! !USES: - use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create, shr_strdata_print - use shr_strdata_mod , only : shr_strdata_advance - use shr_log_mod , only : errMsg => shr_log_errMsg - use clm_varctl , only : iulog, inst_name - use spmdMod , only : masterproc, mpicom, comp_id - use fileutils , only : getavu, relavu - use decompMod , only : gsmap_lnd_gdc2glo - use domainMod , only : ldomain - use abortutils , only : endrun - use decompMod , only : bounds_type + use shr_kind_mod , only : r8 => shr_kind_r8, CL => shr_kind_CL + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create, shr_strdata_print + use shr_strdata_mod , only : shr_strdata_advance + use shr_log_mod , only : errMsg => shr_log_errMsg + use clm_varctl , only : iulog, inst_name + use spmdMod , only : masterproc, mpicom, comp_id + use fileutils , only : getavu, relavu + use domainMod , only : ldomain + use abortutils , only : endrun + use decompMod , only : bounds_type + use FireMethodType , only : fire_method_type + use lnd_set_decomp_and_domain, only : gsmap_global use mct_mod - use FireMethodType , only : fire_method_type ! implicit none private @@ -38,7 +38,6 @@ module FireDataBaseType type(shr_strdata_type) :: sdat_hdm ! Human population density input data stream type(shr_strdata_type) :: sdat_lnfm ! 
Lightning input data stream - contains ! ! !PUBLIC MEMBER FUNCTIONS: @@ -171,7 +170,6 @@ subroutine hdm_init( this, bounds, NLFilename ) character(len=CL) :: popdensmapalgo = 'bilinear' ! mapping alogrithm for population density character(len=CL) :: popdens_tintalgo = 'nearest'! time interpolation alogrithm for population density character(len=CL) :: stream_meshfile_popdens ! not used - character(*), parameter :: subName = "('hdmdyn_init')" character(*), parameter :: F00 = "('(hdmdyn_init) ',4a)" !----------------------------------------------------------------------- @@ -225,31 +223,31 @@ subroutine hdm_init( this, bounds, NLFilename ) call clm_domain_mct (bounds, dom_clm) - call shr_strdata_create(this%sdat_hdm,name="clmhdm", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_popdens, & - yearLast=stream_year_last_popdens, & - yearAlign=model_year_align_popdens, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_popdens), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & + call shr_strdata_create(this%sdat_hdm,name="clmhdm", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_popdens, & + yearLast=stream_year_last_popdens, & + yearAlign=model_year_align_popdens, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_popdens), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & filename=(/trim(stream_fldFileName_popdens)/) , & - fldListFile='hdm', & - fldListModel='hdm', & - fillalgo='none', & - 
mapalgo=popdensmapalgo, & - calendar=get_calendar(), & - tintalgo=popdens_tintalgo, & + fldListFile='hdm', & + fldListModel='hdm', & + fillalgo='none', & + mapalgo=popdensmapalgo, & + calendar=get_calendar(), & + tintalgo=popdens_tintalgo, & taxmode='extend' ) if (masterproc) then @@ -381,31 +379,31 @@ subroutine lnfm_init( this, bounds, NLFilename ) call clm_domain_mct (bounds, dom_clm) - call shr_strdata_create(this%sdat_lnfm,name="clmlnfm", & - pio_subsystem=pio_subsystem, & + call shr_strdata_create(this%sdat_lnfm,name="clmlnfm", & + pio_subsystem=pio_subsystem, & pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_lightng, & - yearLast=stream_year_last_lightng, & - yearAlign=model_year_align_lightng, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_lightng), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='mask', & - filePath='', & - filename=(/trim(stream_fldFileName_lightng)/),& - fldListFile='lnfm', & - fldListModel='lnfm', & - fillalgo='none', & - tintalgo=lightng_tintalgo, & - mapalgo=lightngmapalgo, & - calendar=get_calendar(), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_lightng, & + yearLast=stream_year_last_lightng, & + yearAlign=model_year_align_lightng, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_lightng), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='mask', & + filePath='', & + filename=(/trim(stream_fldFileName_lightng)/), & + fldListFile='lnfm', & + fldListModel='lnfm', & + fillalgo='none', & + tintalgo=lightng_tintalgo, & + mapalgo=lightngmapalgo, & + calendar=get_calendar(), & taxmode='cycle' ) if (masterproc) then diff --git 
a/src/cpl/mct/SoilMoistureStreamMod.F90 b/src/cpl/mct/SoilMoistureStreamMod.F90 index eab6d26c02..883daf7f63 100644 --- a/src/cpl/mct/SoilMoistureStreamMod.F90 +++ b/src/cpl/mct/SoilMoistureStreamMod.F90 @@ -16,26 +16,24 @@ module SoilMoistureStreamMod ! Read in soil moisture from data stream ! ! !USES: - use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create - use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance - use shr_kind_mod , only : r8 => shr_kind_r8 - use shr_kind_mod , only : CL => shr_kind_CL, CXX => shr_kind_CXX - use shr_log_mod , only : errMsg => shr_log_errMsg - use decompMod , only : bounds_type - use abortutils , only : endrun - use clm_varctl , only : iulog, use_soil_moisture_streams, inst_name - use clm_varcon , only : grlnd - use controlMod , only : NLFilename - use decompMod , only : gsMap_lnd2Dsoi_gdc2glo - use domainMod , only : ldomain - use fileutils , only : getavu, relavu - use LandunitType , only : lun - use ColumnType , only : col - use SoilStateType , only : soilstate_type - use WaterStateBulkType, only : waterstatebulk_type - use perf_mod , only : t_startf, t_stopf - use spmdMod , only : masterproc - use spmdMod , only : mpicom, comp_id + use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create + use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance + use shr_kind_mod , only : r8 => shr_kind_r8 + use shr_kind_mod , only : CL => shr_kind_CL, CXX => shr_kind_CXX + use shr_log_mod , only : errMsg => shr_log_errMsg + use decompMod , only : bounds_type + use abortutils , only : endrun + use clm_varctl , only : iulog, use_soil_moisture_streams, inst_name + use clm_varcon , only : grlnd + use controlMod , only : NLFilename + use domainMod , only : ldomain + use LandunitType , only : lun + use ColumnType , only : col + use SoilStateType , only : soilstate_type + use WaterStateBulkType , only : waterstatebulk_type + use perf_mod , only : t_startf, t_stopf + use spmdMod , only : masterproc, 
mpicom, comp_id + use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo use mct_mod use ncdio_pio ! @@ -127,8 +125,7 @@ subroutine PrescribedSoilMoistureInit(bounds) ! Read soilm_streams namelist if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) call find_nlgroup_name(nu_nml, 'soil_moisture_streams', status=nml_error) if (nml_error == 0) then read(nu_nml, nml=soil_moisture_streams,iostat=nml_error) @@ -139,7 +136,6 @@ subroutine PrescribedSoilMoistureInit(bounds) call endrun(subname // ':: ERROR finding soilm_streams namelist') end if close(nu_nml) - call relavu( nu_nml ) endif call shr_mpi_bcast(stream_year_first_soilm, mpicom) @@ -170,11 +166,8 @@ subroutine PrescribedSoilMoistureInit(bounds) call clm_domain_mct (bounds, dom_clm, nlevels=nlevsoi) - ! ! create the field list for these fields...use in shr_strdata_create - ! fldList = trim(soilmString) - if (masterproc) write(iulog,*) 'fieldlist: ', trim(fldList) call shr_strdata_create(sdat_soilm,name="soil_moisture", & diff --git a/src/cpl/mct/UrbanTimeVarType.F90 b/src/cpl/mct/UrbanTimeVarType.F90 index 7a907bb9b9..f637bd8461 100644 --- a/src/cpl/mct/UrbanTimeVarType.F90 +++ b/src/cpl/mct/UrbanTimeVarType.F90 @@ -100,10 +100,10 @@ subroutine urbantv_init(this, bounds, NLFilename) use shr_mpi_mod , only : shr_mpi_bcast use shr_string_mod , only : shr_string_listAppend use shr_strdata_mod , only : shr_strdata_create, shr_strdata_print - use decompMod , only : gsmap_lnd_gdc2glo use domainMod , only : ldomain use shr_infnan_mod , only : nan => shr_infnan_nan, assignment(=) use landunit_varcon , only : isturb_TBD, isturb_HD, isturb_MD + use lnd_set_decomp_and_domain , only : gsmap_global ! ! 
!ARGUMENTS: implicit none @@ -190,31 +190,31 @@ subroutine urbantv_init(this, bounds, NLFilename) call shr_string_listAppend( fldList, stream_var_name(ifield) ) end do - call shr_strdata_create(this%sdat_urbantv,name="clmurbantv", & - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=stream_year_first_urbantv, & - yearLast=stream_year_last_urbantv, & - yearAlign=model_year_align_urbantv, & - offset=0, & - domFilePath='', & - domFileName=trim(stream_fldFileName_urbantv), & - domTvarName='time', & - domXvarName='lon' , & - domYvarName='lat' , & - domAreaName='area', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(stream_fldFileName_urbantv)/) , & - fldListFile=fldList, & - fldListModel=fldList, & - fillalgo='none', & - mapalgo=urbantvmapalgo, & - calendar=get_calendar(), & - tintalgo=urbantv_tintalgo, & + call shr_strdata_create(this%sdat_urbantv,name="clmurbantv", & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=stream_year_first_urbantv, & + yearLast=stream_year_last_urbantv, & + yearAlign=model_year_align_urbantv, & + offset=0, & + domFilePath='', & + domFileName=trim(stream_fldFileName_urbantv), & + domTvarName='time', & + domXvarName='lon' , & + domYvarName='lat' , & + domAreaName='area', & + domMaskName='LANDMASK', & + filePath='', & + filename=(/trim(stream_fldFileName_urbantv)/) , & + fldListFile=fldList, & + fldListModel=fldList, & + fillalgo='none', & + mapalgo=urbantvmapalgo, & + calendar=get_calendar(), & + tintalgo=urbantv_tintalgo, & taxmode='extend' ) if (masterproc) then diff --git a/src/cpl/mct/ch4FInundatedStreamType.F90 b/src/cpl/mct/ch4FInundatedStreamType.F90 index dcdc76fa08..3c26f4d109 100644 --- 
a/src/cpl/mct/ch4FInundatedStreamType.F90 +++ b/src/cpl/mct/ch4FInundatedStreamType.F90 @@ -70,13 +70,14 @@ subroutine Init(this, bounds, NLFilename) use shr_mpi_mod , only : shr_mpi_bcast use ndepStreamMod , only : clm_domain_mct use domainMod , only : ldomain - use decompMod , only : bounds_type, gsmap_lnd_gdc2glo + use decompMod , only : bounds_type use mct_mod , only : mct_ggrid, mct_avect_indexra use shr_strdata_mod , only : shr_strdata_type, shr_strdata_create use shr_strdata_mod , only : shr_strdata_print, shr_strdata_advance use spmdMod , only : comp_id, iam use ch4varcon , only : finundation_mtd_h2osfc use ch4varcon , only : finundation_mtd_ZWT_inversion, finundation_mtd_TWS_inversion + use lnd_set_decomp_and_domain , only : gsmap_global ! ! arguments implicit none @@ -108,30 +109,30 @@ subroutine Init(this, bounds, NLFilename) if ( this%useStreams() )then call clm_domain_mct (bounds, dom_clm) - call shr_strdata_create(sdat,name=stream_name,& - pio_subsystem=pio_subsystem, & - pio_iotype=shr_pio_getiotype(inst_name), & - mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & - nxg=ldomain%ni, nyg=ldomain%nj, & - yearFirst=1996, & - yearLast=1996, & - yearAlign=1, & - offset=0, & - domFilePath='', & - domFileName=trim(control%stream_fldFileName_ch4finundated), & - domTvarName='time', & - domXvarName='LONGXY' , & - domYvarName='LATIXY' , & - domAreaName='AREA', & - domMaskName='LANDMASK', & - filePath='', & - filename=(/trim(control%stream_fldFileName_ch4finundated)/),& - fldListFile=control%fldList, & - fldListModel=control%fldList, & - fillalgo='none', & - mapalgo=control%ch4finundatedmapalgo, & - calendar=get_calendar(), & + call shr_strdata_create(sdat,name=stream_name, & + pio_subsystem=pio_subsystem, & + pio_iotype=shr_pio_getiotype(inst_name), & + mpicom=mpicom, compid=comp_id, & + gsmap=gsmap_global, ggrid=dom_clm, & + nxg=ldomain%ni, nyg=ldomain%nj, & + yearFirst=1996, & + yearLast=1996, & + yearAlign=1, & + offset=0, & + 
domFilePath='', & + domFileName=trim(control%stream_fldFileName_ch4finundated), & + domTvarName='time', & + domXvarName='LONGXY' , & + domYvarName='LATIXY' , & + domAreaName='AREA', & + domMaskName='LANDMASK', & + filePath='', & + filename=(/trim(control%stream_fldFileName_ch4finundated)/), & + fldListFile=control%fldList, & + fldListModel=control%fldList, & + fillalgo='none', & + mapalgo=control%ch4finundatedmapalgo, & + calendar=get_calendar(), & taxmode='extend' ) if (masterproc) then diff --git a/src/cpl/mct/laiStreamMod.F90 b/src/cpl/mct/laiStreamMod.F90 index df12d6912e..47d25287b7 100644 --- a/src/cpl/mct/laiStreamMod.F90 +++ b/src/cpl/mct/laiStreamMod.F90 @@ -53,8 +53,8 @@ subroutine lai_init(bounds) use ndepStreamMod , only : clm_domain_mct use histFileMod , only : hist_addfld1d use domainMod , only : ldomain - use decompMod , only : gsmap_lnd_gdc2glo use controlMod , only : NLFilename + use lnd_set_decomp_and_domain , only : gsmap_global ! ! !ARGUMENTS: implicit none @@ -131,7 +131,7 @@ subroutine lai_init(bounds) pio_subsystem=pio_subsystem, & pio_iotype=shr_pio_getiotype(inst_name), & mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & + gsmap=gsmap_global, ggrid=dom_clm, & nxg=ldomain%ni, nyg=ldomain%nj, & yearFirst=stream_year_first_lai, & yearLast=stream_year_last_lai, & diff --git a/src/cpl/mct/lnd_comp_mct.F90 b/src/cpl/mct/lnd_comp_mct.F90 index f94a3544dc..1595611a72 100644 --- a/src/cpl/mct/lnd_comp_mct.F90 +++ b/src/cpl/mct/lnd_comp_mct.F90 @@ -11,12 +11,11 @@ module lnd_comp_mct use shr_sys_mod , only : shr_sys_flush use shr_log_mod , only : errMsg => shr_log_errMsg use mct_mod , only : mct_avect, mct_gsmap, mct_gGrid - use decompmod , only : bounds_type, ldecomp + use decompmod , only : bounds_type use lnd_import_export, only : lnd_import, lnd_export ! ! !public member functions: implicit none - save private ! by default make data private ! ! 
!public member functions: @@ -25,7 +24,6 @@ module lnd_comp_mct public :: lnd_final_mct ! clm finalization/cleanup ! ! !private member functions: - private :: lnd_setgsmap_mct ! set the land model mct gs map private :: lnd_domain_mct ! set the land model domain information private :: lnd_handle_resume ! handle pause/resume signals from the coupler @@ -67,8 +65,9 @@ subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename ) use spmdMod , only : masterproc, spmd_init use clm_varctl , only : nsrStartup, nsrContinue, nsrBranch use clm_cpl_indices , only : clm_cpl_indices_set - use mct_mod , only : mct_aVect_init, mct_aVect_zero, mct_gsMap_lsize - use lnd_set_decomp_and_domain, only : lnd_set_decomp_and_domain_from_surfrd + use mct_mod , only : mct_aVect_init, mct_aVect_zero, mct_gsMap, mct_gsMap_init + use decompMod , only : gindex_global + use lnd_set_decomp_and_domain, only : lnd_set_decomp_and_domain_from_surfrd, gsmap_global use ESMF ! ! !ARGUMENTS: @@ -84,6 +83,7 @@ subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename ) type(mct_gGrid), pointer :: dom_l ! Land model domain type(seq_infodata_type), pointer :: infodata ! CESM driver level info data integer :: lsize ! size of attribute vector + integer :: gsize ! global size integer :: g,i,j ! indices integer :: dtime_sync ! coupling time-step from the input synchronization clock logical :: exists ! true if file exists @@ -208,7 +208,7 @@ subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename ) ! Read namelists call initialize1(dtime=dtime_sync) - ! Initialize decomposition (ldecomp) and domain (ldomain) types + ! Initialize decomposition and domain (ldomain) type call lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj) ! If no land then exit out of initialization @@ -227,8 +227,10 @@ subroutine lnd_init_mct( EClock, cdata_l, x2l_l, l2x_l, NLFilename ) ! 
Initialize clm gsMap, clm domain and clm attribute vectors call get_proc_bounds( bounds ) - call lnd_SetgsMap_mct( bounds, mpicom_lnd, LNDID, gsMap_lnd ) - lsize = mct_gsMap_lsize(gsMap_lnd, mpicom_lnd) + lsize = bounds%endg - bounds%begg + 1 + gsize = ldomain%ni * ldomain%nj + call mct_gsMap_init( gsMap_lnd, gindex_global, mpicom_lnd, LNDID, lsize, gsize ) + gsmap_global => gsmap_lnd ! module variable in lnd_set_decomp_and_domain call lnd_domain_mct( bounds, lsize, gsMap_lnd, dom_l ) call mct_aVect_init(x2l_l, rList=seq_flds_x2l_fields, lsize=lsize) call mct_aVect_zero(x2l_l) @@ -513,49 +515,6 @@ subroutine lnd_final_mct( EClock, cdata_l, x2l_l, l2x_l) ! fill this in end subroutine lnd_final_mct - !==================================================================================== - subroutine lnd_setgsmap_mct( bounds, mpicom_lnd, LNDID, gsMap_lnd ) - ! - ! !DESCRIPTION: - ! Set the MCT GS map for the land model - ! - ! !USES: - use shr_kind_mod , only : r8 => shr_kind_r8 - use domainMod , only : ldomain - use mct_mod , only : mct_gsMap, mct_gsMap_init - implicit none - ! - ! !ARGUMENTS: - type(bounds_type) , intent(in) :: bounds ! bounds - integer , intent(in) :: mpicom_lnd ! MPI communicator for the clm land model - integer , intent(in) :: LNDID ! Land model identifyer number - type(mct_gsMap) , intent(out) :: gsMap_lnd ! Resulting MCT GS map for the land model - ! - ! !LOCAL VARIABLES: - integer,allocatable :: gindex(:) ! Number the local grid points - integer :: i, j, n, gi ! Indices - integer :: lsize,gsize ! GS Map size - integer :: ier ! Error code - !--------------------------------------------------------------------------- - - ! Build the land grid numbering for MCT - ! NOTE: Numbering scheme is: West to East and South to North - ! starting at south pole. Should be the same as what's used in SCRIP - allocate(gindex(bounds%begg:bounds%endg),stat=ier) - - ! 
number the local grid - do n = bounds%begg, bounds%endg - gindex(n) = ldecomp%gdc2glo(n) - end do - lsize = bounds%endg - bounds%begg + 1 - gsize = ldomain%ni * ldomain%nj - - call mct_gsMap_init( gsMap_lnd, gindex, mpicom_lnd, LNDID, lsize, gsize ) - - deallocate(gindex) - - end subroutine lnd_SetgsMap_mct - !==================================================================================== subroutine lnd_domain_mct( bounds, lsize, gsMap_l, dom_l ) ! diff --git a/src/cpl/mct/lnd_set_decomp_and_domain.F90 b/src/cpl/mct/lnd_set_decomp_and_domain.F90 index f5f6046df0..0a37554313 100644 --- a/src/cpl/mct/lnd_set_decomp_and_domain.F90 +++ b/src/cpl/mct/lnd_set_decomp_and_domain.F90 @@ -1,9 +1,9 @@ module lnd_set_decomp_and_domain - use shr_kind_mod , only : r8 => shr_kind_r8, cl=>shr_kind_cl + use shr_kind_mod , only : r8 => shr_kind_r8 use spmdMod , only : masterproc use clm_varctl , only : iulog - use perf_mod , only : t_startf, t_stopf, t_barrierf + use mct_mod , only : mct_gsMap implicit none private ! except @@ -15,6 +15,13 @@ module lnd_set_decomp_and_domain private :: surfrd_get_globmask ! Reads global land mask (needed for setting domain decomp) private :: surfrd_get_grid ! Read grid/ladnfrac data into domain (after domain decomp) + ! translation between local and global indices at gridcell level + type(mct_gsmap), pointer, public :: gsmap_global + + ! translation between local and global indices at gridcell level for multiple levels + ! needed for 3d soil moisture stream + type(mct_gsmap), target , public :: gsMap_lnd2Dsoi_gdc2glo + character(len=*), parameter, private :: sourcefile = & __FILE__ @@ -24,13 +31,13 @@ module lnd_set_decomp_and_domain subroutine lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj) - ! Initialize ldecomp and ldomain data types + ! 
Initialize ldomain data types use clm_varpar , only: nlevsoi use clm_varctl , only: fatmlndfrc, use_soil_moisture_streams use decompInitMod , only: decompInit_lnd use decompMod , only: bounds_type, get_proc_bounds - use domainMod , only: ldomain, domain_init, domain_check + use domainMod , only: ldomain, domain_check ! input/output variables logical, intent(out) :: noland @@ -61,7 +68,6 @@ subroutine lnd_set_decomp_and_domain_from_surfrd(noland, ni, nj) noland = .false. end if - ! Initialize ldecomp data type ! Determine ctsm gridcell decomposition and processor bounds for gridcells call decompInit_lnd(ni, nj, amask) deallocate(amask) @@ -112,7 +118,6 @@ subroutine surfrd_get_globmask(filename, mask, ni, nj) integer :: n,i,j ! index integer :: ier ! error status type(file_desc_t) :: ncid ! netcdf id - character(len=256) :: varname ! variable name character(len=256) :: locfn ! local file name logical :: readvar ! read variable in or not integer , allocatable :: idata2d(:,:) @@ -174,7 +179,7 @@ subroutine surfrd_get_globmask(filename, mask, ni, nj) end subroutine surfrd_get_globmask !----------------------------------------------------------------------- - subroutine surfrd_get_grid(begg, endg, ldomain, filename, glcfilename) + subroutine surfrd_get_grid(begg, endg, ldomain, filename) ! Read the surface dataset grid related information: ! This is called after the domain decomposition has been created @@ -182,28 +187,24 @@ subroutine surfrd_get_grid(begg, endg, ldomain, filename, glcfilename) ! 
- real longitude of grid cell (degrees) use clm_varcon , only : spval, re, grlnd - use domainMod , only : domain_type, domain_init, domain_clean, lon1d, lat1d + use domainMod , only : domain_type, lon1d, lat1d, domain_init use fileutils , only : getfil use abortutils , only : endrun use shr_log_mod , only : errMsg => shr_log_errMsg - use ncdio_pio , only : file_desc_t, var_desc_t, ncd_pio_openfile, ncd_pio_closefile - use ncdio_pio , only : ncd_io, check_var, ncd_inqfdims, check_dim_size, ncd_inqdid, ncd_inqdlen + use ncdio_pio , only : file_desc_t, ncd_pio_openfile, ncd_pio_closefile + use ncdio_pio , only : ncd_io, check_var, ncd_inqfdims, check_dim_size use pio ! input/output variables integer , intent(in) :: begg, endg type(domain_type) , intent(inout) :: ldomain ! domain to init character(len=*) , intent(in) :: filename ! grid filename - character(len=*) ,optional , intent(in) :: glcfilename ! glc mask filename ! local variables type(file_desc_t) :: ncid ! netcdf id integer :: beg ! local beg index integer :: end ! local end index integer :: ni,nj,ns ! size of grid on file - integer :: dimid,varid ! netCDF id's - integer :: start(1), count(1) ! 1d lat/lon array sections - integer :: ier,ret ! error status logical :: readvar ! true => variable is on input file logical :: isgrid2d ! true => file is 2d lat/lon logical :: istype_domain ! true => input file is of type domain @@ -295,12 +296,11 @@ end subroutine surfrd_get_grid subroutine decompInit_lnd3D(lni,lnj,lnk) ! ! !DESCRIPTION: - ! - ! Create a 3D decomposition gsmap for the global 2D grid with soil levels - ! as the 3rd dimesnion. + ! Create a 3D decomposition gsmap for the global 2D grid with soil levels + ! as the 3rd dimesnion. ! ! !USES: - use decompMod, only : ldecomp, gsmap_lnd2dsoi_gdc2glo, get_proc_bounds + use decompMod, only : gindex_global, bounds_type, get_proc_bounds use spmdMod , only : comp_id, mpicom use mct_mod , only : mct_gsmap_init ! 
@@ -308,14 +308,17 @@ subroutine decompInit_lnd3D(lni,lnj,lnk) integer , intent(in) :: lni,lnj,lnk ! domain global size ! ! !LOCAL VARIABLES: - integer :: m,n,k ! indices - integer :: begg,endg,lsize,gsize ! used for gsmap init - integer :: begg3d,endg3d - integer, pointer :: gindex(:) ! global index for gsmap init + integer :: m,n,k ! indices + integer :: begg,endg,lsize,gsize ! used for gsmap init + integer :: begg3d,endg3d + integer, pointer :: gindex(:) ! global index for gsmap init + type(bounds_type) :: bounds !------------------------------------------------------------------------------ - ! Set gsMap_lnd_gdc2glo (the global index here includes mask=0 or ocean points) - call get_proc_bounds(begg, endg) + ! Initialize gsmap_lnd2dsoi_gdc2glo + call get_proc_bounds(bounds) + begg = bounds%begg; endg=bounds%endg + begg3d = (begg-1)*lnk + 1 endg3d = endg*lnk lsize = (endg3d - begg3d + 1 ) @@ -323,7 +326,7 @@ subroutine decompInit_lnd3D(lni,lnj,lnk) do k = 1, lnk do n = begg,endg m = (begg-1)*lnk + (k-1)*(endg-begg+1) + (n-begg+1) - gindex(m) = ldecomp%gdc2glo(n) + (k-1)*(lni*lnj) + gindex(m) = gindex_global(n-begg+1) + (k-1)*(lni*lnj) enddo enddo gsize = lni * lnj * lnk diff --git a/src/cpl/mct/ndepStreamMod.F90 b/src/cpl/mct/ndepStreamMod.F90 index e99afd81f3..af03ca5c35 100644 --- a/src/cpl/mct/ndepStreamMod.F90 +++ b/src/cpl/mct/ndepStreamMod.F90 @@ -14,14 +14,12 @@ module ndepStreamMod use spmdMod , only: mpicom, masterproc, comp_id, iam use clm_varctl , only: iulog, inst_name use abortutils , only: endrun - use fileutils , only: getavu, relavu - use decompMod , only: bounds_type, ldecomp + use decompMod , only: bounds_type use domainMod , only: ldomain ! !PUBLIC TYPES: implicit none private - save ! !PUBLIC MEMBER FUNCTIONS: public :: ndep_init ! 
position datasets for dynamic ndep @@ -58,7 +56,7 @@ subroutine ndep_init(bounds, NLFilename) use shr_nl_mod , only : shr_nl_find_group_name use shr_log_mod , only : errMsg => shr_log_errMsg use shr_mpi_mod , only : shr_mpi_bcast - use decompMod , only : gsmap_lnd_gdc2glo + use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global ! ! arguments implicit none @@ -96,8 +94,7 @@ subroutine ndep_init(bounds, NLFilename) ! Read ndepdyn_nml namelist if (masterproc) then - nu_nml = getavu() - open( nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) + open( newunit=nu_nml, file=trim(NLFilename), status='old', iostat=nml_error ) call shr_nl_find_group_name(nu_nml, 'ndepdyn_nml', status=nml_error) if (nml_error == 0) then read(nu_nml, nml=ndepdyn_nml,iostat=nml_error) @@ -108,7 +105,6 @@ subroutine ndep_init(bounds, NLFilename) call endrun(msg=' ERROR finding ndepdyn_nml namelist'//errMsg(sourcefile, __LINE__)) end if close(nu_nml) - call relavu( nu_nml ) endif call shr_mpi_bcast(stream_year_first_ndep , mpicom) @@ -141,7 +137,7 @@ subroutine ndep_init(bounds, NLFilename) pio_subsystem=pio_subsystem, & pio_iotype=shr_pio_getiotype(inst_name), & mpicom=mpicom, compid=comp_id, & - gsmap=gsmap_lnd_gdc2glo, ggrid=dom_clm, & + gsmap=gsmap_global, ggrid=dom_clm, & nxg=ldomain%ni, nyg=ldomain%nj, & yearFirst=stream_year_first_ndep, & yearLast=stream_year_last_ndep, & @@ -268,13 +264,12 @@ subroutine clm_domain_mct(bounds, dom_clm, nlevels) !------------------------------------------------------------------- ! 
Set domain data type for internal clm grid - use clm_varcon , only : re - use domainMod , only : ldomain - use mct_mod , only : mct_ggrid, mct_gsMap_lsize, mct_gGrid_init - use mct_mod , only : mct_gsMap_orderedPoints, mct_gGrid_importIAttr - use mct_mod , only : mct_gGrid_importRAttr - use mct_mod , only : mct_gsMap - use decompMod , only : gsmap_lnd_gdc2glo, gsMap_lnd2Dsoi_gdc2glo + use clm_varcon , only : re + use domainMod , only : ldomain + use mct_mod , only : mct_ggrid, mct_gsMap_lsize, mct_gGrid_init + use mct_mod , only : mct_gsMap_orderedPoints, mct_gGrid_importIAttr + use mct_mod , only : mct_gGrid_importRAttr, mct_gsMap + use lnd_set_decomp_and_domain , only : gsMap_lnd2Dsoi_gdc2glo, gsmap_global implicit none ! ! arguments @@ -294,7 +289,7 @@ subroutine clm_domain_mct(bounds, dom_clm, nlevels) nlevs = 1 if ( present(nlevels) ) nlevs = nlevels if ( nlevs == 1 ) then - gsmap => gsmap_lnd_gdc2glo + gsmap => gsmap_global else gsmap => gsMap_lnd2Dsoi_gdc2glo end if diff --git a/src/cpl/nuopc/lnd_comp_nuopc.F90 b/src/cpl/nuopc/lnd_comp_nuopc.F90 index 19c7748297..d4bf072402 100644 --- a/src/cpl/nuopc/lnd_comp_nuopc.F90 +++ b/src/cpl/nuopc/lnd_comp_nuopc.F90 @@ -321,7 +321,7 @@ subroutine InitializeRealize(gcomp, importState, exportState, clock, rc) use ESMF , only : ESMF_VM, ESMF_VMGet use clm_instMod , only : lnd2atm_inst, lnd2glc_inst, water_inst use domainMod , only : ldomain - use decompMod , only : ldecomp, bounds_type, get_proc_bounds + use decompMod , only : bounds_type, get_proc_bounds use lnd_set_decomp_and_domain , only : lnd_set_decomp_and_domain_from_readmesh use lnd_set_decomp_and_domain , only : lnd_set_mesh_for_single_column use lnd_set_decomp_and_domain , only : lnd_set_decomp_and_domain_for_single_column diff --git a/src/cpl/share_esmf/lnd_set_decomp_and_domain.F90 b/src/cpl/share_esmf/lnd_set_decomp_and_domain.F90 index 40f43e7f4d..c881282132 100644 --- a/src/cpl/share_esmf/lnd_set_decomp_and_domain.F90 +++ 
b/src/cpl/share_esmf/lnd_set_decomp_and_domain.F90 @@ -37,7 +37,7 @@ subroutine lnd_set_decomp_and_domain_from_readmesh(driver, vm, meshfile_lnd, mes use decompInitMod , only : decompInit_lnd use domainMod , only : ldomain, domain_init - use decompMod , only : ldecomp, bounds_type, get_proc_bounds + use decompMod , only : gindex_global, bounds_type, get_proc_bounds use clm_varpar , only : nlevsoi use clm_varctl , only : use_soil_moisture_streams @@ -134,7 +134,7 @@ subroutine lnd_set_decomp_and_domain_from_readmesh(driver, vm, meshfile_lnd, mes allocate(gindex_lnd(nlnd)) do g = begg, endg n = 1 + (g - begg) - gindex_lnd(n) = ldecomp%gdc2glo(g) + gindex_lnd(n) = gindex_global(g-begg+1) end do ! Initialize domain data structure @@ -241,7 +241,7 @@ subroutine lnd_set_decomp_and_domain_for_single_column(scol_lon, scol_lat, scol_ type(bounds_type) :: bounds ! bounds !------------------------------------------------------------------------------- - ! Determine ldecomp and ldomain + ! Determine decomp and ldomain call decompInit_lnd(lni=1, lnj=1, amask=(/1/)) ! Initialize processor bounds diff --git a/src/main/GetGlobalValuesMod.F90 b/src/main/GetGlobalValuesMod.F90 index 9e06672c45..aec1406c0c 100644 --- a/src/main/GetGlobalValuesMod.F90 +++ b/src/main/GetGlobalValuesMod.F90 @@ -27,11 +27,10 @@ integer function GetGlobalIndex(decomp_index, clmlevel) ! ! Uses: use shr_log_mod, only: errMsg => shr_log_errMsg - use decompMod , only: bounds_type, get_clmlevel_gsmap, get_proc_bounds + use decompMod , only: bounds_type, get_clmlevel_gindex, get_proc_bounds use spmdMod , only: iam use clm_varcon , only: nameg, namel, namec, namep use clm_varctl , only: iulog - use mct_mod , only: mct_gsMap, mct_gsMap_orderedPoints use shr_sys_mod, only: shr_sys_abort ! ! Arguments @@ -39,10 +38,9 @@ integer function GetGlobalIndex(decomp_index, clmlevel) character(len=*) , intent(in) :: clmlevel ! ! Local Variables: - type(bounds_type) :: bounds_proc ! 
processor bounds - type(mct_gsMap),pointer :: gsmap ! global seg map - integer, pointer,dimension(:) :: gsmap_ordered ! gsmap ordered points - integer :: beg_index ! beginning proc index for clmlevel + type(bounds_type) :: bounds_proc ! processor bounds + integer :: beg_index ! beginning proc index for clmlevel + integer, pointer :: gindex(:) !---------------------------------------------------------------- call get_proc_bounds(bounds_proc) @@ -60,10 +58,8 @@ integer function GetGlobalIndex(decomp_index, clmlevel) errmsg(sourcefile, __LINE__)) end if - call get_clmlevel_gsmap(clmlevel=trim(clmlevel), gsmap=gsmap) - call mct_gsMap_orderedPoints(gsmap, iam, gsmap_ordered) - GetGlobalIndex = gsmap_ordered(decomp_index - beg_index + 1) - deallocate(gsmap_ordered) + call get_clmlevel_gindex(clmlevel=trim(clmlevel), gindex=gindex) + GetGlobalIndex = gindex(decomp_index - beg_index + 1) end function GetGlobalIndex @@ -82,25 +78,24 @@ function GetGlobalIndexArray(decomp_index, bounds1, bounds2, clmlevel) ! Uses: #include "shr_assert.h" use shr_log_mod, only: errMsg => shr_log_errMsg - use decompMod , only: bounds_type, get_clmlevel_gsmap, get_proc_bounds + use decompMod , only: bounds_type, get_clmlevel_gindex, get_proc_bounds use spmdMod , only: iam use clm_varcon , only: nameg, namel, namec, namep use clm_varctl , only: iulog - use mct_mod + use shr_sys_mod, only: shr_sys_abort ! ! Arguments - integer, intent(in) :: bounds1 ! lower bound of the input & returned arrays - integer, intent(in) :: bounds2 ! upper bound of the input & returned arrays - integer, intent(in) :: decomp_index(bounds1:) - character(len=*) , intent(in) :: clmlevel - integer :: GetGlobalIndexArray(bounds1:bounds2) + integer , intent(in) :: bounds1 ! lower bound of the input & returned arrays + integer , intent(in) :: bounds2 ! 
upper bound of the input & returned arrays + integer , intent(in) :: decomp_index(bounds1:) + character(len=*) , intent(in) :: clmlevel + integer :: GetGlobalIndexArray(bounds1:bounds2) ! ! Local Variables: - type(bounds_type) :: bounds_proc ! processor bounds - type(mct_gsMap),pointer :: gsmap ! global seg map - integer, pointer,dimension(:) :: gsmap_ordered ! gsmap ordered points - integer :: beg_index ! beginning proc index for clmlevel - integer :: i + type(bounds_type) :: bounds_proc ! processor bounds + integer :: beg_index ! beginning proc index for clmlevel + integer :: i + integer , pointer :: gindex(:) !---------------------------------------------------------------- SHR_ASSERT_ALL_FL((ubound(decomp_index) == (/bounds2/)), sourcefile, __LINE__) @@ -119,12 +114,10 @@ function GetGlobalIndexArray(decomp_index, bounds1, bounds2, clmlevel) errmsg(__FILE__, __LINE__)) end if - call get_clmlevel_gsmap(clmlevel=trim(clmlevel), gsmap=gsmap) - call mct_gsMap_orderedPoints(gsmap, iam, gsmap_ordered) + call get_clmlevel_gindex(clmlevel=trim(clmlevel), gindex=gindex) do i=bounds1,bounds2 - GetGlobalIndexArray(i) = gsmap_ordered(decomp_index(i) - beg_index + 1) + GetGlobalIndexArray(i) = gindex(decomp_index(i) - beg_index + 1) enddo - deallocate(gsmap_ordered) end function GetGlobalIndexArray @@ -144,6 +137,7 @@ subroutine GetGlobalWrite(decomp_index, clmlevel) use LandunitType , only : lun use ColumnType , only : col use PatchType , only : patch + use spmdMod , only : iam ! ! 
Arguments: integer , intent(in) :: decomp_index @@ -153,6 +147,8 @@ subroutine GetGlobalWrite(decomp_index, clmlevel) integer :: igrc, ilun, icol, ipft !----------------------------------------------------------------------- + write(iulog,*)'proc_id = ',iam + if (trim(clmlevel) == nameg) then igrc = decomp_index diff --git a/src/main/GridcellType.F90 b/src/main/GridcellType.F90 index 30fe988eff..9d31d41e86 100644 --- a/src/main/GridcellType.F90 +++ b/src/main/GridcellType.F90 @@ -21,7 +21,6 @@ module GridcellType type, public :: gridcell_type ! topological mapping functionality, local 1d gdc arrays - integer , pointer :: gindex (:) ! global index real(r8), pointer :: area (:) ! total land area, gridcell (km^2) real(r8), pointer :: lat (:) ! latitude (radians) real(r8), pointer :: lon (:) ! longitude (radians) @@ -63,7 +62,6 @@ subroutine Init(this, begg, endg) !------------------------------------------------------------------------ ! The following is set in InitGridCells - allocate(this%gindex (begg:endg)) ; this%gindex (:) = ispval allocate(this%area (begg:endg)) ; this%area (:) = nan allocate(this%lat (begg:endg)) ; this%lat (:) = nan allocate(this%lon (begg:endg)) ; this%lon (:) = nan @@ -88,7 +86,6 @@ subroutine Clean(this) class(gridcell_type) :: this !------------------------------------------------------------------------ - deallocate(this%gindex ) deallocate(this%area ) deallocate(this%lat ) deallocate(this%lon ) diff --git a/src/main/accumulMod.F90 b/src/main/accumulMod.F90 index 3baba13814..55ac2bfa7d 100644 --- a/src/main/accumulMod.F90 +++ b/src/main/accumulMod.F90 @@ -150,9 +150,9 @@ subroutine init_accum_field (name, units, desc, & ! possible that init_value doesn't matter even in this case). ! ! 
!USES: - use shr_const_mod, only: SHR_CONST_CDAY - use clm_time_manager, only : get_step_size - use decompMod, only : get_proc_bounds + use shr_const_mod , only: SHR_CONST_CDAY + use clm_time_manager , only : get_step_size + use decompMod , only : get_proc_bounds, bounds_type ! ! !ARGUMENTS: implicit none @@ -175,6 +175,7 @@ subroutine init_accum_field (name, units, desc, & integer :: begl, endl ! per-proc beginning and ending landunit indices integer :: begg, endg ! per-proc gridcell ending gridcell indices integer :: begCohort, endCohort ! per-proc beg end cohort indices + type(bounds_type) :: bounds character(len=*), parameter :: subname = 'init_accum_field' !------------------------------------------------------------------------ @@ -188,8 +189,12 @@ subroutine init_accum_field (name, units, desc, & ! Determine necessary indices - call get_proc_bounds(begg, endg, begl, endl, begc, endc, begp, endp, & - begCohort, endCohort ) + call get_proc_bounds(bounds) + begg = bounds%begg; endg = bounds%endg + begl = bounds%begl; endl = bounds%endl + begc = bounds%begc; endc = bounds%endc + begp = bounds%begp; endp = bounds%endp + begCohort = bounds%begCoHort; endCohort = bounds%endCoHort ! update field index ! Consistency check that number of accumulated does not exceed maximum. diff --git a/src/main/clm_initializeMod.F90 b/src/main/clm_initializeMod.F90 index 47b8b32775..17b52bb49e 100644 --- a/src/main/clm_initializeMod.F90 +++ b/src/main/clm_initializeMod.F90 @@ -181,22 +181,19 @@ subroutine initialize2(ni,nj) type(bounds_type) :: bounds_clump ! clump bounds integer :: nclumps ! number of clumps on this processor integer :: nc ! clump index - logical :: lexist logical :: reset_dynbal_baselines_all_columns logical :: reset_dynbal_baselines_lake_columns integer :: begg, endg - integer :: begp, endp - integer :: begc, endc - integer :: begl, endl real(r8), pointer :: data2dptr(:,:) ! temp. pointers for slicing larger arrays - character(len=32) :: subname = 'initialize2' ! 
subroutine name + character(len=32) :: subname = 'initialize2' ! subroutine name !----------------------------------------------------------------------- call t_startf('clm_init2') - ! Get processor bounds - call get_proc_bounds(begg, endg) - + ! Get processor bounds for gridcells + call get_proc_bounds(bounds_proc) + begg = bounds_proc%begg; endg = bounds_proc%endg + ! Initialize glc behavior call glc_behavior%Init(begg, endg, NLFilename) @@ -255,7 +252,13 @@ subroutine initialize2(ni,nj) ! Build hierarchy and topological info for derived types ! This is needed here for the following call to decompInit_glcp - call initGridCells(glc_behavior) + nclumps = get_proc_clumps() + !$OMP PARALLEL DO PRIVATE (nc, bounds_clump) + do nc = 1, nclumps + call get_clump_bounds(nc, bounds_clump) + call initGridCells(bounds_clump, glc_behavior) + end do + !$OMP END PARALLEL DO ! Set global seg maps for gridcells, landlunits, columns and patches call decompInit_glcp(ni, nj, glc_behavior) @@ -263,7 +266,6 @@ subroutine initialize2(ni,nj) ! Set filters call allocFilters() - nclumps = get_proc_clumps() !$OMP PARALLEL DO PRIVATE (nc, bounds_clump) do nc = 1, nclumps call get_clump_bounds(nc, bounds_clump) diff --git a/src/main/controlMod.F90 b/src/main/controlMod.F90 index a5dd42d89c..e91f853e32 100644 --- a/src/main/controlMod.F90 +++ b/src/main/controlMod.F90 @@ -17,7 +17,7 @@ module controlMod use abortutils , only: endrun use spmdMod , only: masterproc, mpicom use spmdMod , only: MPI_CHARACTER, MPI_INTEGER, MPI_LOGICAL, MPI_REAL8 - use decompMod , only: clump_pproc + use decompInitMod , only: clump_pproc use clm_varcon , only: h2osno_max use clm_varpar , only: maxpatch_glc, numrad, nlevsno use fileutils , only: getavu, relavu, get_filename diff --git a/src/main/decompInitMod.F90 b/src/main/decompInitMod.F90 index a6dbdda578..3514979533 100644 --- a/src/main/decompInitMod.F90 +++ b/src/main/decompInitMod.F90 @@ -6,42 +6,42 @@ module decompInitMod ! 
be mapped back to atmosphere physics chunks. ! ! !USES: - use shr_kind_mod , only : r8 => shr_kind_r8 - use shr_sys_mod , only : shr_sys_flush - use shr_log_mod , only : errMsg => shr_log_errMsg - use spmdMod , only : masterproc, iam, npes, mpicom, comp_id - use abortutils , only : endrun - use clm_varctl , only : iulog, use_fates - use clm_varcon , only : grlnd - use GridcellType , only : grc - use LandunitType , only : lun - use ColumnType , only : col - use PatchType , only : patch - use glcBehaviorMod , only : glc_behavior_type - use decompMod - use mct_mod , only : mct_gsMap_init, mct_gsmap_gsize - use FatesInterfaceTypesMod, only : fates_maxElementsPerSite + use shr_kind_mod , only : r8 => shr_kind_r8 + use shr_sys_mod , only : shr_sys_flush + use shr_log_mod , only : errMsg => shr_log_errMsg + use spmdMod , only : masterproc, iam, npes, mpicom + use abortutils , only : endrun + use clm_varctl , only : iulog ! - ! !PUBLIC TYPES: implicit none + private + ! + ! !PUBLIC TYPES: ! ! !PUBLIC MEMBER FUNCTIONS: - public decompInit_lnd ! initializes lnd grid decomposition into clumps and processors - public decompInit_clumps ! initializes atm grid decomposition into clumps - public decompInit_glcp ! initializes g,l,c,p decomp info + public :: decompInit_lnd ! initializes lnd grid decomposition into clumps and processors + public :: decompInit_clumps ! initializes atm grid decomposition into clumps + public :: decompInit_glcp ! initializes g,l,c,p decomp info + ! + ! !PRIVATE MEMBER FUNCTIONS: + ! + ! PUBLIC TYPES: + integer, public :: clump_pproc ! number of clumps per MPI process ! ! !PRIVATE TYPES: - private - integer, pointer :: lcid(:) ! temporary for setting ldecomp - - character(len=*), parameter, private :: sourcefile = & + integer, pointer :: lcid(:) ! temporary for setting decomposition + integer :: nglob_x, nglob_y ! global sizes + integer, parameter :: dbug=1 ! 
0 = min, 1=normal, 2=much, 3=max + character(len=*), parameter :: sourcefile = & __FILE__ + +#include ! mpi library include file !------------------------------------------------------------------------------ contains !------------------------------------------------------------------------------ - subroutine decompInit_lnd(lni,lnj,amask) + subroutine decompInit_lnd(lni, lnj, amask) ! ! !DESCRIPTION: ! This subroutine initializes the land surface decomposition into a clump @@ -49,16 +49,17 @@ subroutine decompInit_lnd(lni,lnj,amask) ! set by clump_pproc ! ! !USES: - use clm_varctl, only : nsegspc + use clm_varctl , only : nsegspc + use decompMod , only : gindex_global, nclumps, clumps + use decompMod , only : bounds_type, get_proc_bounds, procinfo ! ! !ARGUMENTS: - implicit none integer , intent(in) :: amask(:) integer , intent(in) :: lni,lnj ! domain global size ! ! !LOCAL VARIABLES: integer :: lns ! global domain size - integer :: ln,lj ! indices + integer :: ln ! indices integer :: ag,an,ai,aj ! indices integer :: numg ! number of land gridcells logical :: seglen1 ! is segment length one @@ -67,9 +68,10 @@ subroutine decompInit_lnd(lni,lnj,amask) integer :: cid,pid ! indices integer :: n,m,ng ! indices integer :: ier ! error code - integer :: beg,end,lsize,gsize ! used for gsmap init - integer, pointer :: gindex(:) ! global index for gsmap init - integer, pointer :: clumpcnt(:) ! clump index counter + integer :: begg, endg ! beg and end gridcells + integer, pointer :: clumpcnt(:) ! clump index counter + integer, allocatable :: gdc2glo(:)! used to create gindex_global + type(bounds_type) :: bounds ! contains subgrid bounds data !------------------------------------------------------------------------------ lns = lni * lnj @@ -236,24 +238,23 @@ subroutine decompInit_lnd(lni,lnj,amask) end if enddo - ! Set ldecomp + ! 
Set gindex_global - allocate(ldecomp%gdc2glo(numg), stat=ier) + allocate(gdc2glo(numg), stat=ier) if (ier /= 0) then - write(iulog,*) 'decompInit_lnd(): allocation error1 for ldecomp, etc' + write(iulog,*) 'decompInit_lnd(): allocation error1 for gdc2glo , etc' call endrun(msg=errMsg(sourcefile, __LINE__)) end if + gdc2glo(:) = 0 allocate(clumpcnt(nclumps),stat=ier) if (ier /= 0) then write(iulog,*) 'decompInit_lnd(): allocation error1 for clumpcnt' call endrun(msg=errMsg(sourcefile, __LINE__)) end if - ldecomp%gdc2glo(:) = 0 - ag = 0 - ! clumpcnt is the start gdc index of each clump + ag = 0 clumpcnt = 0 ag = 1 do pid = 0,npes-1 @@ -274,29 +275,26 @@ subroutine decompInit_lnd(lni,lnj,amask) cid = lcid(an) if (cid > 0) then ag = clumpcnt(cid) - ldecomp%gdc2glo(ag) = an + gdc2glo(ag) = an clumpcnt(cid) = clumpcnt(cid) + 1 end if end do end do - deallocate(clumpcnt) - - ! Set gsMap_lnd_gdc2glo (the global index here includes mask=0 or ocean points) - - call get_proc_bounds(beg, end) - - allocate(gindex(beg:end)) - do n = beg,end - gindex(n) = ldecomp%gdc2glo(n) + ! Initialize global gindex (non-compressed, includes ocean points) + ! Note that gindex_global goes from (1:endg) + nglob_x = lni ! decompMod module variables + nglob_y = lnj ! decompMod module variables + call get_proc_bounds(bounds) + allocate(gindex_global(1:bounds%endg)) + do n = procinfo%begg,procinfo%endg + gindex_global(n-procinfo%begg+1) = gdc2glo(n) enddo - lsize = end-beg+1 - gsize = lni * lnj - call mct_gsMap_init(gsMap_lnd_gdc2glo, gindex, mpicom, comp_id, lsize, gsize) - deallocate(gindex) - ! Diagnostic output + deallocate(clumpcnt) + deallocate(gdc2glo) + ! 
Diagnostic output if (masterproc) then write(iulog,*)' Surface Grid Characteristics' write(iulog,*)' longitude points = ',lni @@ -306,7 +304,6 @@ subroutine decompInit_lnd(lni,lnj,amask) write(iulog,*)' clumps per process = ',clump_pproc write(iulog,*) end if - call shr_sys_flush(iulog) end subroutine decompInit_lnd @@ -320,35 +317,41 @@ subroutine decompInit_clumps(lni,lnj,glc_behavior) ! set by clump_pproc ! ! !USES: - use subgridMod, only : subgrid_get_gcellinfo - use spmdMod + use subgridMod , only : subgrid_get_gcellinfo + use decompMod , only : bounds_type, clumps, nclumps, procinfo + use decompMod , only : get_proc_global, get_proc_bounds + use decompMod , only : numg, numl, numc, nump, numCohort + use decompMod , only : gindex_global + use glcBehaviorMod , only : glc_behavior_type ! ! !ARGUMENTS: - implicit none - integer , intent(in) :: lni,lnj ! land domain global size - type(glc_behavior_type), intent(in) :: glc_behavior + integer , intent(in) :: lni,lnj ! land domain global size + type(glc_behavior_type) , intent(in) :: glc_behavior ! ! !LOCAL VARIABLES: - integer :: ln,an ! indices - integer :: i,g,l,k ! indices - integer :: cid,pid ! indices - integer :: n,m,np ! indices - integer :: anumg ! lnd num gridcells - integer :: icells ! temporary - integer :: begg, endg ! temporary - integer :: ilunits ! temporary - integer :: icols ! temporary - integer :: ipatches ! temporary - integer :: icohorts ! temporary - integer :: ier ! error code - integer, allocatable :: allvecg(:,:) ! temporary vector "global" + integer :: ln,an ! indices + integer :: i,g,l,k ! indices + integer :: cid,pid ! indices + integer :: n,m,np ! indices + integer :: anumg ! lnd num gridcells + integer :: icells ! temporary + integer :: begg, endg ! temporary + integer :: ilunits ! temporary + integer :: icols ! temporary + integer :: ipatches ! temporary + integer :: icohorts ! temporary + integer :: ier ! error code + integer :: npmin,npmax,npint ! 
do loop values for printing + integer :: clmin,clmax ! do loop values for printing + type(bounds_type) :: bounds ! bounds + integer, allocatable :: allvecg(:,:) ! temporary vector "global" integer, allocatable :: allvecl(:,:) ! temporary vector "local" - integer :: ntest character(len=32), parameter :: subname = 'decompInit_clumps' !------------------------------------------------------------------------------ !--- assign gridcells to clumps (and thus pes) --- - call get_proc_bounds(begg, endg) + call get_proc_bounds(bounds) + begg = bounds%begg; endg = bounds%endg allocate(allvecl(nclumps,5)) ! local clumps [gcells,lunit,cols,patches,coh] allocate(allvecg(nclumps,5)) ! global clumps [gcells,lunit,cols,patches,coh] @@ -366,7 +369,7 @@ subroutine decompInit_clumps(lni,lnj,glc_behavior) allvecg= 0 allvecl= 0 do anumg = begg,endg - an = ldecomp%gdc2glo(anumg) + an = gindex_global(anumg - begg + 1) cid = lcid(an) ln = anumg call subgrid_get_gcellinfo (ln, nlunits=ilunits, ncols=icols, npatches=ipatches, & @@ -471,97 +474,186 @@ subroutine decompInit_clumps(lni,lnj,glc_behavior) deallocate(allvecg,allvecl) deallocate(lcid) + ! Diagnostic output + + call get_proc_global(ng=numg, nl=numl, nc=numc, np=nump, nCohorts=numCohort) + if (masterproc) then + write(iulog,*)' Surface Grid Characteristics' + write(iulog,*)' longitude points = ',lni + write(iulog,*)' latitude points = ',lnj + write(iulog,*)' total number of gridcells = ',numg + write(iulog,*)' total number of landunits = ',numl + write(iulog,*)' total number of columns = ',numc + write(iulog,*)' total number of patches = ',nump + write(iulog,*)' total number of cohorts = ',numCohort + write(iulog,*)' Decomposition Characteristics' + write(iulog,*)' clumps per process = ',clump_pproc + write(iulog,*) + end if + + ! Write out clump and proc info, one pe at a time, + ! 
barrier to control pes overwriting each other on stdout + call shr_sys_flush(iulog) + call mpi_barrier(mpicom,ier) + npmin = 0 + npmax = npes-1 + npint = 1 + if (dbug == 0) then + npmax = 0 + elseif (dbug == 1) then + npmax = min(npes-1,4) + elseif (dbug == 2) then + npint = npes/8 + endif + do np = npmin,npmax,npint + pid = np + if (dbug == 1) then + if (np == 2) pid=npes/2-1 + if (np == 3) pid=npes-2 + if (np == 4) pid=npes-1 + endif + pid = max(pid,0) + pid = min(pid,npes-1) + + if (iam == pid) then + write(iulog,*) + write(iulog,'(4(a,2x,i10))')'proc = ',pid, & + ' beg gridcell= ',procinfo%begg,' end gridcell= ',procinfo%endg, & + ' gridcells per proc = ',procinfo%ncells + write(iulog,'(4(a,2x,i10))')'proc = ',pid, & + ' beg landunit= ',procinfo%begl,' end landunit= ',procinfo%endl, & + ' landunits per proc = ',procinfo%nlunits + write(iulog,'(4(a,2x,i10))')'proc = ',pid, & + ' beg column = ',procinfo%begc,' end column = ',procinfo%endc, & + ' columns per proc = ',procinfo%ncols + write(iulog,'(4(a,2x,i10))')'proc = ',pid, & + ' beg patch = ',procinfo%begp,' end patch = ',procinfo%endp, & + ' patches per proc = ',procinfo%npatches + write(iulog,'(4(a,2x,i10))')'proc = ',pid, & + ' beg cohort = ',procinfo%begCohort,' end cohort = ',procinfo%endCohort, & + ' coh per proc = ',procinfo%nCohorts + write(iulog,'(2(a,2x,i10))')'proc = ',pid,' nclumps = ',procinfo%nclumps + if (dbug == 0) then + clmax = -1 + else + clmax = procinfo%nclumps + endif + do n = 1,clmax + cid = procinfo%cid(n) + write(iulog,'(6(a,2x,i10))')'proc = ',pid,' clump no = ',n, & + ' clump id= ',procinfo%cid(n), & + ' beg gridcell= ',clumps(cid)%begg,' end gridcell= ',clumps(cid)%endg, & + ' gridcells per clump= ',clumps(cid)%ncells + write(iulog,'(6(a,2x,i10))')'proc = ',pid,' clump no = ',n, & + ' clump id= ',procinfo%cid(n), & + ' beg landunit= ',clumps(cid)%begl,' end landunit= ',clumps(cid)%endl, & + ' landunits per clump = ',clumps(cid)%nlunits + write(iulog,'(6(a,2x,i10))')'proc = ',pid,' 
clump no = ',n, & + ' clump id= ',procinfo%cid(n), & + ' beg column = ',clumps(cid)%begc,' end column = ',clumps(cid)%endc, & + ' columns per clump = ',clumps(cid)%ncols + write(iulog,'(6(a,2x,i10))')'proc = ',pid,' clump no = ',n, & + ' clump id= ',procinfo%cid(n), & + ' beg patch = ',clumps(cid)%begp,' end patch = ',clumps(cid)%endp, & + ' patches per clump = ',clumps(cid)%npatches + write(iulog,'(6(a,2x,i10))')'proc = ',pid,' clump no = ',n, & + ' clump id= ',procinfo%cid(n), & + ' beg cohort = ',clumps(cid)%begCohort,' end cohort = ',clumps(cid)%endCohort, & + ' cohorts per clump = ',clumps(cid)%nCohorts + + end do + end if + call shr_sys_flush(iulog) + call mpi_barrier(mpicom,ier) + end do + write(iulog,*) + call shr_sys_flush(iulog) + end subroutine decompInit_clumps !------------------------------------------------------------------------------ subroutine decompInit_glcp(lni,lnj,glc_behavior) ! ! !DESCRIPTION: - ! Determine gsMaps for landunits, columns, patches and cohorts + ! Determine gindex for landunits, columns, patches and cohorts ! ! !USES: - use spmdMod - use spmdGathScatMod - use subgridMod, only : subgrid_get_gcellinfo + use clm_varctl , only : use_fates + use subgridMod , only : subgrid_get_gcellinfo + use decompMod , only : bounds_type, get_proc_global, get_proc_bounds + use decompMod , only : gindex_global + use decompMod , only : gindex_grc, gindex_lun, gindex_col, gindex_patch, gindex_Cohort + use decompMod , only : procinfo, clump_type, clumps, get_proc_global + use LandunitType , only : lun + use ColumnType , only : col + use PatchType , only : patch + use FatesInterfaceTypesMod , only : fates_maxElementsPerSite + use glcBehaviorMod , only : glc_behavior_type ! ! !ARGUMENTS: - implicit none - integer , intent(in) :: lni,lnj ! land domain global size - type(glc_behavior_type), intent(in) :: glc_behavior + integer , intent(in) :: lni,lnj ! land domain global size + type(glc_behavior_type) , intent(in) :: glc_behavior ! ! 
!LOCAL VARIABLES: - integer :: gi,li,ci,pi,coi ! indices - integer :: i,g,k,l,n,np ! indices - integer :: cid,pid ! indices - integer :: begg,endg ! beg,end gridcells - integer :: begl,endl ! beg,end landunits - integer :: begc,endc ! beg,end columns - integer :: begp,endp ! beg,end patches - integer :: begCohort,endCohort! beg,end cohorts - integer :: numg ! total number of gridcells across all processors - integer :: numl ! total number of landunits across all processors - integer :: numc ! total number of columns across all processors - integer :: nump ! total number of patches across all processors - integer :: numCohort ! fates cohorts - integer :: icells ! temporary - integer :: ilunits ! temporary - integer :: icols ! temporary - integer :: ipatches ! temporary - integer :: icohorts ! temporary - integer :: ier ! error code - integer :: npmin,npmax,npint ! do loop values for printing - integer :: clmin,clmax ! do loop values for printing - integer :: locsize,globsize ! used for gsMap init - integer :: ng ! number of gridcells in gsMap_lnd_gdc2glo - integer :: val1, val2 ! temporaries - integer, pointer :: gindex(:) ! global index for gsMap init - integer, pointer :: arrayglob(:) ! temporaroy - integer, pointer :: gstart(:), gcount(:) - integer, pointer :: lstart(:), lcount(:) - integer, pointer :: cstart(:), ccount(:) - integer, pointer :: pstart(:), pcount(:) - integer, pointer :: coStart(:), coCount(:) - integer, pointer :: ioff(:) - integer, parameter :: dbug=1 ! 0 = min, 1=normal, 2=much, 3=max - character(len=32), parameter :: subname = 'decompInit_glcp' + integer :: gi,li,ci,pi,coi ! indices + integer :: i,l,n,np ! indices + integer :: cid,pid ! indices + integer :: numg ! total number of land gridcells across all processors + integer :: numl ! total number of landunits across all processors + integer :: numc ! total number of columns across all processors + integer :: nump ! total number of patches across all processors + integer :: numCohort ! 
fates cohorts + integer :: ilunits ! temporary + integer :: icols ! temporary + integer :: ipatches ! temporary + integer :: icohorts ! temporary + integer :: ier ! error code + integer, pointer :: gcount(:) + integer, pointer :: lcount(:) + integer, pointer :: ccount(:) + integer, pointer :: pcount(:) + integer, pointer :: coCount(:) + type(bounds_type) :: bounds + integer, allocatable :: ioff(:) + integer, allocatable :: gridcells_per_pe(:) ! needed for gindex at all levels + integer, allocatable :: gridcell_offsets(:) ! needed for gindex at all levels + integer, allocatable :: index_gridcells(:) ! needed for gindex at all levels + integer, allocatable :: start_global(:) + integer, allocatable :: start(:) + integer, allocatable :: index_lndgridcells(:) + integer :: count + integer :: temp + integer :: lsize_g, lsize_l, lsize_c, lsize_p, lsize_cohort + integer :: gsize + Character(len=32), parameter :: subname = 'decompInit_glcp' !------------------------------------------------------------------------------ - !init + ! Get processor bounds - call get_proc_bounds(begg, endg, begl, endl, begc, endc, begp, endp, & - begCohort, endCohort) + call get_proc_bounds(bounds) call get_proc_global(ng=numg, nl=numl, nc=numc, np=nump, nCohorts=numCohort) - ! Determine global seg megs - - allocate(gstart(begg:endg)) - gstart(:) = 0 - allocate(gcount(begg:endg)) - gcount(:) = 0 - allocate(lstart(begg:endg)) - lstart(:) = 0 - allocate(lcount(begg:endg)) - lcount(:) = 0 - allocate(cstart(begg:endg)) - cstart(:) = 0 - allocate(ccount(begg:endg)) - ccount(:) = 0 - allocate(pstart(begg:endg)) - pstart(:) = 0 - allocate(pcount(begg:endg)) - pcount(:) = 0 - if ( use_fates ) then - allocate(coStart(begg:endg)) - coStart(:) = 0 - endif - allocate(coCount(begg:endg)) - coCount(:) = 0 - allocate(ioff(begg:endg)) - ioff(:) = 0 - - ! 
Determine gcount, lcount, ccount and pcount - - do gi = begg,endg + lsize_g = bounds%endg + lsize_l = bounds%endl + lsize_c = bounds%endc + lsize_p = bounds%endp + lsize_cohort = bounds%endCohort + gsize = nglob_x * nglob_y + + ! allocate module variables in decompMod.F90 + allocate(gindex_grc(lsize_g)) + allocate(gindex_lun(lsize_l)) + allocate(gindex_col(lsize_c)) + allocate(gindex_patch(lsize_p)) + allocate(gindex_cohort(lsize_cohort)) + + ! Determine counts + allocate(gcount(lsize_g)) ; gcount(:) = 0 + allocate(lcount(lsize_g)) ; lcount(:) = 0 + allocate(ccount(lsize_g)) ; ccount(:) = 0 + allocate(pcount(lsize_g)) ; pcount(:) = 0 + allocate(coCount(lsize_g)) ; coCount(:) = 0 + do gi = 1,lsize_g call subgrid_get_gcellinfo (gi, nlunits=ilunits, ncols=icols, npatches=ipatches, & ncohorts=icohorts, glc_behavior=glc_behavior) gcount(gi) = 1 ! number of gridcells for local gridcell index gi @@ -571,291 +663,195 @@ subroutine decompInit_glcp(lni,lnj,glc_behavior) coCount(gi) = icohorts ! number of fates cohorts for local gricell index gi enddo - ! Determine gstart, lstart, cstart, pstart, coStart for the OUTPUT 1d data structures - - ! gather the gdc subgrid counts to masterproc in glo order - ! compute glo ordered start indices from the counts - ! scatter the subgrid start indices back out to the gdc gridcells - ! set the local gindex array for the subgrid from the subgrid start and count arrays + ! --------------------------------------- + ! Arrays needed to determine gindex_xxx(:) + ! 
--------------------------------------- - ng = mct_gsmap_gsize(gsmap_lnd_gdc2glo) - allocate(arrayglob(ng)) + allocate(ioff(lsize_g)) - arrayglob(:) = 0 - call gather_data_to_master(gcount, arrayglob, grlnd) if (masterproc) then - val1 = arrayglob(1) - arrayglob(1) = 1 - do n = 2,ng - val2 = arrayglob(n) - arrayglob(n) = arrayglob(n-1) + val1 - val1 = val2 - enddo + allocate (gridcells_per_pe(0:npes-1)) + else + allocate(gridcells_per_pe(0)) endif - call scatter_data_from_master(gstart, arrayglob, grlnd) - - ! lstart for gridcell (n) is the total number of the landunits - ! over gridcells 1->n-1 + call mpi_gather(lsize_g, 1, MPI_INTEGER, gridcells_per_pe, 1, MPI_INTEGER, 0, mpicom, ier) - arrayglob(:) = 0 - call gather_data_to_master(lcount, arrayglob, grlnd) if (masterproc) then - val1 = arrayglob(1) - arrayglob(1) = 1 - do n = 2,ng - val2 = arrayglob(n) - arrayglob(n) = arrayglob(n-1) + val1 - val1 = val2 - enddo - endif - call scatter_data_from_master(lstart, arrayglob, grlnd) + allocate(gridcell_offsets(0:npes-1)) + gridcell_offsets(0) = 0 + do n = 1 ,npes-1 + gridcell_offsets(n) = gridcell_offsets(n-1) + gridcells_per_pe(n-1) + end do + else + allocate(gridcell_offsets(0)) + end if - arrayglob(:) = 0 - call gather_data_to_master(ccount, arrayglob, grlnd) if (masterproc) then - val1 = arrayglob(1) - arrayglob(1) = 1 - do n = 2,ng - val2 = arrayglob(n) - arrayglob(n) = arrayglob(n-1) + val1 - val1 = val2 - enddo - endif - call scatter_data_from_master(cstart, arrayglob, grlnd) + allocate(start_global(numg)) ! 
number of landunits in a gridcell + else + allocate(start_global(0)) + end if - arrayglob(:) = 0 - call gather_data_to_master(pcount, arrayglob, grlnd) - if (masterproc) then - val1 = arrayglob(1) - arrayglob(1) = 1 - do n = 2,ng - val2 = arrayglob(n) - arrayglob(n) = arrayglob(n-1) + val1 - val1 = val2 - enddo - endif - call scatter_data_from_master(pstart, arrayglob, grlnd) + allocate(start(lsize_g)) - if ( use_fates ) then - arrayglob(:) = 0 - call gather_data_to_master(coCount, arrayglob, grlnd) - if (masterproc) then - val1 = arrayglob(1) - arrayglob(1) = 1 - do n = 2,ng - val2 = arrayglob(n) - arrayglob(n) = arrayglob(n-1) + val1 - val1 = val2 - enddo - endif - call scatter_data_from_master(coStart, arrayglob, grlnd) - endif + ! --------------------------------------- + ! Gridcell gindex (compressed, no ocean points) + ! --------------------------------------- - deallocate(arrayglob) + ! gstart_global the global index of all of the land points in task order + call mpi_gatherv(gindex_global, lsize_g, MPI_INTEGER, start_global, & + gridcells_per_pe, gridcell_offsets, MPI_INTEGER, 0, mpicom, ier) - ! Gridcell gsmap (compressed, no ocean points) + if (masterproc) then + ! Create a global size index_gridcells that will have 0 for all ocean points + ! Fill the location of each land point with the gatherv location of that land point + allocate(index_gridcells(gsize)) + index_gridcells(:) = 0 + do n = 1,numg + ! if n = 3, start_global(3)=100, index_gridcells(100)=3 + ! n is the task order location - so for global index 100 - the task order location is 3 + index_gridcells(start_global(n)) = n + end do + + ! Create a land-only global index based on the original global index ordering + ! Count is the running global land index + allocate(index_lndgridcells(numg)) + count = 0 + do n = 1,gsize + if (index_gridcells(n) > 0) then + count = count + 1 + ! e.g. 
n=20, count=4 and index_gridcells(20)=100, then start_global(100)=4 + start_global(index_gridcells(n)) = count + index_lndgridcells(count) = index_gridcells(n) + end if + end do + deallocate(index_gridcells) + end if - allocate(gindex(begg:endg)) - i = begg-1 - do gi = begg,endg - if (gcount(gi) < 1) then - write(iulog,*) 'decompInit_glcp warning count g ',k,iam,g,gcount(g) - endif - do l = 1,gcount(gi) - i = i + 1 - if (i < begg .or. i > endg) then - write(iulog,*) 'decompInit_glcp error i ',i,begg,endg - call endrun(msg=errMsg(sourcefile, __LINE__)) - endif - gindex(i) = gstart(gi) + l - 1 - enddo - enddo - if (i /= endg) then - write(iulog,*) 'decompInit_glcp error size ',i,begg,endg - call endrun(msg=errMsg(sourcefile, __LINE__)) - endif - locsize = endg-begg+1 - globsize = numg - call mct_gsMap_init(gsmap_gce_gdc2glo, gindex, mpicom, comp_id, locsize, globsize) - deallocate(gindex) + ! Determine gindex_grc + call mpi_scatterv(start_global, gridcells_per_pe, gridcell_offsets, MPI_INTEGER, gindex_grc, & + lsize_g, MPI_INTEGER, 0, mpicom, ier) + deallocate(gcount) - ! Landunit gsmap + ! --------------------------------------- + ! Landunit gindex + ! 
--------------------------------------- + + start(:) = 0 + call mpi_gatherv(lcount, lsize_g, MPI_INTEGER, start_global, & + gridcells_per_pe, gridcell_offsets, MPI_INTEGER, 0, mpicom, ier) + if (masterproc) then + count = 1 + do n = 1,numg + temp = start_global(index_lndgridcells(n)) + start_global(index_lndgridcells(n)) = count + count = count + temp + end do + endif + call mpi_scatterv(start_global, gridcells_per_pe, gridcell_offsets, MPI_INTEGER, start, & + lsize_g, MPI_INTEGER, 0, mpicom, ier) - allocate(gindex(begl:endl)) ioff(:) = 0 - do li = begl,endl - gi = lun%gridcell(li) !===this is determined internally from how landunits are spread out in memory - gindex(li) = lstart(gi) + ioff(gi) !=== the output gindex is ALWAYS the same regardless of how landuntis are spread out in memory + do li = 1,lsize_l + gi = lun%gridcell(li) + gindex_lun(li) = start(gi) + ioff(gi) ioff(gi) = ioff(gi) + 1 - ! check that this is less than [lstart(gi) + lcount(gi)] enddo - locsize = endl-begl+1 - globsize = numl - call mct_gsMap_init(gsmap_lun_gdc2glo, gindex, mpicom, comp_id, locsize, globsize) - deallocate(gindex) + deallocate(lcount) + + ! --------------------------------------- + ! Column gindex + ! --------------------------------------- - ! Column gsmap + start(:) = 0 + call mpi_gatherv(ccount, lsize_g, MPI_INTEGER, start_global, & + gridcells_per_pe, gridcell_offsets, MPI_INTEGER, 0, mpicom, ier) + if (masterproc) then + count = 1 + do n = 1,numg + temp = start_global(index_lndgridcells(n)) + start_global(index_lndgridcells(n)) = count + count = count + temp + end do + endif + call mpi_scatterv(start_global, gridcells_per_pe, gridcell_offsets, MPI_INTEGER, start, & + lsize_g, MPI_INTEGER, 0, mpicom, ier) - allocate(gindex(begc:endc)) ioff(:) = 0 - do ci = begc,endc + do ci = 1,lsize_c gi = col%gridcell(ci) - gindex(ci) = cstart(gi) + ioff(gi) + gindex_col(ci) = start(gi) + ioff(gi) ioff(gi) = ioff(gi) + 1 - ! 
check that this is less than [cstart(gi) + ccount(gi)] enddo - locsize = endc-begc+1 - globsize = numc - call mct_gsMap_init(gsmap_col_gdc2glo, gindex, mpicom, comp_id, locsize, globsize) - deallocate(gindex) + deallocate(ccount) - ! PATCH gsmap + ! --------------------------------------- + ! PATCH gindex + ! --------------------------------------- + + start(:) = 0 + call mpi_gatherv(pcount, lsize_g, MPI_INTEGER, start_global, & + gridcells_per_pe, gridcell_offsets, MPI_INTEGER, 0, mpicom, ier) + if (masterproc) then + count = 1 + do n = 1,numg + temp = start_global(index_lndgridcells(n)) + start_global(index_lndgridcells(n)) = count + count = count + temp + end do + endif + call mpi_scatterv(start_global, gridcells_per_pe, gridcell_offsets, MPI_INTEGER, start, & + lsize_g, MPI_INTEGER, 0, mpicom, ier) - allocate(gindex(begp:endp)) ioff(:) = 0 - do pi = begp,endp + do pi = 1,lsize_p gi = patch%gridcell(pi) - gindex(pi) = pstart(gi) + ioff(gi) + gindex_patch(pi) = start(gi) + ioff(gi) ioff(gi) = ioff(gi) + 1 - ! check that this is less than [pstart(gi) + pcount(gi)] enddo - locsize = endp-begp+1 - globsize = nump - call mct_gsMap_init(gsmap_patch_gdc2glo, gindex, mpicom, comp_id, locsize, globsize) - deallocate(gindex) + deallocate(pcount) - ! FATES gsmap for the cohort/element vector + ! --------------------------------------- + ! FATES gindex for the cohort/element vector + ! 
--------------------------------------- if ( use_fates ) then - allocate(gindex(begCohort:endCohort)) + start(:) = 0 + call mpi_gatherv(coCount, lsize_g, MPI_INTEGER, start_global, & + gridcells_per_pe, gridcell_offsets, MPI_INTEGER, 0, mpicom, ier) + if (masterproc) then + count = 1 + do n = 1,numg + temp = start_global(index_lndgridcells(n)) + start_global(index_lndgridcells(n)) = count + count = count + temp + end do + endif + call mpi_scatterv(start_global, gridcells_per_pe, gridcell_offsets, MPI_INTEGER, start, & + lsize_g, MPI_INTEGER, 0, mpicom, ier) + ioff(:) = 0 - gi = begg - do coi = begCohort,endCohort - gindex(coi) = coStart(gi) + ioff(gi) + gi = 1 + do coi = 1, lsize_cohort + gindex_cohort(coi) = start(gi) + ioff(gi) ioff(gi) = ioff(gi) + 1 - if ( mod(coi, fates_maxElementsPerSite ) == 0 ) gi = gi + 1 + if ( mod(coi, fates_maxElementsPerSite ) == 0 ) then + gi = gi + 1 + end if enddo - locsize = endCohort-begCohort+1 - globsize = numCohort - call mct_gsMap_init(gsMap_cohort_gdc2glo, gindex, mpicom, comp_id, locsize, globsize) - deallocate(gindex) + deallocate(coCount) endif - ! Deallocate start/count arrays - deallocate(gstart, gcount) - deallocate(lstart, lcount) - deallocate(cstart, ccount) - deallocate(pstart, pcount) - if ( use_fates ) then - deallocate(coStart,coCount) - endif - deallocate(ioff) - - ! Diagnostic output - - if (masterproc) then - write(iulog,*)' Surface Grid Characteristics' - write(iulog,*)' longitude points = ',lni - write(iulog,*)' latitude points = ',lnj - write(iulog,*)' total number of gridcells = ',numg - write(iulog,*)' total number of landunits = ',numl - write(iulog,*)' total number of columns = ',numc - write(iulog,*)' total number of patches = ',nump - write(iulog,*)' total number of cohorts = ',numCohort - write(iulog,*)' Decomposition Characteristics' - write(iulog,*)' clumps per process = ',clump_pproc - write(iulog,*) - end if - - ! Write out clump and proc info, one pe at a time, - ! 
barrier to control pes overwriting each other on stdout - - call shr_sys_flush(iulog) - call mpi_barrier(mpicom,ier) - npmin = 0 - npmax = npes-1 - npint = 1 - if (dbug == 0) then - npmax = 0 - elseif (dbug == 1) then - npmax = min(npes-1,4) - elseif (dbug == 2) then - npint = npes/8 - endif - do np = npmin,npmax,npint - pid = np - if (dbug == 1) then - if (np == 2) pid=npes/2-1 - if (np == 3) pid=npes-2 - if (np == 4) pid=npes-1 - endif - pid = max(pid,0) - pid = min(pid,npes-1) + ! --------------------------------------- + ! Deallocate memory + ! --------------------------------------- - if (iam == pid) then - write(iulog,*) - write(iulog,*)'proc= ',pid,& - ' beg gridcell= ',procinfo%begg, & - ' end gridcell= ',procinfo%endg, & - ' total gridcells per proc= ',procinfo%ncells - write(iulog,*)'proc= ',pid,& - ' beg landunit= ',procinfo%begl, & - ' end landunit= ',procinfo%endl, & - ' total landunits per proc= ',procinfo%nlunits - write(iulog,*)'proc= ',pid,& - ' beg column = ',procinfo%begc, & - ' end column = ',procinfo%endc, & - ' total columns per proc = ',procinfo%ncols - write(iulog,*)'proc= ',pid,& - ' beg patch = ',procinfo%begp, & - ' end patch = ',procinfo%endp, & - ' total patches per proc = ',procinfo%npatches - write(iulog,*)'proc= ',pid,& - ' beg coh = ',procinfo%begCohort, & - ' end coh = ',procinfo%endCohort, & - ' total coh per proc = ',procinfo%nCohorts - write(iulog,*)'proc= ',pid,' nclumps = ',procinfo%nclumps - - clmin = 1 - clmax = procinfo%nclumps - if (dbug == 1) then - clmax = 1 - elseif (dbug == 0) then - clmax = -1 - endif - do n = clmin,clmax - cid = procinfo%cid(n) - write(iulog,*)'proc= ',pid,' clump no = ',n, & - ' clump id= ',procinfo%cid(n), & - ' beg gridcell= ',clumps(cid)%begg, & - ' end gridcell= ',clumps(cid)%endg, & - ' total gridcells per clump= ',clumps(cid)%ncells - write(iulog,*)'proc= ',pid,' clump no = ',n, & - ' clump id= ',procinfo%cid(n), & - ' beg landunit= ',clumps(cid)%begl, & - ' end landunit= ',clumps(cid)%endl, & 
- ' total landunits per clump = ',clumps(cid)%nlunits - write(iulog,*)'proc= ',pid,' clump no = ',n, & - ' clump id= ',procinfo%cid(n), & - ' beg column = ',clumps(cid)%begc, & - ' end column = ',clumps(cid)%endc, & - ' total columns per clump = ',clumps(cid)%ncols - write(iulog,*)'proc= ',pid,' clump no = ',n, & - ' clump id= ',procinfo%cid(n), & - ' beg patch = ',clumps(cid)%begp, & - ' end patch = ',clumps(cid)%endp, & - ' total patches per clump = ',clumps(cid)%npatches - write(iulog,*)'proc= ',pid,' clump no = ',n, & - ' clump id= ',procinfo%cid(n), & - ' beg cohort = ',clumps(cid)%begCohort, & - ' end cohort = ',clumps(cid)%endCohort, & - ' total cohorts per clump = ',clumps(cid)%nCohorts - end do - end if - call shr_sys_flush(iulog) - call mpi_barrier(mpicom,ier) - end do - call shr_sys_flush(iulog) + deallocate(ioff) + deallocate(gridcells_per_pe) + deallocate(gridcell_offsets) + deallocate(start) + deallocate(start_global) + if (allocated(index_lndgridcells)) deallocate(index_lndgridcells) end subroutine decompInit_glcp diff --git a/src/main/decompMod.F90 b/src/main/decompMod.F90 index 3f885e090b..0b4142dc9e 100644 --- a/src/main/decompMod.F90 +++ b/src/main/decompMod.F90 @@ -7,15 +7,12 @@ module decompMod ! ! !USES: use shr_kind_mod, only : r8 => shr_kind_r8 - ! Must use shr_sys_abort rather than endrun here to avoid circular dependency - use shr_sys_mod , only : shr_sys_abort + + use shr_sys_mod , only : shr_sys_abort ! use shr_sys_abort instead of endrun here to avoid circular dependency use clm_varctl , only : iulog - use clm_varcon , only : grlnd, nameg, namel, namec, namep, nameCohort - use mct_mod , only : mct_gsMap ! ! !PUBLIC TYPES: implicit none - integer, public :: clump_pproc ! number of clumps per MPI process ! Define possible bounds subgrid levels integer, parameter, public :: BOUNDS_SUBGRID_GRIDCELL = 1 @@ -29,25 +26,14 @@ module decompMod integer, parameter, public :: BOUNDS_LEVEL_CLUMP = 2 ! ! !PUBLIC MEMBER FUNCTIONS: - public get_beg ! 
get beg bound for a given subgrid level public get_end ! get end bound for a given subgrid level public get_proc_clumps ! number of clumps for this processor public get_proc_total ! total no. of gridcells, landunits, columns and patchs for any processor public get_proc_global ! total gridcells, landunits, columns, patchs across all processors public get_clmlevel_gsize ! get global size associated with clmlevel - public get_clmlevel_gsmap ! get gsmap associated with clmlevel - - interface get_clump_bounds - module procedure get_clump_bounds_old - module procedure get_clump_bounds_new - end interface + public get_clmlevel_gindex! get global size associated with clmlevel public get_clump_bounds ! clump beg and end gridcell,landunit,column,patch - - interface get_proc_bounds - module procedure get_proc_bounds_old - module procedure get_proc_bounds_new - end interface public get_proc_bounds ! this processor beg and end gridcell,landunit,column,patch ! !PRIVATE MEMBER FUNCTIONS: @@ -55,38 +41,30 @@ module decompMod ! !PRIVATE TYPES: private ! (now mostly public for decompinitmod) - integer,public :: nclumps ! total number of clumps across all processors - integer,public :: numg ! total number of gridcells on all procs - integer,public :: numl ! total number of landunits on all procs - integer,public :: numc ! total number of columns on all procs - integer,public :: nump ! total number of patchs on all procs - integer,public :: numCohort ! total number of fates cohorts on all procs - type bounds_type - integer :: begg, endg ! beginning and ending gridcell index - integer :: begl, endl ! beginning and ending landunit index - integer :: begc, endc ! beginning and ending column index - integer :: begp, endp ! beginning and ending patch index + integer :: begg, endg ! beginning and ending gridcell index + integer :: begl, endl ! beginning and ending landunit index + integer :: begc, endc ! beginning and ending column index + integer :: begp, endp ! 
beginning and ending patch index integer :: begCohort, endCohort ! beginning and ending cohort indices - - integer :: level ! whether defined on the proc or clump level - integer :: clump_index ! if defined on the clump level, this gives the clump index + integer :: level ! whether defined on the proc or clump level + integer :: clump_index ! if defined on the clump level, this gives the clump index end type bounds_type - public bounds_type + public :: bounds_type !---global information on each pe type processor_type - integer :: nclumps ! number of clumps for processor_type iam - integer,pointer :: cid(:) ! clump indices - integer :: ncells ! number of gridcells in proc - integer :: nlunits ! number of landunits in proc - integer :: ncols ! number of columns in proc - integer :: npatches ! number of patchs in proc - integer :: nCohorts ! number of cohorts in proc - integer :: begg, endg ! beginning and ending gridcell index - integer :: begl, endl ! beginning and ending landunit index - integer :: begc, endc ! beginning and ending column index - integer :: begp, endp ! beginning and ending patch index + integer :: nclumps ! number of clumps for processor_type iam + integer,pointer :: cid(:) ! clump indices + integer :: ncells ! number of gridcells in proc + integer :: nlunits ! number of landunits in proc + integer :: ncols ! number of columns in proc + integer :: npatches ! number of patchs in proc + integer :: nCohorts ! number of cohorts in proc + integer :: begg, endg ! beginning and ending gridcell index + integer :: begl, endl ! beginning and ending landunit index + integer :: begc, endc ! beginning and ending column index + integer :: begp, endp ! beginning and ending patch index integer :: begCohort, endCohort ! beginning and ending cohort indices end type processor_type public processor_type @@ -98,7 +76,7 @@ module decompMod integer :: ncells ! number of gridcells in clump integer :: nlunits ! number of landunits in clump integer :: ncols ! 
number of columns in clump - integer :: npatches ! number of patchs in clump + integer :: npatches ! number of patchs in clump integer :: nCohorts ! number of cohorts in proc integer :: begg, endg ! beginning and ending gridcell index integer :: begl, endl ! beginning and ending landunit index @@ -109,23 +87,21 @@ module decompMod public clump_type type(clump_type),public, allocatable :: clumps(:) - !---global information on each pe - !--- glo = 1d global sn ordered - !--- gdc = 1d global dc ordered compressed - type decomp_type - integer,pointer :: gdc2glo(:) ! 1d gdc to 1d glo - end type decomp_type - public decomp_type - type(decomp_type),public,target :: ldecomp - - type(mct_gsMap) ,public,target :: gsMap_lnd_gdc2glo ! GS map for full 2D land grid - type(mct_gsMap) ,public,target :: gsMap_gce_gdc2glo ! GS map for 1D gridcells - type(mct_gsMap) ,public,target :: gsMap_lun_gdc2glo ! GS map for 1D landunits - type(mct_gsMap) ,public,target :: gsMap_col_gdc2glo ! GS map for 1d columns - type(mct_gsMap) ,public,target :: gsMap_patch_gdc2glo ! GS map for 1D patches - type(mct_gsMap) ,public,target :: gsMap_cohort_gdc2glo ! GS map for 1D cohorts (only for FATES) - - type(mct_gsMap) ,public,target :: gsMap_lnd2Dsoi_gdc2glo ! GS map for full 3D land grid with soil levels as 3rd dim + ! ---global sizes + integer,public :: nclumps ! total number of clumps across all processors + integer,public :: numg ! total number of gridcells on all procs + integer,public :: numl ! total number of landunits on all procs + integer,public :: numc ! total number of columns on all procs + integer,public :: nump ! total number of patchs on all procs + integer,public :: numCohort ! total number of fates cohorts on all procs + + ! ---NOTE: the following are allocated with a lower bound of 1! + integer, public, pointer :: gindex_global(:) => null() ! includes ocean points + integer, public, pointer :: gindex_grc(:) => null() ! 
does not include ocean points + integer, public, pointer :: gindex_lun(:) => null() + integer, public, pointer :: gindex_col(:) => null() + integer, public, pointer :: gindex_patch(:) => null() + integer, public, pointer :: gindex_cohort(:) => null() !------------------------------------------------------------------------------ contains @@ -186,12 +162,11 @@ pure function get_end(bounds, subgrid_level) result(end_index) ! !USES: ! ! !ARGUMENTS: - integer :: end_index ! function result - type(bounds_type), intent(in) :: bounds - integer, intent(in) :: subgrid_level + integer :: end_index ! function result + type(bounds_type) , intent(in) :: bounds + integer , intent(in) :: subgrid_level ! ! !LOCAL VARIABLES: - character(len=*), parameter :: subname = 'get_end' !----------------------------------------------------------------------- @@ -213,7 +188,7 @@ pure function get_end(bounds, subgrid_level) result(end_index) end function get_end !------------------------------------------------------------------------------ - subroutine get_clump_bounds_new (n, bounds) + subroutine get_clump_bounds (n, bounds) ! ! !DESCRIPTION: ! Determine clump bounds @@ -236,56 +211,30 @@ subroutine get_clump_bounds_new (n, bounds) ! FIX(SPM, 090314) - for debugging fates and openMP !write(iulog,*) 'SPM omp debug decompMod 1 ', & !OMP_GET_NUM_THREADS(),OMP_GET_MAX_THREADS(),OMP_GET_THREAD_NUM() - if ( OMP_GET_NUM_THREADS() == 1 .and. 
OMP_GET_MAX_THREADS() > 1 )then call shr_sys_abort( trim(subname)//' ERROR: Calling from inside a non-threaded region)') end if #endif cid = procinfo%cid(n) - bounds%begp = clumps(cid)%begp - bounds%endp = clumps(cid)%endp - bounds%begc = clumps(cid)%begc - bounds%endc = clumps(cid)%endc - bounds%begl = clumps(cid)%begl - bounds%endl = clumps(cid)%endl - bounds%begg = clumps(cid)%begg - bounds%endg = clumps(cid)%endg - bounds%begCohort = clumps(cid)%begCohort - bounds%endCohort = clumps(cid)%endCohort - + bounds%begp = clumps(cid)%begp - procinfo%begp + 1 + bounds%endp = clumps(cid)%endp - procinfo%begp + 1 + bounds%begc = clumps(cid)%begc - procinfo%begc + 1 + bounds%endc = clumps(cid)%endc - procinfo%begc + 1 + bounds%begl = clumps(cid)%begl - procinfo%begl + 1 + bounds%endl = clumps(cid)%endl - procinfo%begl + 1 + bounds%begg = clumps(cid)%begg - procinfo%begg + 1 + bounds%endg = clumps(cid)%endg - procinfo%begg + 1 + bounds%begCohort = clumps(cid)%begCohort - procinfo%begCohort + 1 + bounds%endCohort = clumps(cid)%endCohort - procinfo%begCohort + 1 + bounds%level = BOUNDS_LEVEL_CLUMP bounds%clump_index = n - end subroutine get_clump_bounds_new - - !------------------------------------------------------------------------------ - subroutine get_clump_bounds_old (n, begg, endg, begl, endl, begc, endc, begp, endp, & - begCohort, endCohort) - integer, intent(in) :: n ! proc clump index - integer, intent(out) :: begp, endp ! clump beg and end patch indices - integer, intent(out) :: begc, endc ! clump beg and end column indices - integer, intent(out) :: begl, endl ! clump beg and end landunit indices - integer, intent(out) :: begg, endg ! clump beg and end gridcell indices - integer, intent(out) :: begCohort, endCohort ! cohort beg and end gridcell indices - integer :: cid ! 
clump id - !------------------------------------------------------------------------------ - - cid = procinfo%cid(n) - begp = clumps(cid)%begp - endp = clumps(cid)%endp - begc = clumps(cid)%begc - endc = clumps(cid)%endc - begl = clumps(cid)%begl - endl = clumps(cid)%endl - begg = clumps(cid)%begg - endg = clumps(cid)%endg - begCohort = clumps(cid)%begCohort - endCohort = clumps(cid)%endCohort - end subroutine get_clump_bounds_old + end subroutine get_clump_bounds !------------------------------------------------------------------------------ - subroutine get_proc_bounds_new (bounds) + subroutine get_proc_bounds (bounds) ! ! !DESCRIPTION: ! Retrieve processor bounds @@ -306,50 +255,26 @@ subroutine get_proc_bounds_new (bounds) ! FIX(SPM, 090314) - for debugging fates and openMP !write(*,*) 'SPM omp debug decompMod 2 ', & !OMP_GET_NUM_THREADS(),OMP_GET_MAX_THREADS(),OMP_GET_THREAD_NUM() - if ( OMP_GET_NUM_THREADS() > 1 )then call shr_sys_abort( trim(subname)//' ERROR: Calling from inside a threaded region') end if #endif - bounds%begp = procinfo%begp - bounds%endp = procinfo%endp - bounds%begc = procinfo%begc - bounds%endc = procinfo%endc - bounds%begl = procinfo%begl - bounds%endl = procinfo%endl - bounds%begg = procinfo%begg - bounds%endg = procinfo%endg - bounds%begCohort = procinfo%begCohort - bounds%endCohort = procinfo%endCohort + bounds%begp = 1 + bounds%endp = procinfo%endp - procinfo%begp + 1 + bounds%begc = 1 + bounds%endc = procinfo%endc - procinfo%begc + 1 + bounds%begl = 1 + bounds%endl = procinfo%endl - procinfo%begl + 1 + bounds%begg = 1 + bounds%endg = procinfo%endg - procinfo%begg + 1 + bounds%begCohort = 1 + bounds%endCohort = procinfo%endCohort - procinfo%begCohort + 1 bounds%level = BOUNDS_LEVEL_PROC bounds%clump_index = -1 ! 
irrelevant for proc, so assigned a bogus value - end subroutine get_proc_bounds_new - - !------------------------------------------------------------------------------ - subroutine get_proc_bounds_old (begg, endg, begl, endl, begc, endc, begp, endp, & - begCohort, endCohort) - - integer, optional, intent(out) :: begp, endp ! proc beg and end patch indices - integer, optional, intent(out) :: begc, endc ! proc beg and end column indices - integer, optional, intent(out) :: begl, endl ! proc beg and end landunit indices - integer, optional, intent(out) :: begg, endg ! proc beg and end gridcell indices - integer, optional, intent(out) :: begCohort, endCohort ! cohort beg and end gridcell indices - !------------------------------------------------------------------------------ - - if (present(begp)) begp = procinfo%begp - if (present(endp)) endp = procinfo%endp - if (present(begc)) begc = procinfo%begc - if (present(endc)) endc = procinfo%endc - if (present(begl)) begl = procinfo%begl - if (present(endl)) endl = procinfo%endl - if (present(begg)) begg = procinfo%begg - if (present(endg)) endg = procinfo%endg - if (present(begCohort)) begCohort = procinfo%begCohort - if (present(endCohort)) endCohort = procinfo%endCohort - end subroutine get_proc_bounds_old + end subroutine get_proc_bounds !------------------------------------------------------------------------------ subroutine get_proc_total(pid, ncells, nlunits, ncols, npatches, nCohorts) @@ -426,6 +351,7 @@ integer function get_clmlevel_gsize (clmlevel) ! ! !USES: use domainMod , only : ldomain + use clm_varcon, only : grlnd, nameg, namel, namec, namep, nameCohort ! ! !ARGUMENTS: character(len=*), intent(in) :: clmlevel !type of clm 1d array @@ -452,34 +378,37 @@ integer function get_clmlevel_gsize (clmlevel) end function get_clmlevel_gsize !----------------------------------------------------------------------- - subroutine get_clmlevel_gsmap (clmlevel, gsmap) + subroutine get_clmlevel_gindex (clmlevel, gindex) ! ! 
!DESCRIPTION: - ! Compute arguments for gatherv, scatterv for vectors + ! Get subgrid global index space + ! + ! !USES + use clm_varcon , only : grlnd, nameg, namel, namec, namep, nameCohort ! ! !ARGUMENTS: character(len=*), intent(in) :: clmlevel ! type of input data - type(mct_gsmap) , pointer :: gsmap + integer , pointer :: gindex(:) !---------------------------------------------------------------------- select case (clmlevel) case(grlnd) - gsmap => gsmap_lnd_gdc2glo + gindex => gindex_global case(nameg) - gsmap => gsmap_gce_gdc2glo + gindex => gindex_grc case(namel) - gsmap => gsmap_lun_gdc2glo + gindex => gindex_lun case(namec) - gsmap => gsmap_col_gdc2glo + gindex => gindex_col case(namep) - gsmap => gsmap_patch_gdc2glo + gindex => gindex_patch case(nameCohort) - gsmap => gsMap_cohort_gdc2glo + gindex => gindex_cohort case default - write(iulog,*) 'get_clmlevel_gsmap: Invalid expansion character: ',trim(clmlevel) + write(iulog,*) 'get_clmlevel_gindex: Invalid expansion character: ',trim(clmlevel) call shr_sys_abort() end select - end subroutine get_clmlevel_gsmap + end subroutine get_clmlevel_gindex end module decompMod diff --git a/src/main/histFileMod.F90 b/src/main/histFileMod.F90 index 9ce24163ff..7bd416d19e 100644 --- a/src/main/histFileMod.F90 +++ b/src/main/histFileMod.F90 @@ -3282,7 +3282,7 @@ subroutine hfields_1dinfo(t, mode) ! Write/define 1d info for history tape. ! ! !USES: - use decompMod , only : ldecomp + use decompMod , only : gindex_global use domainMod , only : ldomain, ldomain ! ! !ARGUMENTS: @@ -3294,6 +3294,7 @@ subroutine hfields_1dinfo(t, mode) integer :: k ! 1d index integer :: g,c,l,p ! indices integer :: ier ! errir status + integer :: gindex ! global gridcell index real(r8), pointer :: rgarr(:) ! temporary real(r8), pointer :: rcarr(:) ! temporary real(r8), pointer :: rlarr(:) ! temporary @@ -3302,7 +3303,7 @@ subroutine hfields_1dinfo(t, mode) integer , pointer :: icarr(:) ! temporary integer , pointer :: ilarr(:) ! 
temporary integer , pointer :: iparr(:) ! temporary - type(file_desc_t), pointer :: ncid ! netcdf file + type(file_desc_t), pointer :: ncid ! netcdf file type(bounds_type) :: bounds character(len=*),parameter :: subname = 'hfields_1dinfo' !----------------------------------------------------------------------- @@ -3463,11 +3464,13 @@ subroutine hfields_1dinfo(t, mode) call ncd_io(varname='grid1d_lon', data=grc%londeg, dim1name=nameg, ncid=ncid, flag='write') call ncd_io(varname='grid1d_lat', data=grc%latdeg, dim1name=nameg, ncid=ncid, flag='write') do g = bounds%begg,bounds%endg - igarr(g)= mod(ldecomp%gdc2glo(g)-1,ldomain%ni) + 1 + gindex = gindex_global(g-bounds%begg+1) + igarr(g)= mod(gindex-1,ldomain%ni) + 1 enddo call ncd_io(varname='grid1d_ixy', data=igarr , dim1name=nameg, ncid=ncid, flag='write') do g = bounds%begg,bounds%endg - igarr(g)= (ldecomp%gdc2glo(g) - 1)/ldomain%ni + 1 + gindex = gindex_global(g-bounds%begg+1) + igarr(g)= (gindex-1)/ldomain%ni + 1 enddo call ncd_io(varname='grid1d_jxy', data=igarr , dim1name=nameg, ncid=ncid, flag='write') @@ -3482,11 +3485,13 @@ subroutine hfields_1dinfo(t, mode) enddo call ncd_io(varname='land1d_lat', data=rlarr, dim1name=namel, ncid=ncid, flag='write') do l= bounds%begl,bounds%endl - ilarr(l) = mod(ldecomp%gdc2glo(lun%gridcell(l))-1,ldomain%ni) + 1 + gindex = gindex_global(lun%gridcell(l)-bounds%begg+1) + ilarr(l) = mod(gindex-1,ldomain%ni) + 1 enddo call ncd_io(varname='land1d_ixy', data=ilarr, dim1name=namel, ncid=ncid, flag='write') do l = bounds%begl,bounds%endl - ilarr(l) = (ldecomp%gdc2glo(lun%gridcell(l))-1)/ldomain%ni + 1 + gindex = gindex_global(lun%gridcell(l)-bounds%begg+1) + ilarr(l) = (gindex-1)/ldomain%ni + 1 enddo call ncd_io(varname='land1d_jxy' , data=ilarr , dim1name=namel, ncid=ncid, flag='write') ilarr = GetGlobalIndexArray(lun%gridcell(bounds%begl:bounds%endl), bounds%begl, bounds%endl, clmlevel=nameg) @@ -3506,11 +3511,13 @@ subroutine hfields_1dinfo(t, mode) enddo call 
ncd_io(varname='cols1d_lat', data=rcarr, dim1name=namec, ncid=ncid, flag='write') do c = bounds%begc,bounds%endc - icarr(c) = mod(ldecomp%gdc2glo(col%gridcell(c))-1,ldomain%ni) + 1 + gindex = gindex_global(col%gridcell(c)-bounds%begg+1) + icarr(c) = mod(gindex-1,ldomain%ni) + 1 enddo call ncd_io(varname='cols1d_ixy', data=icarr, dim1name=namec, ncid=ncid, flag='write') do c = bounds%begc,bounds%endc - icarr(c) = (ldecomp%gdc2glo(col%gridcell(c))-1)/ldomain%ni + 1 + gindex = gindex_global(col%gridcell(c)-bounds%begg+1) + icarr(c) = (gindex-1)/ldomain%ni + 1 enddo call ncd_io(varname='cols1d_jxy' , data=icarr ,dim1name=namec, ncid=ncid, flag='write') icarr = GetGlobalIndexArray(col%gridcell(bounds%begc:bounds%endc), bounds%begc, bounds%endc, clmlevel=nameg) @@ -3540,11 +3547,13 @@ subroutine hfields_1dinfo(t, mode) enddo call ncd_io(varname='pfts1d_lat', data=rparr, dim1name=namep, ncid=ncid, flag='write') do p = bounds%begp,bounds%endp - iparr(p) = mod(ldecomp%gdc2glo(patch%gridcell(p))-1,ldomain%ni) + 1 + gindex = gindex_global(patch%gridcell(p)-bounds%begg+1) + iparr(p) = mod(gindex-1,ldomain%ni) + 1 enddo call ncd_io(varname='pfts1d_ixy', data=iparr, dim1name=namep, ncid=ncid, flag='write') do p = bounds%begp,bounds%endp - iparr(p) = (ldecomp%gdc2glo(patch%gridcell(p))-1)/ldomain%ni + 1 + gindex = gindex_global(patch%gridcell(p)-bounds%begg+1) + iparr(p) = (gindex-1)/ldomain%ni + 1 enddo call ncd_io(varname='pfts1d_jxy' , data=iparr , dim1name=namep, ncid=ncid, flag='write') diff --git a/src/main/initGridCellsMod.F90 b/src/main/initGridCellsMod.F90 index eb34161f47..7a8a9404c0 100644 --- a/src/main/initGridCellsMod.F90 +++ b/src/main/initGridCellsMod.F90 @@ -18,7 +18,7 @@ module initGridCellsMod use abortutils , only : endrun use clm_varctl , only : iulog use clm_varcon , only : namep, namec, namel, nameg - use decompMod , only : bounds_type, ldecomp + use decompMod , only : bounds_type use GridcellType , only : grc use LandunitType , only : lun use ColumnType , 
only : col @@ -48,7 +48,7 @@ module initGridCellsMod contains !------------------------------------------------------------------------ - subroutine initGridcells(glc_behavior) + subroutine initGridcells(bounds_clump, glc_behavior) ! ! !DESCRIPTION: ! Initialize sub-grid mapping and allocates space for derived type hierarchy. @@ -56,7 +56,6 @@ subroutine initGridcells(glc_behavior) ! ! !USES use domainMod , only : ldomain - use decompMod , only : get_proc_bounds, get_clump_bounds, get_proc_clumps use subgridWeightsMod , only : compute_higher_order_weights use landunit_varcon , only : istsoil, istwet, istdlak, istice use landunit_varcon , only : isturb_tbd, isturb_hd, isturb_md, istcrop @@ -64,13 +63,11 @@ subroutine initGridcells(glc_behavior) use shr_const_mod , only : SHR_CONST_PI ! ! !ARGUMENTS: + type(bounds_type) , intent(in) :: bounds_clump type(glc_behavior_type), intent(in) :: glc_behavior ! ! !LOCAL VARIABLES: integer :: nc,li,ci,pi,gdc ! indices - integer :: nclumps ! number of clumps on this processor - type(bounds_type) :: bounds_proc - type(bounds_type) :: bounds_clump !------------------------------------------------------------------------ ! Notes about how this routine is arranged, and its implications for the arrangement @@ -117,98 +114,86 @@ subroutine initGridcells(glc_behavior) ! Column: 1 1 2 2 3 3 4 4 5 5 6 6 ! Cohort: 1 2 1 2 1 2 1 2 1 2 1 2 - nclumps = get_proc_clumps() + ! For each land gridcell on global grid determine landunit, column and patch properties + + li = bounds_clump%begl-1 + ci = bounds_clump%begc-1 + pi = bounds_clump%begp-1 - ! FIX(SPM,032414) add private vars for cohort and perhaps patch dimension - !$OMP PARALLEL DO PRIVATE (nc, bounds_clump, li, ci, pi, gdc) - do nc = 1, nclumps - - call get_clump_bounds(nc, bounds_clump) - - ! For each land gridcell on global grid determine landunit, column and patch properties - - li = bounds_clump%begl-1 - ci = bounds_clump%begc-1 - pi = bounds_clump%begp-1 - - ! 
Determine naturally vegetated landunit - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_veg_compete( & - ltype=istsoil, gi=gdc, li=li, ci=ci, pi=pi) - end do - - ! Determine crop landunit - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_crop_noncompete( & - ltype=istcrop, gi=gdc, li=li, ci=ci, pi=pi) - end do + ! Determine naturally vegetated landunit + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_veg_compete( & + ltype=istsoil, gi=gdc, li=li, ci=ci, pi=pi) + end do - ! Determine urban tall building district landunit - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_urban( & - ltype=isturb_tbd, gi=gdc, li=li, ci=ci, pi=pi) + ! Determine crop landunit + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_crop_noncompete( & + ltype=istcrop, gi=gdc, li=li, ci=ci, pi=pi) + end do - end do + ! Determine urban tall building district landunit + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_urban( & + ltype=isturb_tbd, gi=gdc, li=li, ci=ci, pi=pi) - ! Determine urban high density landunit - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_urban( & - ltype=isturb_hd, gi=gdc, li=li, ci=ci, pi=pi) - end do + end do - ! Determine urban medium density landunit - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_urban( & - ltype=isturb_md, gi=gdc, li=li, ci=ci, pi=pi) - end do + ! Determine urban high density landunit + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_urban( & + ltype=isturb_hd, gi=gdc, li=li, ci=ci, pi=pi) + end do - ! Determine lake, wetland and glacier landunits - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_wet_lake( & - ltype=istdlak, gi=gdc, li=li, ci=ci, pi=pi) - end do + ! 
Determine urban medium density landunit + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_urban( & + ltype=isturb_md, gi=gdc, li=li, ci=ci, pi=pi) + end do - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_wet_lake( & - ltype=istwet, gi=gdc, li=li, ci=ci, pi=pi) - end do + ! Determine lake, wetland and glacier landunits + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_wet_lake( & + ltype=istdlak, gi=gdc, li=li, ci=ci, pi=pi) + end do - do gdc = bounds_clump%begg,bounds_clump%endg - call set_landunit_ice( & - glc_behavior = glc_behavior, & - ltype=istice, gi=gdc, li=li, ci=ci, pi=pi) - end do + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_wet_lake( & + ltype=istwet, gi=gdc, li=li, ci=ci, pi=pi) + end do - ! Ensure that we have set the expected number of patchs, cols and landunits for this clump - SHR_ASSERT_FL(li == bounds_clump%endl, sourcefile, __LINE__) - SHR_ASSERT_FL(ci == bounds_clump%endc, sourcefile, __LINE__) - SHR_ASSERT_FL(pi == bounds_clump%endp, sourcefile, __LINE__) + do gdc = bounds_clump%begg,bounds_clump%endg + call set_landunit_ice( & + glc_behavior = glc_behavior, & + ltype=istice, gi=gdc, li=li, ci=ci, pi=pi) + end do - ! Set some other gridcell-level variables + ! Ensure that we have set the expected number of patchs, cols and landunits for this clump + SHR_ASSERT_FL(li == bounds_clump%endl, sourcefile, __LINE__) + SHR_ASSERT_FL(ci == bounds_clump%endc, sourcefile, __LINE__) + SHR_ASSERT_FL(pi == bounds_clump%endp, sourcefile, __LINE__) - do gdc = bounds_clump%begg,bounds_clump%endg - grc%gindex(gdc) = ldecomp%gdc2glo(gdc) - grc%area(gdc) = ldomain%area(gdc) - grc%latdeg(gdc) = ldomain%latc(gdc) - grc%londeg(gdc) = ldomain%lonc(gdc) - grc%lat(gdc) = grc%latdeg(gdc) * SHR_CONST_PI/180._r8 - grc%lon(gdc) = grc%londeg(gdc) * SHR_CONST_PI/180._r8 - enddo + ! Set some other gridcell-level variables - ! 
Fill in subgrid datatypes + do gdc = bounds_clump%begg,bounds_clump%endg + grc%area(gdc) = ldomain%area(gdc) + grc%latdeg(gdc) = ldomain%latc(gdc) + grc%londeg(gdc) = ldomain%lonc(gdc) + grc%lat(gdc) = grc%latdeg(gdc) * SHR_CONST_PI/180._r8 + grc%lon(gdc) = grc%londeg(gdc) * SHR_CONST_PI/180._r8 + enddo - call clm_ptrs_compdown(bounds_clump) + ! Fill in subgrid datatypes - ! By putting this check within the loop over clumps, we ensure that (for example) - ! if a clump is responsible for landunit L, then that same clump is also - ! responsible for all columns and patchs in L. - call clm_ptrs_check(bounds_clump) + call clm_ptrs_compdown(bounds_clump) - ! Set patch%wtlunit, patch%wtgcell and col%wtgcell - call compute_higher_order_weights(bounds_clump) + ! By putting this check within the loop over clumps, we ensure that (for example) + ! if a clump is responsible for landunit L, then that same clump is also + ! responsible for all columns and patchs in L. + call clm_ptrs_check(bounds_clump) - end do - !$OMP END PARALLEL DO + ! Set patch%wtlunit, patch%wtgcell and col%wtgcell + call compute_higher_order_weights(bounds_clump) end subroutine initGridcells @@ -513,7 +498,6 @@ subroutine set_landunit_urban (ltype, gi, li, ci, pi) use subgridMod , only : subgrid_get_info_urban_tbd, subgrid_get_info_urban_hd use subgridMod , only : subgrid_get_info_urban_md use UrbanParamsType , only : urbinp - use decompMod , only : ldecomp use pftconMod , only : noveg ! ! 
!ARGUMENTS: diff --git a/src/main/ncdio_pio.F90.in b/src/main/ncdio_pio.F90.in index b321dc04bc..e9a8e3e2bb 100644 --- a/src/main/ncdio_pio.F90.in +++ b/src/main/ncdio_pio.F90.in @@ -20,10 +20,9 @@ module ncdio_pio use clm_varcon , only : spval,ispval, grlnd, nameg, namel, namec, namep use clm_varctl , only : single_column, iulog use shr_sys_mod , only : shr_sys_flush - use decompMod , only : get_clmlevel_gsize,get_clmlevel_gsmap + use decompMod , only : get_clmlevel_gsize, get_clmlevel_gindex use perf_mod , only : t_startf, t_stopf use fileutils , only : getavu, relavu - use mct_mod , only : mct_gsMap, mct_gsMap_lsize, mct_gsMap_gsize, mct_gsMap_orderedPoints use pio , only : file_desc_t, io_desc_t, iosystem_desc_t use pio , only : pio_bcast_error, pio_char, pio_clobber, pio_closefile, pio_createfile, pio_def_dim use pio , only : pio_def_var, pio_double, pio_redef, pio_enddef, pio_get_att, pio_get_var, pio_global, pio_initdecomp @@ -2576,19 +2575,17 @@ contains ! !LOCAL VARIABLES: integer :: k,m,n,cnt ! indices integer :: basetype ! pio basetype - integer :: gsmap_lsize ! local size of gsmap - integer :: gsmap_gsize ! global size of gsmap integer :: fullsize ! size of entire array on cdf + integer :: lsize ! local size of clmlevel gindex integer :: gsize ! global size of clmlevel integer :: vsize ! other dimensions integer :: vsize1, vsize2 ! other dimensions integer :: status ! error status logical :: found ! true => found created iodescriptor integer :: ndims_file ! temporary - character(len=64) dimname_file ! dimension name on file - character(len=64) dimname_iodesc ! dimension name from io descriptor - type(mct_gsMap),pointer :: gsmap ! global seg map - integer, pointer,dimension(:) :: gsmOP ! gsmap ordered points + character(len=64) :: dimname_file ! dimension name on file + character(len=64) :: dimname_iodesc ! dimension name from io descriptor + integer, pointer :: gindex(:) ! 
global index space integer(pio_offset_kind), pointer :: compDOF(:) character(len=32) :: subname = 'ncd_getiodesc' !------------------------------------------------------------------------ @@ -2662,12 +2659,12 @@ contains call shr_sys_abort(errMsg(sourcefile,__LINE__)) end if - call get_clmlevel_gsmap(clmlevel,gsmap) + call get_clmlevel_gindex(clmlevel, gindex) + if (.not. associated(gindex)) then + call shr_sys_abort('gindex for clmlevel '//trim(clmlevel)//' is not associated') + end if gsize = get_clmlevel_gsize(clmlevel) - gsmap_lsize = mct_gsmap_lsize(gsmap,mpicom) - gsmap_gsize = mct_gsmap_gsize(gsmap) - - call mct_gsMap_orderedPoints(gsmap,iam,gsmOP) + lsize = size(gindex) fullsize = 1 do n = 1,ndims @@ -2680,15 +2677,15 @@ contains call shr_sys_abort(errMsg(sourcefile, __LINE__)) endif - allocate(compDOF(gsmap_lsize*vsize)) + allocate(compDOF(lsize*vsize)) if (present(switchdim)) then if (switchdim) then cnt = 0 - do m = 1,gsmap_lsize + do m = 1,lsize do n = 1,vsize cnt = cnt + 1 - compDOF(cnt) = (gsmOP(m)-1)*vsize + n + compDOF(cnt) = (gindex(m)-1)*vsize + n enddo enddo else @@ -2706,18 +2703,18 @@ contains cnt = 0 do k = 1,vsize2 do n = 1,vsize1 - do m = 1,gsmap_lsize + do m = 1,lsize cnt = cnt + 1 - compDOF(cnt) = (k-1)*vsize1*gsmap_gsize + (n-1)*gsmap_gsize + gsmOP(m) + compDOF(cnt) = (k-1)*vsize1*gsize + (n-1)*gsize + gindex(m) enddo enddo end do else cnt = 0 do n = 1,vsize - do m = 1,gsmap_lsize + do m = 1,lsize cnt = cnt + 1 - compDOF(cnt) = (n-1)*gsmap_gsize + gsmOP(m) + compDOF(cnt) = (n-1)*gsize + gindex(m) enddo enddo end if @@ -2726,17 +2723,15 @@ contains if (debug > 1) then do m = 0,npes-1 if (iam == m) then - write(iulog,*) trim(subname),' sizes1 = ',iam,gsize,gsmap_gsize,gsmap_lsize - write(iulog,*) trim(subname),' sizes2 = ',iam,fullsize,npes,vsize - write(iulog,*) trim(subname),' compDOF = ',iam,size(compDOF),minval(compDOF),maxval(compDOF) + write(iulog,'(a,3(i10,2x))') trim(subname)//' sizes1 = ',iam,gsize,lsize + 
write(iulog,'(a,4(i10,2x))') trim(subname)//' sizes2 = ',iam,fullsize,npes,vsize + write(iulog,'(a,5(i10,2x))') trim(subname)//' compDOF = ',iam,size(compDOF),minval(compDOF),maxval(compDOF) call shr_sys_flush(iulog) endif call mpi_barrier(mpicom,status) enddo endif - deallocate(gsmOP) - call pio_initdecomp(pio_subsystem, xTYPE, dims(1:ndims), compDOF, iodesc_list(iodnum)%iodesc) deallocate(compDOF) diff --git a/src/main/subgridRestMod.F90 b/src/main/subgridRestMod.F90 index 78a00b5492..e789866ee5 100644 --- a/src/main/subgridRestMod.F90 +++ b/src/main/subgridRestMod.F90 @@ -8,7 +8,7 @@ module subgridRestMod use shr_log_mod , only : errMsg => shr_log_errMsg use glc_elevclass_mod , only : glc_get_num_elevation_classes, glc_get_elevclass_bounds use abortutils , only : endrun - use decompMod , only : bounds_type, BOUNDS_LEVEL_PROC, ldecomp + use decompMod , only : bounds_type, BOUNDS_LEVEL_PROC, gindex_global use domainMod , only : ldomain use clm_time_manager , only : get_curr_date use clm_varcon , only : nameg, namel, namec, namep @@ -115,6 +115,7 @@ subroutine subgridRest_write_only(bounds, ncid, flag) integer , pointer :: ilarr(:) ! temporary integer , pointer :: icarr(:) ! temporary integer , pointer :: iparr(:) ! temporary + integer :: gindex ! 
global index real(r8), pointer :: elevclass_bounds(:) @@ -141,7 +142,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=grc%latdeg) do g=bounds%begg,bounds%endg - igarr(g)= mod(ldecomp%gdc2glo(g)-1,ldomain%ni) + 1 + gindex = gindex_global(g-bounds%begg+1) + igarr(g)= mod(gindex-1,ldomain%ni) + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='grid1d_ixy', xtype=ncd_int, & dim1name='gridcell', & @@ -149,7 +151,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=igarr) do g=bounds%begg,bounds%endg - igarr(g)= (ldecomp%gdc2glo(g) - 1)/ldomain%ni + 1 + gindex = gindex_global(g-bounds%begg+1) + igarr(g)= (gindex - 1)/ldomain%ni + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='grid1d_jxy', xtype=ncd_int, & dim1name='gridcell', & @@ -182,7 +185,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=rlarr) do l=bounds%begl,bounds%endl - ilarr(l) = mod(ldecomp%gdc2glo(lun%gridcell(l))-1,ldomain%ni) + 1 + gindex = gindex_global(lun%gridcell(l)-bounds%begg+1) + ilarr(l) = mod(gindex-1,ldomain%ni) + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='land1d_ixy', xtype=ncd_int, & dim1name='landunit', & @@ -190,7 +194,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=ilarr) do l=bounds%begl,bounds%endl - ilarr(l) = (ldecomp%gdc2glo(lun%gridcell(l))-1)/ldomain%ni + 1 + gindex = gindex_global(lun%gridcell(l)-bounds%begg+1) + ilarr(l) = (gindex-1)/ldomain%ni + 1 end do call restartvar(ncid=ncid, flag=flag, varname='land1d_jxy', xtype=ncd_int, & dim1name='landunit', & @@ -245,7 +250,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=rcarr) do c= bounds%begc, bounds%endc - icarr(c) = mod(ldecomp%gdc2glo(col%gridcell(c))-1,ldomain%ni) + 1 + gindex = gindex_global(col%gridcell(c)-bounds%begg+1) + icarr(c) = 
mod(gindex-1,ldomain%ni) + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='cols1d_ixy', xtype=ncd_int, & dim1name='column', & @@ -253,7 +259,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=icarr) do c= bounds%begc, bounds%endc - icarr(c) = (ldecomp%gdc2glo(col%gridcell(c))-1)/ldomain%ni + 1 + gindex = gindex_global(col%gridcell(c)-bounds%begg+1) + icarr(c) = (gindex-1)/ldomain%ni + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='cols1d_jxy', xtype=ncd_int, & dim1name='column', & @@ -346,7 +353,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=rparr) do p=bounds%begp,bounds%endp - iparr(p) = mod(ldecomp%gdc2glo(patch%gridcell(p))-1,ldomain%ni) + 1 + gindex = gindex_global(patch%gridcell(p)-bounds%begg+1) + iparr(p) = mod(gindex-1,ldomain%ni) + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='pfts1d_ixy', xtype=ncd_int, & dim1name='pft', & @@ -354,7 +362,8 @@ subroutine subgridRest_write_only(bounds, ncid, flag) interpinic_flag='skip', readvar=readvar, data=iparr) do p=bounds%begp,bounds%endp - iparr(p) = (ldecomp%gdc2glo(patch%gridcell(p))-1)/ldomain%ni + 1 + gindex = gindex_global(patch%gridcell(p)-bounds%begg+1) + iparr(p) = (gindex-1)/ldomain%ni + 1 enddo call restartvar(ncid=ncid, flag=flag, varname='pfts1d_jxy', xtype=ncd_int, & dim1name='pft', & diff --git a/src/unit_test_shr/unittestSubgridMod.F90 b/src/unit_test_shr/unittestSubgridMod.F90 index 2a02815614..3b42976e79 100644 --- a/src/unit_test_shr/unittestSubgridMod.F90 +++ b/src/unit_test_shr/unittestSubgridMod.F90 @@ -83,12 +83,11 @@ module unittestSubgridMod ! Indices of initial grid cell / landunit / column / patch ! - ! Note that we do NOT start at 1, in order to catch any code that assumes indices start - ! at 1. 
- integer, parameter, public :: begg = 11 - integer, parameter, public :: begl = 21 - integer, parameter, public :: begc = 31 - integer, parameter, public :: begp = 41 + ! Now we do start at 1. + integer, parameter, public :: begg = 1 + integer, parameter, public :: begl = 1 + integer, parameter, public :: begc = 1 + integer, parameter, public :: begp = 1 ! Indices of final grid cell / landunit / column / patch ! Note that these are the final indices of the allocated arrays, which may be greater diff --git a/src/utils/spmdGathScatMod.F90 b/src/utils/spmdGathScatMod.F90 deleted file mode 100644 index b3314d2da6..0000000000 --- a/src/utils/spmdGathScatMod.F90 +++ /dev/null @@ -1,540 +0,0 @@ -module spmdGathScatMod - -!----------------------------------------------------------------------- -!BOP -! -! !MODULE: spmdGathScatMod -! -! !DESCRIPTION: -! Perform SPMD gather and scatter operations. -! -! !USES: - use clm_varcon, only: spval, ispval - use decompMod, only : get_clmlevel_gsmap - use shr_kind_mod, only: r8 => shr_kind_r8 - use spmdMod , only : masterproc, mpicom - use mct_mod , only : mct_aVect, mct_gsMap - use mct_mod , only : mct_aVect_init, mct_aVect_importIattr, mct_aVect_scatter - use mct_mod , only : mct_aVect_gather, mct_aVect_exportIattr, mct_aVect_clean - use mct_mod , only : mct_aVect_exportRattr, mct_aVect_importRattr - use abortutils, only : endrun - use clm_varctl, only : iulog - use perf_mod , only : t_startf, t_stopf -! -! !PUBLIC TYPES: - implicit none - private -! -! !PUBLIC MEMBER FUNCTIONS: - public scatter_data_from_master, gather_data_to_master - - interface scatter_data_from_master - module procedure scatter_1darray_int - module procedure scatter_1darray_real - end interface - - interface gather_data_to_master - module procedure gather_1darray_int - module procedure gather_1darray_real - end interface -! -! !REVISION HISTORY: -! Author: Mariana Vertenstein -! -!EOP -! 
- integer,private,parameter :: debug = 0 - -!----------------------------------------------------------------------- - -contains - -!----------------------------------------------------------------------- -!BOP -! -! !IROUTINE: scatter_1darray_int -! -! !INTERFACE: - subroutine scatter_1darray_int (alocal, aglobal, clmlevel) -! -! !DESCRIPTION: -! Wrapper routine to scatter int 1d array -! -! !USES: -! -! !ARGUMENTS: - implicit none - integer , pointer :: alocal(:) ! local data (output) - integer , pointer :: aglobal(:) ! global data (input) - character(len=*) ,intent(in) :: clmlevel ! type of input grid -! -! !REVISION HISTORY: -! Author: T Craig -! -! -! !LOCAL VARIABLES: -!EOP - integer :: n1,n2,lb1,ub1,lb2,ub2 ! indices - integer :: lsize ! size of local array - type(mct_aVect) :: AVi, AVo ! attribute vectors - integer ,pointer :: adata(:) ! local data array - character(len=256) :: rstring ! real field list string - character(len=256) :: istring ! int field list string - character(len=8) :: fname ! arbitrary field name - type(mct_gsMap),pointer :: gsmap ! global seg map - character(len=*),parameter :: subname = 'scatter_1darray_int' - -!----------------------------------------------------------------------- - - call t_startf(trim(subname)//'_total') - call get_clmlevel_gsmap(clmlevel,gsmap) - - lb1 = lbound(alocal,dim=1) - ub1 = ubound(alocal,dim=1) - lb2 = 1 - ub2 = 1 - - rstring = "" - istring = "" - - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - if (len_trim(istring) == 0) then - istring = trim(fname) - else - istring = trim(istring)//":"//trim(fname) - endif - enddo - - if (masterproc .and. 
debug > 2) then - write(iulog,*) trim(subname),' strings:',trim(rstring),' ',trim(istring) - endif - - if (debug > 1) call t_startf(trim(subname)//'_pack') - - if (masterproc) then - lsize = size(aglobal,dim=1) - call mct_aVect_init(AVi,rList=trim(rstring),iList=trim(istring),lsize=lsize) - allocate(adata(lsize)) - do n2 = lb2,ub2 - adata(1:lsize) = aglobal(1:lsize) - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_importIattr(AVi,trim(fname),adata,lsize) - enddo - deallocate(adata) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_pack') - if (debug > 1) call t_startf(trim(subname)//'_scat') - - call mct_aVect_scatter(AVi, AVo, gsmap, 0, mpicom) - - if (debug > 1) call t_stopf(trim(subname)//'_scat') - if (debug > 1) call t_startf(trim(subname)//'_upck') - - lsize = size(alocal,dim=1) - allocate(adata(lsize)) - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_exportIattr(AVo,trim(fname),adata,lsize) - do n1 = lb1,ub1 - alocal(n1) = adata(n1-lb1+1) - enddo - enddo - deallocate(adata) - - if (debug > 1) call t_stopf(trim(subname)//'_upck') - - if (masterproc) then - call mct_aVect_clean(AVi) - endif - call mct_aVect_clean(AVo) - - call t_stopf(trim(subname)//'_total') - - end subroutine scatter_1darray_int - -!----------------------------------------------------------------------- -!BOP -! -! !IROUTINE: gather_1darray_int -! -! !INTERFACE: - subroutine gather_1darray_int (alocal, aglobal, clmlevel, missing) -! -! !DESCRIPTION: -! Wrapper routine to gather int 1d array -! -! !USES: -! -! !ARGUMENTS: - implicit none - integer , pointer :: alocal(:) ! local data (output) - integer , pointer :: aglobal(:) ! global data (input) - character(len=*) ,intent(in) :: clmlevel ! type of input grid - integer ,optional,intent(in) :: missing ! missing value -! -! !REVISION HISTORY: -! Author: T Craig -! -! -! !LOCAL VARIABLES: -!EOP - integer :: n1,n2,lb1,ub1,lb2,ub2 ! indices - integer :: lsize ! 
size of local array - type(mct_aVect) :: AVi, AVo ! attribute vectors - integer ,pointer :: adata(:) ! temporary data array - integer ,pointer :: mvect(:) ! local array for mask - character(len=256) :: rstring ! real field list string - character(len=256) :: istring ! int field list string - character(len=8) :: fname ! arbitrary field name - type(mct_gsMap),pointer :: gsmap ! global seg map - character(len=*),parameter :: subname = 'gather_1darray_int' - -!----------------------------------------------------------------------- - - call t_startf(trim(subname)//'_total') - call get_clmlevel_gsmap(clmlevel,gsmap) - - lsize = size(alocal,dim=1) - lb1 = lbound(alocal,dim=1) - ub1 = ubound(alocal,dim=1) - lb2 = 1 - ub2 = 1 - - rstring = "" - istring = "" - - if (present(missing)) then - istring = "mask" - endif - - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - if (len_trim(istring) == 0) then - istring = trim(fname) - else - istring = trim(istring)//":"//trim(fname) - endif - enddo - - if (masterproc .and. debug > 2) then - write(iulog,*) trim(subname),' strings:',trim(rstring),' ',trim(istring) - endif - - call mct_aVect_init(AVi,rList=trim(rstring),iList=trim(istring),lsize=lsize) - - if (debug > 1) call t_startf(trim(subname)//'_pack') - allocate(adata(lsize)) - do n2 = lb2,ub2 - do n1 = lb1,ub1 - adata(n1-lb1+1) = alocal(n1) - enddo - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_importIattr(AVi,trim(fname),adata,lsize) - enddo - deallocate(adata) - - if (present(missing)) then - allocate(mvect(lsize)) - do n1 = lb1,ub1 - mvect(n1-lb1+1) = 1 - enddo - call mct_aVect_importIattr(AVi,"mask",mvect,lsize) - deallocate(mvect) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_pack') - if (debug > 1) call t_startf(trim(subname)//'_gath') - - if (present(missing)) then -! tcx wait for update in mct, then get rid of "mask" -! 
call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom, missing = missing) - call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom) - else - call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_gath') - if (debug > 1) call t_startf(trim(subname)//'_upck') - - if (masterproc) then - lsize = size(aglobal,dim=1) - allocate(adata(lsize)) - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_exportIattr(AVo,trim(fname),adata,lsize) - aglobal(1:lsize) = adata(1:lsize) - enddo - deallocate(adata) - if (present(missing)) then - allocate(mvect(lsize)) - call mct_aVect_exportIattr(AVo,"mask",mvect,lsize) - do n1 = 1,lsize - if (mvect(n1) == 0) then - do n2 = lb2,ub2 - aglobal(n1) = missing - enddo - endif - enddo - deallocate(mvect) - endif - endif - - if (debug > 1) call t_stopf(trim(subname)//'_upck') - - if (masterproc) then - call mct_aVect_clean(AVo) - endif - - call mct_aVect_clean(AVi) - - call t_stopf(trim(subname)//'_total') - - end subroutine gather_1darray_int - -!----------------------------------------------------------------------- -!BOP -! -! !IROUTINE: scatter_1darray_real -! -! !INTERFACE: - subroutine scatter_1darray_real (alocal, aglobal, clmlevel) -! -! !DESCRIPTION: -! Wrapper routine to scatter real 1d array -! -! !USES: -! -! !ARGUMENTS: - implicit none - real(r8), pointer :: alocal(:) ! local data (output) - real(r8), pointer :: aglobal(:) ! global data (input) - character(len=*) ,intent(in) :: clmlevel ! type of input grid -! -! !REVISION HISTORY: -! Author: T Craig -! -! -! !LOCAL VARIABLES: -!EOP - integer :: n1,n2,lb1,ub1,lb2,ub2 ! indices - integer :: lsize ! size of local array - type(mct_aVect) :: AVi, AVo ! attribute vectors - real(r8),pointer :: adata(:) ! local data array - character(len=256) :: rstring ! real field list string - character(len=256) :: istring ! int field list string - character(len=8) :: fname ! arbitrary field name - type(mct_gsMap),pointer :: gsmap ! 
global seg map - character(len=*),parameter :: subname = 'scatter_1darray_real' - -!----------------------------------------------------------------------- - - call t_startf(trim(subname)//'_total') - call get_clmlevel_gsmap(clmlevel,gsmap) - - lb1 = lbound(alocal,dim=1) - ub1 = ubound(alocal,dim=1) - lb2 = 1 - ub2 = 1 - - rstring = "" - istring = "" - - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - if (len_trim(rstring) == 0) then - rstring = trim(fname) - else - rstring = trim(rstring)//":"//trim(fname) - endif - enddo - - if (masterproc .and. debug > 2) then - write(iulog,*) trim(subname),' strings:',trim(rstring),' ',trim(istring) - endif - - if (debug > 1) call t_startf(trim(subname)//'_pack') - - if (masterproc) then - lsize = size(aglobal,dim=1) - call mct_aVect_init(AVi,rList=trim(rstring),iList=trim(istring),lsize=lsize) - allocate(adata(lsize)) - do n2 = lb2,ub2 - adata(1:lsize) = aglobal(1:lsize) - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_importRattr(AVi,trim(fname),adata,lsize) - enddo - deallocate(adata) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_pack') - if (debug > 1) call t_startf(trim(subname)//'_scat') - - call mct_aVect_scatter(AVi, AVo, gsmap, 0, mpicom) - - if (debug > 1) call t_stopf(trim(subname)//'_scat') - if (debug > 1) call t_startf(trim(subname)//'_upck') - - lsize = size(alocal,dim=1) - allocate(adata(lsize)) - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_exportRattr(AVo,trim(fname),adata,lsize) - do n1 = lb1,ub1 - alocal(n1) = adata(n1-lb1+1) - enddo - enddo - deallocate(adata) - - if (debug > 1) call t_stopf(trim(subname)//'_upck') - - if (masterproc) then - call mct_aVect_clean(AVi) - endif - call mct_aVect_clean(AVo) - - call t_stopf(trim(subname)//'_total') - - end subroutine scatter_1darray_real - -!----------------------------------------------------------------------- -!BOP -! -! !IROUTINE: gather_1darray_real -! -! 
!INTERFACE: - subroutine gather_1darray_real (alocal, aglobal, clmlevel, missing) -! -! !DESCRIPTION: -! Wrapper routine to gather real 1d array -! -! !USES: -! -! !ARGUMENTS: - implicit none - real(r8), pointer :: alocal(:) ! local data (output) - real(r8), pointer :: aglobal(:) ! global data (input) - character(len=*) ,intent(in) :: clmlevel ! type of input grid - real(r8),optional,intent(in) :: missing ! missing value -! -! !REVISION HISTORY: -! Author: T Craig -! -! -! !LOCAL VARIABLES: -!EOP - integer :: n1,n2,lb1,ub1,lb2,ub2 ! indices - integer :: lsize ! size of local array - type(mct_aVect) :: AVi, AVo ! attribute vectors - real(r8),pointer :: adata(:) ! temporary data array - integer ,pointer :: mvect(:) ! local array for mask - character(len=256) :: rstring ! real field list string - character(len=256) :: istring ! int field list string - character(len=8) :: fname ! arbitrary field name - type(mct_gsMap),pointer :: gsmap ! global seg map - character(len=*),parameter :: subname = 'gather_1darray_real' - -!----------------------------------------------------------------------- - - call t_startf(trim(subname)//'_total') - call get_clmlevel_gsmap(clmlevel,gsmap) - - lsize = size(alocal,dim=1) - lb1 = lbound(alocal,dim=1) - ub1 = ubound(alocal,dim=1) - lb2 = 1 - ub2 = 1 - - rstring = "" - istring = "" - - if (present(missing)) then - istring = "mask" - endif - - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - if (len_trim(rstring) == 0) then - rstring = trim(fname) - else - rstring = trim(rstring)//":"//trim(fname) - endif - enddo - - if (masterproc .and. 
debug > 2) then - write(iulog,*) trim(subname),' strings:',trim(rstring),' ',trim(istring) - endif - - call mct_aVect_init(AVi,rList=trim(rstring),iList=trim(istring),lsize=lsize) - - if (debug > 1) call t_startf(trim(subname)//'_pack') - allocate(adata(lsize)) - do n2 = lb2,ub2 - do n1 = lb1,ub1 - adata(n1-lb1+1) = alocal(n1) - enddo - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_importRattr(AVi,trim(fname),adata,lsize) - enddo - deallocate(adata) - - if (present(missing)) then - allocate(mvect(lsize)) - do n1 = lb1,ub1 - mvect(n1-lb1+1) = 1 - enddo - call mct_aVect_importIattr(AVi,"mask",mvect,lsize) - deallocate(mvect) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_pack') - if (debug > 1) call t_startf(trim(subname)//'_gath') - - if (present(missing)) then -! tcx wait for update in mct, then get rid of "mask" -! call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom, missing = missing) - call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom) - else - call mct_aVect_gather(AVi, AVo, gsmap, 0, mpicom) - endif - - if (debug > 1) call t_stopf(trim(subname)//'_gath') - if (debug > 1) call t_startf(trim(subname)//'_upck') - - if (masterproc) then - lsize = size(aglobal,dim=1) - allocate(adata(lsize)) - do n2 = lb2,ub2 - write(fname,'(a1,i3.3)') 'f',n2-lb2+1 - call mct_aVect_exportRattr(AVo,trim(fname),adata,lsize) - aglobal(1:lsize) = adata(1:lsize) - enddo - deallocate(adata) - if (present(missing)) then - allocate(mvect(lsize)) - call mct_aVect_exportIattr(AVo,"mask",mvect,lsize) - do n1 = 1,lsize - if (mvect(n1) == 0) then - do n2 = lb2,ub2 - aglobal(n1) = missing - enddo - endif - enddo - deallocate(mvect) - endif - endif - - if (debug > 1) call t_stopf(trim(subname)//'_upck') - - if (masterproc) then - call mct_aVect_clean(AVo) - endif - - call mct_aVect_clean(AVi) - - call t_stopf(trim(subname)//'_total') - - end subroutine gather_1darray_real - -end module spmdGathScatMod