diff --git a/.codecov.yml b/.codecov.yml index aa85b2b3ac..ae3b27aed3 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -8,7 +8,3 @@ coverage: default: threshold: 100% base: parent -comment: - # This is set to the number of TCs, plus unit, but can be removed - # (i.e. set to 1) when reporting is separated from coverage. - after_n_builds: 9 diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 1fc95e9127..358d48a7a7 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -3,16 +3,13 @@ name: Code coverage on: [push, pull_request] jobs: - build-test-nans: + build-coverage: runs-on: ubuntu-latest defaults: run: working-directory: .testing - env: - REPORT_COVERAGE: true - steps: - uses: actions/checkout@v2 with: @@ -23,13 +20,29 @@ jobs: - uses: ./.github/actions/testing-setup - name: Compile unit testing - run: make -j build/unit/MOM6 + run: make -j build/unit/MOM_unit_tests - name: Run unit tests - run: make unit.cov.upload + run: make run.cov.unit + + - name: Report unit test coverage to CI (PR) + if: github.event_name == 'pull_request' + run: make report.cov.unit REQUIRE_COVERAGE_UPLOAD=true + + - name: Report unit test coverage to CI (Push) + if: github.event_name != 'pull_request' + run: make report.cov.unit - - name: Compile MOM6 with code coverage + - name: Compile ocean-only MOM6 with code coverage run: make -j build/cov/MOM6 - - name: Run and post coverage - run: make run.cov -k -s + - name: Run coverage tests + run: make -j -k run.cov + + - name: Report coverage to CI (PR) + if: github.event_name == 'pull_request' + run: make report.cov REQUIRE_COVERAGE_UPLOAD=true + + - name: Report coverage to CI (Push) + if: github.event_name != 'pull_request' + run: make report.cov diff --git a/.testing/Makefile b/.testing/Makefile index 972c213032..150a365692 100644 --- a/.testing/Makefile +++ b/.testing/Makefile @@ -8,27 +8,24 @@ # Run the test suite, defined in the `tc` directores. 
# # make clean -# Wipe the MOM6 test executables -# (NOTE: This does not delete FMS in the `deps`) +# Delete the MOM6 test executables and dependency builds (FMS) +# +# make clean.build +# Delete only the MOM6 test executables # # # Configuration: # These settings can be provided as either command-line flags, or saved in a # `config.mk` file. # -# Experiment Configuration: -# BUILDS Executables to be built by `make` or `make all` -# CONFIGS Model configurations to test (default: `tc*`) -# TESTS Tests to run -# DIMS Dimensional scaling tests -# (NOTE: Each test will build its required executables, regardless of BUILDS) -# # General test configuration: -# FRAMEWORK Model framework (fms1 or fms2) # MPIRUN MPI job launcher (mpirun, srun, etc) +# FRAMEWORK Model framework (fms1 or fms2) # DO_REPRO_TESTS Enable production ("repro") testing equivalence # DO_REGRESSION_TESTS Enable regression tests (usually dev/gfdl) -# REPORT_COVERAGE Enable code coverage and generate reports +# DO_COVERAGE Enable code coverage and generate .gcov reports +# DO_PROFILE Enable performance profiler comparison tests +# REQUIRE_COVERAGE_UPLOAD Abort as error if upload to codecov.io fails. 
# # Compiler configuration: # CC C compiler @@ -43,6 +40,16 @@ # FCFLAGS_OPT Aggressive optimization compiler flags # FCFLAGS_INIT Variable initialization flags # FCFLAGS_COVERAGE Code coverage flags +# FCFLAGS_FMS FMS build flags (default: FCFLAGS_DEBUG) +# +# LDFLAGS_COVERAGE Linker coverage flags +# LDFLAGS_USER User-defined linker flags (used for all MOM/FMS builds) +# +# Experiment Configuration: +# BUILDS Executables to be built by `make` or `make all` +# CONFIGS Model configurations to test (default: `tc*`) +# TESTS Tests to run +# DIMS Dimensional scaling tests # # Regression repository ("target") configuration: # MOM_TARGET_SLUG URL slug (minus domain) of the target repo @@ -78,48 +85,48 @@ export FC export MPIFC # Builds are distinguished by FCFLAGS -# NOTE: FMS will be built using FCFLAGS_DEBUG FCFLAGS_DEBUG ?= -g -O0 FCFLAGS_REPRO ?= -g -O2 FCFLAGS_OPT ?= -g -O3 -mavx -fno-omit-frame-pointer -FCFLAGS_COVERAGE ?= -g -O0 -fbacktrace --coverage FCFLAGS_INIT ?= +FCFLAGS_COVERAGE ?= -g -O0 -fbacktrace --coverage +FCFLAGS_FMS ?= $(FCFLAGS_DEBUG) # Additional notes: # - These default values are simple, minimalist flags, supported by nearly all -# compilers, and are comparable to GFDL's canonical DEBUG and REPRO builds. +# compilers, and are somewhat analogous to GFDL's DEBUG and REPRO builds. # -# - These flags should be configured outside of the Makefile, either with +# - These flags can be configured outside of the Makefile, either with # config.mk or as environment variables. -# -# - FMS cannot be built with the same aggressive initialization flags as MOM6, -# so FCFLAGS_INIT is used to provide additional MOM6 configuration. 
-# User-defined LDFLAGS (applied to all builds and FMS) LDFLAGS_COVERAGE ?= --coverage LDFLAGS_USER ?= # Set to `true` to require identical results from DEBUG and REPRO builds -# NOTE: Many compilers (Intel, GCC on ARM64) do not yet produce identical -# results across DEBUG and REPRO builds (as defined below), so we disable on +# NOTE: Many compilers (Intel, GCC on ARM64) do not produce identical results +# across DEBUG and REPRO builds (as defined below), so we disable on # default. DO_REPRO_TESTS ?= +# Enable profiling +DO_PROFILE ?= + +# Enable code coverage runs +DO_COVERAGE ?= + +# Report failure if coverage report is not uploaded +REQUIRE_COVERAGE_UPLOAD ?= + # Time measurement (configurable by the CI) TIME ?= time # Experiment configuration -BUILDS ?= symmetric asymmetric openmp +BUILDS ?= symmetric/MOM6 asymmetric/MOM6 openmp/MOM6 CONFIGS ?= $(wildcard tc*) TESTS ?= grid layout rotate restart openmp nan $(foreach d,$(DIMS),dim.$(d)) DIMS ?= t l h z q r -#--- -# Dependencies -DEPS = deps - - #--- # Test configuration @@ -127,29 +134,29 @@ DEPS = deps # the DEBUG results in older GCCs and vendor compilers, so we can optionally # disable them. 
ifeq ($(DO_REPRO_TESTS), true) - BUILDS += repro + BUILDS += repro/MOM6 TESTS += repro endif # Profiling +ifeq ($(DO_PROFILE), true) + BUILDS += opt/MOM6 opt_target/MOM6 +endif + +# Unit testing +UNIT_EXECS ?= MOM_unit_tests +ifeq ($(DO_COVERAGE), true) + BUILDS += cov/MOM6 $(foreach e, $(UNIT_EXECS), unit/$(e)) +endif + ifeq ($(DO_PROFILE), false) - BUILDS += opt opt_target + BUILDS += opt/MOM6 opt_target/MOM6 endif -# Unit test testing -BUILDS += cov unit -# The following variables are configured by Travis: -# DO_REGRESSION_TESTS: true if $(TRAVIS_PULL_REQUEST) is a PR number -# MOM_TARGET_SLUG: TRAVIS_REPO_SLUG -# MOM_TARGET_LOCAL_BRANCH: TRAVIS_BRANCH -# These are set to true by our Travis configuration if testing a pull request DO_REGRESSION_TESTS ?= -REPORT_COVERAGE ?= -CODECOV_UPLOADER_URL ?= https://uploader.codecov.io/latest/linux/codecov - ifeq ($(DO_REGRESSION_TESTS), true) - BUILDS += target + BUILDS += target/MOM6 TESTS += regression MOM_TARGET_SLUG ?= NOAA-GFDL/MOM6 @@ -180,7 +187,7 @@ TARGET_SOURCE = $(call SOURCE,build/target_codebase/src) \ $(wildcard build/target_codebase/config_src/infra/FMS1/*.F90) \ $(wildcard build/target_codebase/config_src/drivers/solo_driver/*.F90) \ $(wildcard build/target_codebase/config_src/ext*/*.F90) -FMS_SOURCE = $(call SOURCE,$(DEPS)/fms/src) +FMS_SOURCE = $(call SOURCE,deps/fms/src) #--- @@ -210,13 +217,12 @@ endif # Rules .PHONY: all build.regressions build.prof -all: $(foreach b,$(BUILDS),build/$(b)/MOM6) $(VENV_PATH) +all: $(foreach b,$(BUILDS),build/$(b)) $(VENV_PATH) build.regressions: $(foreach b,symmetric target,build/$(b)/MOM6) build.prof: $(foreach b,opt opt_target,build/$(b)/MOM6) # Executable -BUILD_TARGETS = MOM6 Makefile path_names -.PRECIOUS: $(foreach b,$(BUILDS),$(foreach f,$(BUILD_TARGETS),build/$(b)/$(f))) +.PRECIOUS: $(foreach b,$(BUILDS),build/$(b)) # Compiler flags @@ -224,22 +230,22 @@ BUILD_TARGETS = MOM6 Makefile path_names # .testing dependencies # TODO: We should probably build TARGET with 
the FMS that it was configured # to use. But for now we use the same FMS over all builds. -FCFLAGS_FMS = -I../../$(DEPS)/include -LDFLAGS_FMS = -L../../$(DEPS)/lib -PATH_FMS = PATH="${PATH}:../../$(DEPS)/bin" +FCFLAGS_DEPS = -I../../deps/include +LDFLAGS_DEPS = -L../../deps/lib +PATH_DEPS = PATH="${PATH}:../../deps/bin" # Define the build targets in terms of the traditional DEBUG/REPRO/etc labels -SYMMETRIC_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" -ASYMMETRIC_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" -REPRO_FCFLAGS := FCFLAGS="$(FCFLAGS_REPRO) $(FCFLAGS_FMS)" -OPT_FCFLAGS := FCFLAGS="$(FCFLAGS_OPT) $(FCFLAGS_FMS)" -OPENMP_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" -TARGET_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_FMS)" -COV_FCFLAGS := FCFLAGS="$(FCFLAGS_COVERAGE) $(FCFLAGS_FMS)" +SYMMETRIC_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_DEPS)" +ASYMMETRIC_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_DEPS)" +REPRO_FCFLAGS := FCFLAGS="$(FCFLAGS_REPRO) $(FCFLAGS_DEPS)" +OPT_FCFLAGS := FCFLAGS="$(FCFLAGS_OPT) $(FCFLAGS_DEPS)" +OPENMP_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_DEPS)" +TARGET_FCFLAGS := FCFLAGS="$(FCFLAGS_DEBUG) $(FCFLAGS_INIT) $(FCFLAGS_DEPS)" +COV_FCFLAGS := FCFLAGS="$(FCFLAGS_COVERAGE) $(FCFLAGS_DEPS)" -MOM_LDFLAGS := LDFLAGS="$(LDFLAGS_FMS) $(LDFLAGS_USER)" -COV_LDFLAGS := LDFLAGS="$(LDFLAGS_COVERAGE) $(LDFLAGS_FMS) $(LDFLAGS_USER)" +MOM_LDFLAGS := LDFLAGS="$(LDFLAGS_DEPS) $(LDFLAGS_USER)" +COV_LDFLAGS := LDFLAGS="$(LDFLAGS_COVERAGE) $(LDFLAGS_DEPS) $(LDFLAGS_USER)" # Environment variable configuration @@ -279,19 +285,19 @@ build/opt_target/Makefile: | $(TARGET_CODEBASE) # NOTE: ./configure is too much, but Makefile is not enough! # Ideally we only want to re-run both Makefile and mkmf, but the mkmf call # is inside ./configure, so we must re-run ./configure as well. 
-$(foreach b,$(filter-out target,$(BUILDS)),build/$(b)/Makefile): $(MOM_SOURCE) build/target_codebase/configure: $(TARGET_SOURCE) -# Build MOM6 -.PRECIOUS: $(foreach b,$(BUILDS),build/$(b)/MOM6) -build/%/MOM6: build/%/Makefile +# Build executables +$(foreach e,$(UNIT_EXECS),build/unit/$(e)): build/unit/Makefile $(MOM_SOURCE) + cd $(@D) && $(TIME) $(MAKE) -j +build/%/MOM6: build/%/Makefile $(MOM_SOURCE) cd $(@D) && $(TIME) $(MAKE) -j # Use autoconf to construct the Makefile for each target -.PRECIOUS: $(foreach b,$(BUILDS),build/$(b)/Makefile) -build/%/Makefile: ../ac/configure ../ac/Makefile.in $(DEPS)/lib/libFMS.a +.PRECIOUS: build/%/Makefile +build/%/Makefile: ../ac/configure ../ac/Makefile.in deps/lib/libFMS.a mkdir -p $(@D) cd $(@D) \ && $(MOM_ENV) ../../../ac/configure $(MOM_ACFLAGS) --with-framework=$(FRAMEWORK) \ @@ -304,7 +310,7 @@ build/%/Makefile: ../ac/configure ../ac/Makefile.in $(DEPS)/lib/libFMS.a # Fetch the regression target codebase build/target/Makefile build/opt_target/Makefile: \ - $(TARGET_CODEBASE)/ac/configure $(DEPS)/lib/libFMS.a + $(TARGET_CODEBASE)/ac/configure deps/lib/libFMS.a mkdir -p $(@D) cd $(@D) \ && $(MOM_ENV) ../../$(TARGET_CODEBASE)/ac/configure $(MOM_ACFLAGS) \ @@ -324,32 +330,31 @@ $(TARGET_CODEBASE): # FMS # Set up the FMS build environment variables -FMS_ENV = PATH="${PATH}:$(realpath ../ac)" FCFLAGS="$(FCFLAGS_DEBUG)" +FMS_ENV = PATH="${PATH}:$(realpath ../ac)" FCFLAGS="$(FCFLAGS_FMS)" -# TODO: *.mod dependencies? 
-$(DEPS)/lib/libFMS.a: $(DEPS)/fms/build/libFMS.a - $(MAKE) -C $(DEPS) lib/libFMS.a +deps/lib/libFMS.a: deps/fms/build/libFMS.a + $(MAKE) -C deps lib/libFMS.a -$(DEPS)/fms/build/libFMS.a: $(DEPS)/fms/build/Makefile - $(MAKE) -C $(DEPS) fms/build/libFMS.a +deps/fms/build/libFMS.a: deps/fms/build/Makefile + $(MAKE) -C deps fms/build/libFMS.a -$(DEPS)/fms/build/Makefile: $(DEPS)/fms/src/configure $(DEPS)/Makefile.fms.in - $(FMS_ENV) $(MAKE) -C $(DEPS) fms/build/Makefile +deps/fms/build/Makefile: deps/fms/src/configure deps/Makefile.fms.in + $(FMS_ENV) $(MAKE) -C deps fms/build/Makefile -$(DEPS)/Makefile.fms.in: ../ac/deps/Makefile.fms.in $(DEPS)/Makefile - cp $< $(DEPS) +deps/Makefile.fms.in: ../ac/deps/Makefile.fms.in deps/Makefile + cp $< deps # TODO: m4 dependencies? -$(DEPS)/fms/src/configure: ../ac/deps/configure.fms.ac $(DEPS)/Makefile $(FMS_SOURCE) | $(DEPS)/fms/src - cp ../ac/deps/configure.fms.ac $(DEPS) - cp -r ../ac/deps/m4 $(DEPS) - $(MAKE) -C $(DEPS) fms/src/configure +deps/fms/src/configure: ../ac/deps/configure.fms.ac deps/Makefile $(FMS_SOURCE) | deps/fms/src + cp ../ac/deps/configure.fms.ac deps + cp -r ../ac/deps/m4 deps + $(MAKE) -C deps fms/src/configure -$(DEPS)/fms/src: $(DEPS)/Makefile - make -C $(DEPS) fms/src +deps/fms/src: deps/Makefile + make -C deps fms/src # Dependency init -$(DEPS)/Makefile: ../ac/deps/Makefile +deps/Makefile: ../ac/deps/Makefile mkdir -p $(@D) cp $< $@ @@ -362,15 +367,18 @@ $(DEPS)/Makefile: ../ac/deps/Makefile # TODO: # - Avoid re-building FMS and MOM6 src by re-using existing object/mod files # - Use autoconf rather than mkmf templates -MK_TEMPLATE ?= ../../$(DEPS)/mkmf/templates/ncrc-gnu.mk +MK_TEMPLATE ?= ../../deps/mkmf/templates/ncrc-gnu.mk + # NUOPC driver build/nuopc/mom_ocean_model_nuopc.o: build/nuopc/Makefile cd $(@D) && make $(@F) check_mom6_api_nuopc: build/nuopc/mom_ocean_model_nuopc.o + # GFDL coupled driver build/coupled/ocean_model_MOM.o: build/coupled/Makefile cd $(@D) && make $(@F) 
check_mom6_api_coupled: build/coupled/ocean_model_MOM.o + # MCT driver build/mct/mom_ocean_model_mct.o: build/mct/Makefile cd $(@D) && make $(@F) @@ -442,8 +450,8 @@ $(eval $(call CONFIG_RULE,tc3,grid)) # Color highlights for test results RED = \033[0;31m -YELLOW = \033[0;33m GREEN = \033[0;32m +YELLOW = \033[0;33m MAGENTA = \033[0;35m RESET = \033[0m @@ -544,7 +552,6 @@ $(foreach c,$(CONFIGS),$(eval $(call CONFIG_DIM_RULE,$(c)))) define STAT_RULE work/%/$(1)/ocean.stats work/%/$(1)/chksum_diag: build/$(2)/MOM6 $(VENV_PATH) @echo "Running test $$*.$(1)..." - if [ $(3) ]; then find build/$(2) -name *.gcda -exec rm -f '{}' \; ; fi mkdir -p $$(@D) cp -RL $$*/* $$(@D) if [ -f $$(@D)/Makefile ]; then \ @@ -571,20 +578,32 @@ work/%/$(1)/ocean.stats work/%/$(1)/chksum_diag: build/$(2)/MOM6 $(VENV_PATH) mkdir -p results/$$* ; \ cd build/$(2) ; \ gcov -b *.gcda > gcov.$$*.$(1).out ; \ - curl -s $(CODECOV_UPLOADER_URL) -o codecov ; \ - chmod +x codecov ; \ - ./codecov -R . -Z -f "*.gcov" -n $$@ \ - > codecov.$$*.$(1).out \ - 2> codecov.$$*.$(1).err \ - && echo -e "${MAGENTA}Report uploaded to codecov.${RESET}"; \ + find -name "*.gcov" -exec sed -i -r 's/^( *[0-9]*)\*:/ \1:/g' {} \; ; \ fi endef +# Upload coverage reports +CODECOV_UPLOADER_URL ?= https://uploader.codecov.io/latest/linux/codecov +codecov: + curl -s $(CODECOV_UPLOADER_URL) -o $@ + chmod +x codecov + +.PHONY: report.cov +report.cov: run.cov codecov + ./codecov -R build/cov -Z -f "*.gcov" \ + > build/cov/codecov.out \ + 2> build/cov/codecov.err \ + && echo -e "${MAGENTA}Report uploaded to codecov.${RESET}" \ + || { \ + echo -e "${RED}Failed to upload report.${RESET}" ; \ + if [ "$(REQUIRE_COVERAGE_UPLOAD)" = true ] ; then false ; fi ; \ + } + # Define $(,) as comma escape character , := , -$(eval $(call STAT_RULE,symmetric,symmetric,$(REPORT_COVERAGE),,,1)) +$(eval $(call STAT_RULE,symmetric,symmetric,,,,1)) $(eval $(call STAT_RULE,asymmetric,asymmetric,,,,1)) $(eval $(call STAT_RULE,target,target,,,,1)) $(eval 
$(call STAT_RULE,repro,repro,,,,1)) @@ -599,7 +618,7 @@ $(eval $(call STAT_RULE,dim.z,symmetric,,Z_RESCALE_POWER=11,,1)) $(eval $(call STAT_RULE,dim.q,symmetric,,Q_RESCALE_POWER=11,,1)) $(eval $(call STAT_RULE,dim.r,symmetric,,R_RESCALE_POWER=11,,1)) -$(eval $(call STAT_RULE,cov,cov,$(REPORT_COVERAGE),,,1)) +$(eval $(call STAT_RULE,cov,cov,true,,,1)) # Generate the half-period input namelist as follows: # 1. Fetch DAYMAX and TIMEUNIT from MOM_input @@ -676,10 +695,11 @@ test.summary: #--- -# unit test +# Unit test -.PHONY: unit.cov -unit.cov: build/unit/MOM_new_unit_tests.gcov +# NOTE: Using file parser gcov report as a proxy for test completion +.PHONY: run.cov.unit +run.cov.unit: build/unit/MOM_file_parser_tests.F90.gcov work/unit/std.out: build/unit/MOM_unit_tests if [ $(REPORT_COVERAGE) ]; then \ @@ -700,31 +720,28 @@ work/unit/std.out: build/unit/MOM_unit_tests cat p2.std.err | tail -n 100 ; \ ) -build/unit/codecov: - mkdir -p $(@D) - cd $(@D) \ - && curl -s $(CODECOV_UPLOADER_URL) -o $(@F) - chmod +x $@ - -# Use driver coverage file as a proxy for the run +# NOTE: .gcov actually depends on .gcda, but .gcda is produced with std.out # TODO: Replace work/unit/std.out with *.gcda? 
-build/unit/MOM_new_unit_tests.gcov: work/unit/std.out - mkdir -p $(@D) +build/unit/MOM_file_parser_tests.F90.gcov: work/unit/std.out cd $(@D) \ && gcov -b *.gcda > gcov.unit.out + find $(@D) -name "*.gcov" -exec sed -i -r 's/^( *[0-9]*)\*:/ \1:/g' {} \; + +.PHONY: report.cov.unit +report.cov.unit: build/unit/MOM_file_parser_tests.F90.gcov codecov + ./codecov -R build/unit -f "*.gcov" -Z -n "Unit tests" \ + > build/unit/codecov.out \ + 2> build/unit/codecov.err \ + && echo -e "${MAGENTA}Report uploaded to codecov.${RESET}" \ + || { \ + echo -e "${RED}Failed to upload report.${RESET}" ; \ + if [ "$(REQUIRE_COVERAGE_UPLOAD)" = true ] ; then false ; fi ; \ + } -# Use driver coverage file as a proxy for the run -.PHONY: unit.cov.upload -unit.cov.upload: build/unit/MOM_new_unit_tests.gcov build/unit/codecov - cd build/unit \ - && ./codecov -R . -Z -f "*.gcov" -n "Unit tests" \ - > codecov.unit.out \ - 2> codecov.unit.err \ - && echo -e "${MAGENTA}Report uploaded to codecov.${RESET}" #--- -# Profiling -# XXX: This is experimental work to track, log, and report changes in runtime +# Profiling based on FMS clocks + PCONFIGS = p0 .PHONY: profile @@ -748,8 +765,9 @@ work/p0/%/std.out: cd $(@D) \ && $(MPIRUN) -n 1 ../../../$< 2> std.err > std.out + #--- -# Same but with perf +# Profiling based on perf output # TODO: This expects the -e flag, can I handle it in the command? PERF_EVENTS ?= diff --git a/.testing/README.md b/.testing/README.md deleted file mode 100644 index ef02bcfa09..0000000000 --- a/.testing/README.md +++ /dev/null @@ -1,277 +0,0 @@ -# .testing - -This directory contains the Makefile and test configurations used to evaluate -submissions to the MOM6 codebase. The tests are designed to run either locally -or in a CI environment such as Travis. - - -## Overview - -This section gives a very brief overview of the test suite and how to use it. 
- -To build and run the model tests: -``` -make -j -make -j test -``` -For new users, the default configuration should be suitable for most platforms. -If not, then the following options may need to be configured. - -`MPIRUN` (*default:* `mpirun`) - - Name of the MPI launcher. Often this is `mpirun` or `mpiexec` but may all - need to run through a scheduler, e.g. `srun` if using Slurm. - -`DO_REGRESSION_TESTS` (*default: none*) - - Set to `true` to compare output with `dev/gfdl`. - -`DO_REPRO_TESTS` (*default: none*) - - Set to `true` to compare DEBUG and REPRO builds, which typically correspond - to unoptimized and optimized builds. See TODO for more information. - -These settings can either be specified at the command line, as shown below -``` -make DO_REGRESSION_TESTS=true -make test DO_REGRESSION_TESTS=true -``` -or saved in a configuration file, `config.mk`. - -To run individual classes of tests, use the subclass name: -``` -make test.grids -make test.layouts -make DO_REGRESSION_TESTS=true test.regressions -``` -To test an individual test configuration (TC): -``` -make tc0.grid -``` -See "Tests" and "Test Configurations" for the complete list of tests. - -The rest of the document describes the test suite in more detail, including -names and descriptions of the test classes and configurations. - - -## Testing overview - -The test suite checks for numerical consistency of the model output across -different model configurations when subjected to relevant numerical and -mathematical transformations, such as grid layout or dimensional rescaling. If -the model state is unchanged after each transformation, then the test is -reported as passing. Any discrepancy in the model state causes the test to -fail. - -Model state is currently defined by the `ocean.stats` output file, which -reports the total energy (per unit mass) at machine precision alongside similar -global metrics at lower precision, such as mass or mean sea level. 
- -Diagnostics are based on the MOM checksum function, which includes the mean, -minimum, and maximum values, alongside a bitcount checksum, in the physical -domain, which are saved in the `chksum_diag` output file. - - -## Build configuration - -The test suite defines a DEBUG and a REPRO build, which resemble targets used -at GFDL. The DEBUG build is intended for detecting potential errors and -troubleshooting, while the REPRO build has typically been optimized for -production runs. - -Ideally, the DEBUG and REPRO runs will produce identical results, although this -is often not the case for many compilers and platforms. The `DO_REPRO_TEST` -flag is used to test DEBUG/REPRO equivalency. - -The following options are provided to configure your compiler flags. - -`FCFLAGS_DEBUG` (*default:* `-g -O0`) - - Specify the flags used in the DEBUG build. These are the flags used for all - tests excepting the REPRO builds. They are also used to build the FMS - library. - - These should be used to enable settings favorable to debugging, such as no - optimizations, backtraces, range checking, and warnings. - - For more aggressive debugging flags which cannot be used with FMS, see - `FCFLAGS_INIT`. - -`FCFLAGS_REPRO:` (*default:* `-g -O2`) - - Specify the optimized reproducible run, typically used in production runs. - - Ideally, this should consist of optimization flags which improve peformance - but do not change model output. In practice, this is difficult to achieve, - and should only used in certain environments. - -`FCFLAGS_INIT` (*default: none*) - - This flag was historically used to specify variable initialization, such as - nonzero integers or floating point values, and is still generally used for - this purpose. - - As implemented, it is used for all MOM6 builds. It is not used for FMS - builds, so can also act as a debugging flag independent of FMS. 
- -`FCFLAGS_COVERAGE` (*default: none*) - - This flag is used to define a build which supports some sort of code - coverage, often one which is handled by the CI. - - For many compilers, this is set to `--coverage`, and is applied to both the - compiler (`FCFLAGS`) and linker (`LDFLAGS`). - -Example values used by GFDL and Travis for the GFortran compiler are shown -below. -``` -FCFLAGS_DEBUG="-g -O0 -Wextra -Wno-compare-reals -fbacktrace -ffpe-trap=invalid,zero,overflow -fcheck=bounds" -FCFLAGS_REPRO="-g -O2 -fbacktrace" -FCFLAGS_INIT="-finit-real=snan -finit-integer=2147483647 -finit-derived" -FCFLAGS_COVERAGE="--coverage" -``` - -Note that the default values for these flags are very minimal, in order to -ensure compatibility over the largest possible range of compilers. - -Like all configuration variables, these can be specified in a `config.mk` file. - - -## Building the executables - -Run `make` to build the test executables. -``` -make -``` -This will fetch the MKMF build toolchain, fetch and compile the FMS framework -library, and compile the executables used in the test suite. The default -configuration uses the symmetric grid in the debug-compile mode, with -optimizations disabled and stronger quality controls. The following -executables will be created. - -- `build/symmetric/MOM6`: Symmetric grid configuration (i.e. extended grids - along western and/or southern boundaries for selected fields). This is the - default configuration. - -- `build/asymmetric/MOM6`: Non-symmetric grid (equal-sized grids) - -- `build/repro/MOM6`: Optimized reproducible mode - -- `build/target/MOM6`: A reference build for regression testing - -- `build/openmp/MOM6`: OpenMP-enabled build - -The `target` and `repro` builds are only created when their respective tests -are set to `true`. 
- - -### Regression testing - -When regression tests are enabled, the Makefile will check out a second copy of -the repository from a specified URL and branch given by `MOM_TARGET_URL` and -`MOM_TARGET_BRANCH`, respectively. The code is checked out into the -`TARGET_CODEBASE` directory. - -The default settings, with resolved values as comments, are shown below. -``` -MOM_TARGET_SLUG = NOAA-GFDL/MOM6 -MOM_TARGET_URL = https://github.com/$(MOM_TARGET_SLUG) - #= https://github.com/NOAA-GFDL/MOM6 -MOM_TARGET_LOCAL_BRANCH = dev/gfdl -MOM_TARGET_BRANCH = origin/$(MOM_TARGET_LOCAL_BRANCH) - #= origin/dev/gfdl -TARGET_CODEBASE = $(BUILD)/target_codebase -``` -These default values can be configured to target a particular development -branch. - -Currently the target can only be specifed by branch name, rather than hash. - -New diagnostics do not report as a fail, and are not tracked by any CIs, but -the test will report a warning to the user. - - -## Tests - -Using `test` will run through the full test suite. -``` -make test -``` -The tests are gathered into the following groups. - -- `test.regressions`: Regression tests relative to a code state (when enabled) -- `test.grids`: Symmetric vs nonsymmetric grids -- `test.layouts`: Domain decomposition, based on parallelization -- `test.restarts`: Resubmission by restarts -- `test.repros`: Optimized (REPRO) and unoptimized (DEBUG) compilation -- `test.nans`: NaN initialization of allocated arrays -- `test.dims`: Dimensional scaling (length, time, thickness, depth) - -Each group of tests can also be run individually, such as in the following -example. -``` -make test.grids -``` - -Each configuration is tested relative to the `symmetric` build, and reports a -fail if the answers differ from this build. - - -## Test configurations - -The following model test configurations (TCs) are supported, and are based on -configurations in the MOM6-examples repository. 
- -- `tc0`: Unit testing of various model components, based on `unit_tests` -- `tc1`: A low-resolution version of the `benchmark` configuration - - `tc1.a`: Use the un-split mode with Runge-Kutta 3 time integration - - `tc1.b`: Use the un-split mode with Runge-Kutta 2 time integration -- `tc2`: An ALE configuration based on tc1 with tides - - `tc2.a`: Use sigma, PPM_H4 and no tides -- `tc3`: An open-boundary condition (OBC) test based on `circle_obcs` -- `tc4`: Sponges and initialization using I/O - - -## Code coverage - -Code coverage reports the lines of code which have been tested, and can be used -to determine if a particular section is untested. - -Coverage is measured using `gcov` and is reported for TCs using the `symmetric` -executable. - -Coverage reporting is optionally uploaded to the `codecov.io` site. -``` -https://codecov.io/gh/NOAA-GFDL/MOM6 -``` -This is disabled on default, but can be enabled by the `REPORT_COVERAGE` flag. -``` -make test REPORT_COVERAGE=true -``` -Note that any uploads will require a valid CodeCov token. - - -## Running on Travis - -Whenever code is pushed to GitHub or a pull request (PR) is created, the test -suite is triggered and the code changes are tested. - -When the tests are run on Travis, the following variables are re-defined: - -- `DO_REPRO_TESTS` is set to `true` for all tests. - -- `DO_REGRESSION_TESTS` is set to `true` for a PR submission, and is unset for - code pushes. - -- `MOM_TARGET_SLUG` is set to `TRAVIS_REPO_SLUG`, the URL stub of the model to - be built. - - For submissions to NOAA-GFDL, this will be set to `NOAA-GFDL/MOM6` and the - reference URL will be `https://github.com/NOAA-GFDL/MOM6`. - -- `MOM_TARGET_LOCAL_BRANCH` is set to `TRAVIS_BRANCH`. - - For a code push, this is set to the name of the active branch at GitHub. For - a PR, this is the name of the branch which is receiving the PR. - -- `REPORT_COVERAGE` is set to `true`. 
diff --git a/.testing/README.rst b/.testing/README.rst new file mode 100644 index 0000000000..5bab076707 --- /dev/null +++ b/.testing/README.rst @@ -0,0 +1,371 @@ +=============== +MOM6 Test Suite +=============== + +This directory contains test configurations used to evaluate submissions to the +MOM6 codebase. The tests are designed to run either locally or in a CI +environment. + + +Usage +===== + +``make -j`` + Build the FMS library and test executables. + +``make -j test`` + Run the test suite, defined in the ``tc`` directories. + +``make clean.build`` + Delete only the MOM6 test executables. + +``make clean`` + Delete the MOM6 test executables and dependency builds (FMS). + + +Configuration +============= + +The test suite includes many configuration flags and variables which can be +configured at either the command line, or can be stored in a ``config.mk`` +file. + +Several of the following may require configuration for particular systems. + +``MPIRUN`` (*default:* ``mpirun``) + Name of the MPI launcher. Often this is ``mpirun`` or ``mpiexec`` but may + also need to run through a scheduler, e.g. ``srun`` if using Slurm. + +``FRAMEWORK`` (*default:* ``fms1``) + Select either the legacy FMS framework (``fms1``) or an FMS2 I/O compatible + version (``fms2``). + +``DO_REPRO_TESTS`` (*default:* *none*) + Set to ``true`` to test the REPRO build and confirm equivalence of DEBUG and + REPRO builds. + + For compilers with aggressive optimization, DEBUG and REPRO may not produce + identical results and this test should not be used. + +``DO_REGRESSION_TESTS`` (*default:* *none*) + Set to ``true`` to compare output with a defined target branch, set by + ``MOM_TARGET_LOCAL_BRANCH``. (NOTE: This defaults to ``dev/gfdl``). + +``DO_COVERAGE`` (*default:* *none*) + Set to ``true`` to enable code coverage. Currently only configured for + ``gcov``. 
+ +``REQUIRE_COVERAGE_UPLOAD`` (*default:* *none*) + Set to ``true`` if failure to upload the coverage report to codecov.io + should result in an error. This should only be enabled if codecov.io has + already been configured for the user, or by a supporting CI. + +``DO_PROFILE`` (*default:* *none*) + Set to ``true`` to enable performance profile monitoring. Models are + compiled using ``FCFLAGS_OPT`` (see below) and performance of various + functions is reported and compared to the target branch. + + Results from these tests should only be considered if the platform has been + configured for benchmarking. + + +Build configuration +------------------- + +Compilation is controlled with the following variables. Defaults are chosen +for the widest compatibility across platforms. Users should modify these to +reflect their own needs. + +``FCFLAGS_DEBUG`` (*default:* ``-g -O0``) + The "DEBUG" build, for rapid compilation and debugging. + +``FCFLAGS_REPRO`` (*default:* ``-g -O2``) + The "REPRO" build, for reproducible production runs. + +``FCFLAGS_OPT`` (*default:* ``-g -O3``) + The "OPT" build, for aggressive optimization and profiling. + +``FCFLAGS_COVERAGE`` (*default:* ``-g -O0 -fbacktrace --coverage``) + Flags used for producing code coverage reports. Defaults are for gcc, + although ``--coverage`` is relatively common across compilers. + +``FCFLAGS_INIT`` (*default:* *none*) + A placeholder flag for aggressive initialization testing. This is appended + to existing flags (usually ``FCFLAGS_DEBUG``). + +``FCFLAGS_FMS`` (*default:* ``FCFLAGS_DEBUG``) + Compiler flags used for the supporting FMS library. In most cases, it is + sufficient to use ``FCFLAGS_DEBUG``. + +``LDFLAGS_COVERAGE`` (*default:* ``--coverage``) + Linker flags to enable coverage. + +``LDFLAGS_USER`` (*default:* *none*) + A placeholder for supplemental linker flags, such as an external library not + configured by autoconf. + +The following flags are passed as environment variables to other Makefiles. 
+ +``FC``, ``MPIFC`` + The Fortran compiler and its MPI wrapper. + +``CC``, ``MPICC`` + The C compiler and its MPI wrapper. This is primarily used by FMS, but may + be used in some MOM6 autoconf tests. + +If unset, these will be configured by autoconf or from the user's environment +variables. + +Additional settings for particular tasks are explained below. + + +Example ``config.mk`` +--------------------- + +An example config.mk file configured for GFortran is shown below.:: + + DO_REPRO_TESTS = true + DO_REGRESSION_TESTS = true + DO_COVERAGE = true + DO_PROFILE = true + + FCFLAGS_DEBUG = -g -O0 -Wextra -Wno-compare-reals -fbacktrace -ffpe-trap=invalid,zero,overflow -fcheck=bounds + FCFLAGS_REPRO = -g -O2 -fbacktrace + FCFLAGS_OPT = -g -O3 -mavx -mfma + FCFLAGS_INIT = -finit-real=snan -finit-integer=2147483647 -finit-derived + FCFLAGS_COVERAGE = --coverage + +The file follows Makefile syntax, so quotations are generally not required and +spaces are permitted around assignment operators (``=``). + + +Builds +====== + +Run ``make`` to build the test executables.:: + + $ make + +This will fetch external dependencies, compile the FMS framework library, and +compile the executables used in the test suite. + +The following executables will be created. + +``build/symmetric/MOM6`` + Use symmetric grids for model fields, using DEBUG flags. + + A symmetric grid is one where each finite-volume cell has grid points along + all faces. Often this results in a redundant row of points along each side + of a regular domain. + + This is the recommended production configuration, and is the reference build + for all tests in the suite. + +``build/asymmetric/MOM6`` + Use asymmetric grids for model fields. + + Asymmetric grids eliminate redundant fields along western and southern + boundaries, which reduces the total number of points. They also ensure + that center, face, and vertex field arrays are the same size. 
+ + The disadvantages are greater computational complexity along these + boundaries. They also do not support open boundary conditions. + + Asymmetric grids were traditionally used in many legacy ocean models. + +``build/repro/MOM6`` + Optimized build for doing reproducible runs, based on REPRO flags. + + This is only built if ``DO_REPRO_TESTS`` is set to ``true``. + +``build/target/MOM6`` + A reference build for regression testing. + + The reference branch is set by ``MOM_TARGET_LOCAL_BRANCH``. This would + generally be configured by a CI to a pull request's target branch. This is + only built if ``DO_REGRESSION_TESTS`` is set to ``true``. + +``build/openmp/MOM6`` + A DEBUG build with OpenMP enabled. + + +Tests +===== + +The ``test`` rule will run all of the tests.:: + + $ make test + +Tests are based on configurations which are designed to give identical output. +When the output differs, the test reports a failure. + + +Test groups +----------- + +The tests are gathered into the following groups. + +``test.grid`` + Compare symmetric and nonsymmetric grids. + +``test.regression`` + Compare the current codebase to a target branch (e.g. ``dev/gfdl``). + +``test.layout`` + Compare a serial (one domain) and a parallel (two domain) simulation. + +``test.restart`` + Compare a single run to two runs separated by a restart. + +``test.repro`` + Compare the unoptimized (DEBUG) and optimized (REPRO) builds. + +``test.nan`` + Enable NaN-initialization of allocated (heap) arrays. + + This relies on internal features of glibc and may not work on other + platforms. + +``test.dim`` + Enable dimension rescaling tests. + +Each test uses the symmetric build for its reference state. + +These rules can be used to run individual groups of tests.:: + + $ make test.grid + + +Test experiments +---------------- + +For each group, we test each of the following configurations, which represent +idealizations of various production experiments. 
+ +``tc0`` + Unit testing of various model components, based on ``unit_tests`` + +``tc1`` + A low-resolution version of the ``benchmark`` configuration + + ``tc1.a`` + Use the un-split mode with Runge-Kutta 3 time integration + + ``tc1.b`` + Use the un-split mode with Runge-Kutta 2 time integration + +``tc2`` + An ALE configuration based on tc1 with tides + + ``tc2.a`` + Use sigma, PPM_H4 and no tides + +``tc3`` + An open-boundary condition (OBC) test based on ``circle_obcs`` + +``tc4`` + Sponges and initialization using I/O + + +Test procedure +-------------- + +The test suite checks for numerical consistency of the model output across +different model configurations when subjected to relevant numerical and +mathematical transformations, such as grid layout or dimensional rescaling. If +the model state is unchanged after each transformation, then the test is +reported as passing. Any discrepancy in the model state causes the test to +fail. + +Model state is currently defined by the ``ocean.stats`` output file, which +reports the total energy (per unit mass) at machine precision alongside similar +global metrics at lower precision, such as mass or mean sea level. + +Diagnostics are based on the MOM checksum function, which includes the mean, +minimum, and maximum values, alongside a bitcount checksum, in the physical +domain, which are saved in the ``chksum_diag`` output file. + + +Regression testing +================== + +When ``DO_REGRESSION_TESTS`` is enabled, the Makefile will check out a second +copy of the repository from a specified URL and branch given by +``MOM_TARGET_URL`` and ``MOM_TARGET_BRANCH``, respectively. The code is +checked out into the ``TARGET_CODEBASE`` directory. 
+ +The default settings, with resolved values as comments, are shown below.:: + + MOM_TARGET_SLUG = NOAA-GFDL/MOM6 + MOM_TARGET_URL = https://github.com/$(MOM_TARGET_SLUG) + #= https://github.com/NOAA-GFDL/MOM6 + MOM_TARGET_LOCAL_BRANCH = dev/gfdl + MOM_TARGET_BRANCH = origin/$(MOM_TARGET_LOCAL_BRANCH) + #= origin/dev/gfdl + TARGET_CODEBASE = $(BUILD)/target_codebase + +These default values can be configured to target a particular development +branch. + +Currently the target can only be specified by branch name, rather than hash. + +New diagnostics do not report as a fail, and are not tracked by any CIs, but +the test will report a warning to the user. + + +Code coverage +============= + +Code coverage reports the lines of code which have been tested, and can be used +to determine if a particular section is untested. + +To enable code coverage, set ``DO_COVERAGE`` to ``true``. + +Reports are stored in the build directories. There is one report per source +file, and each ends in the ``.gcov`` suffix. Two sets of coverage reports are +generated. + +``build/cov`` + Test suite code coverage + +``build/unit`` + Unit test code coverage + +To upload the tests to codecov.io, use the following rules.:: + + $ make report.cov # Test suite + $ make report.cov.unit # Unit test + +Note that any uploads will require a valid CodeCov token. If uploading through +the CI, this can be set up through your GitHub account. + +Pull request coverage reports for the CI can be checked at +https://codecov.io/gh/NOAA-GFDL/MOM6 + + +CI configuration +================ + +Whenever code is pushed to GitHub or a pull request (PR) is created, the test +suite is run. + +When the tests are run on the CI, the following variables are re-defined: + +- ``DO_REPRO_TESTS`` is set to ``true`` for all tests. + +- ``DO_REGRESSION_TESTS`` is set to ``true`` for a PR submission, and is unset for + code pushes. + +- ``DO_COVERAGE`` is set to ``true``. 
+ + - For pull requests, ``REQUIRE_COVERAGE_UPLOAD`` is set to ``true``. + +- ``MOM_TARGET_SLUG`` is set to the URL stub of the model to be built. + + For submissions to NOAA-GFDL, this will be set to ``NOAA-GFDL/MOM6`` and the + reference URL will be ``https://github.com/NOAA-GFDL/MOM6``. + +- ``MOM_TARGET_LOCAL_BRANCH`` + + For a code push, this is set to the name of the active branch at GitHub. For + a PR, this is the name of the branch which is receiving the PR. diff --git a/ac/Makefile.in b/ac/Makefile.in index 599381a35b..930816bc8c 100644 --- a/ac/Makefile.in +++ b/ac/Makefile.in @@ -24,10 +24,14 @@ Makefile: @srcdir@/ac/Makefile.in config.status ./config.status +# Recursive wildcard (finds all files in $1 with suffixes in $2) +rwildcard=$(foreach d,$(wildcard $(1:=/*)),$(call rwildcard,$d,$2) $(filter $(subst *,%,$2),$d)) + + # Generate dependencies .PHONY: depend depend: Makefile.dep -Makefile.dep: +Makefile.dep: $(MAKEDEP) $(call rwildcard,$(SRC_DIRS),*.h *.c *.inc *.F90) $(MAKEDEP) -o Makefile.dep -e $(SRC_DIRS) diff --git a/ac/configure.ac b/ac/configure.ac index bf1cf11776..dc4962307e 100644 --- a/ac/configure.ac +++ b/ac/configure.ac @@ -231,7 +231,7 @@ AC_SUBST([MAKEDEP]) AC_SUBST([SRC_DIRS], ["${srcdir}/src ${MODEL_FRAMEWORK} ${srcdir}/config_src/external ${DRIVER_DIR} ${MEM_LAYOUT}"] ) -AC_CONFIG_COMMANDS([Makefile.dep], [make depend]) +AC_CONFIG_COMMANDS(Makefile.dep, [make depend]) # setjmp verification diff --git a/src/core/MOM.F90 b/src/core/MOM.F90 index 7b84fc81fe..e8e95ea560 100644 --- a/src/core/MOM.F90 +++ b/src/core/MOM.F90 @@ -40,8 +40,8 @@ module MOM use MOM_io, only : MOM_io_init, vardesc, var_desc use MOM_io, only : slasher, file_exists, MOM_read_data use MOM_obsolete_params, only : find_obsolete_params -use MOM_restart, only : register_restart_field, register_restart_pair -use MOM_restart, only : query_initialized, save_restart, restart_registry_lock +use MOM_restart, only : register_restart_field, register_restart_pair, 
save_restart +use MOM_restart, only : query_initialized, set_initialized, restart_registry_lock use MOM_restart, only : restart_init, is_new_run, determine_is_new_run, MOM_restart_CS use MOM_spatial_means, only : global_mass_integral use MOM_time_manager, only : time_type, real_to_time, time_type_to_real, operator(+) @@ -2926,11 +2926,12 @@ subroutine initialize_MOM(Time, Time_init, param_file, dirs, CS, restart_CSp, & endif else CS%tv%frazil(:,:) = 0.0 + call set_initialized(CS%tv%frazil, "frazil", restart_CSp) endif endif if (CS%interp_p_surf) then - CS%p_surf_prev_set = query_initialized(CS%p_surf_prev,"p_surf_prev",restart_CSp) + CS%p_surf_prev_set = query_initialized(CS%p_surf_prev, "p_surf_prev", restart_CSp) if (CS%p_surf_prev_set) then ! Test whether the dimensional rescaling has changed for pressure. @@ -2958,7 +2959,7 @@ subroutine initialize_MOM(Time, Time_init, param_file, dirs, CS, restart_CSp, & endif endif - if (query_initialized(CS%ave_ssh_ibc,"ave_ssh",restart_CSp)) then + if (query_initialized(CS%ave_ssh_ibc, "ave_ssh", restart_CSp)) then if ((US%m_to_Z_restart /= 0.0) .and. 
(US%m_to_Z_restart /= 1.0) ) then Z_rescale = 1.0 / US%m_to_Z_restart do j=js,je ; do i=is,ie @@ -2971,6 +2972,7 @@ subroutine initialize_MOM(Time, Time_init, param_file, dirs, CS, restart_CSp, & else call find_eta(CS%h, CS%tv, G, GV, US, CS%ave_ssh_ibc, dZref=G%Z_ref) endif + call set_initialized(CS%ave_ssh_ibc, "ave_ssh", restart_CSp) endif if (CS%split) deallocate(eta) diff --git a/src/core/MOM_dynamics_split_RK2.F90 b/src/core/MOM_dynamics_split_RK2.F90 index 003033659e..c011d18c44 100644 --- a/src/core/MOM_dynamics_split_RK2.F90 +++ b/src/core/MOM_dynamics_split_RK2.F90 @@ -30,7 +30,7 @@ module MOM_dynamics_split_RK2 use MOM_get_input, only : directories use MOM_io, only : vardesc, var_desc use MOM_restart, only : register_restart_field, register_restart_pair -use MOM_restart, only : query_initialized, save_restart +use MOM_restart, only : query_initialized, set_initialized, save_restart use MOM_restart, only : restart_init, is_new_run, MOM_restart_CS use MOM_time_manager, only : time_type, time_type_to_real, operator(+) use MOM_time_manager, only : operator(-), operator(>), operator(*), operator(/) @@ -1131,7 +1131,7 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, GV, US, param type(param_file_type), intent(in) :: param_file !< parameter file for parsing type(diag_ctrl), target, intent(inout) :: diag !< to control diagnostics type(MOM_dyn_split_RK2_CS), pointer :: CS !< module control structure - type(MOM_restart_CS), intent(in) :: restart_CS !< MOM restart control structure + type(MOM_restart_CS), intent(inout) :: restart_CS !< MOM restart control structure real, intent(in) :: dt !< time step [T ~> s] type(accel_diag_ptrs), target, intent(inout) :: Accel_diag !< points to momentum equation terms for !! 
budget analysis @@ -1304,6 +1304,7 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, GV, US, param do k=1,nz ; do j=js,je ; do i=is,ie CS%eta(i,j) = CS%eta(i,j) + h(i,j,k) enddo ; enddo ; enddo + call set_initialized(CS%eta, trim(eta_rest_name), restart_CS) elseif ((GV%m_to_H_restart /= 0.0) .and. (GV%m_to_H_restart /= 1.0)) then H_rescale = 1.0 / GV%m_to_H_restart do j=js,je ; do i=is,ie ; CS%eta(i,j) = H_rescale * CS%eta(i,j) ; enddo ; enddo @@ -1315,10 +1316,12 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, GV, US, param CS%barotropic_CSp, restart_CS, calc_dtbt, CS%BT_cont, & CS%tides_CSp) - if (.not. query_initialized(CS%diffu,"diffu",restart_CS) .or. & - .not. query_initialized(CS%diffv,"diffv",restart_CS)) then + if (.not. query_initialized(CS%diffu, "diffu", restart_CS) .or. & + .not. query_initialized(CS%diffv, "diffv", restart_CS)) then call horizontal_viscosity(u, v, h, CS%diffu, CS%diffv, MEKE, VarMix, G, GV, US, CS%hor_visc, & OBC=CS%OBC, BT=CS%barotropic_CSp, TD=thickness_diffuse_CSp) + call set_initialized(CS%diffu, "diffu", restart_CS) + call set_initialized(CS%diffv, "diffv", restart_CS) else if ( (US%s_to_T_restart * US%m_to_L_restart /= 0.0) .and. & (US%s_to_T_restart**2 /= US%m_to_L_restart) ) then @@ -1332,10 +1335,12 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, GV, US, param endif endif - if (.not. query_initialized(CS%u_av,"u2", restart_CS) .or. & - .not. query_initialized(CS%u_av,"v2", restart_CS)) then + if (.not. query_initialized(CS%u_av, "u2", restart_CS) .or. & + .not. query_initialized(CS%v_av, "v2", restart_CS)) then do k=1,nz ; do j=jsd,jed ; do I=IsdB,IedB ; CS%u_av(I,j,k) = u(I,j,k) ; enddo ; enddo ; enddo do k=1,nz ; do J=JsdB,JedB ; do i=isd,ied ; CS%v_av(i,J,k) = v(i,J,k) ; enddo ; enddo ; enddo + call set_initialized(CS%u_av, "u2", restart_CS) + call set_initialized(CS%v_av, "v2", restart_CS) elseif ( (US%s_to_T_restart * US%m_to_L_restart /= 0.0) .and. 
& (US%s_to_T_restart /= US%m_to_L_restart) ) then vel_rescale = US%s_to_T_restart / US%m_to_L_restart @@ -1344,17 +1349,21 @@ subroutine initialize_dyn_split_RK2(u, v, h, uh, vh, eta, Time, G, GV, US, param endif ! This call is just here to initialize uh and vh. - if (.not. query_initialized(uh,"uh",restart_CS) .or. & - .not. query_initialized(vh,"vh",restart_CS)) then + if (.not. query_initialized(uh, "uh", restart_CS) .or. & + .not. query_initialized(vh, "vh", restart_CS)) then do k=1,nz ; do j=jsd,jed ; do i=isd,ied ; h_tmp(i,j,k) = h(i,j,k) ; enddo ; enddo ; enddo call continuity(u, v, h, h_tmp, uh, vh, dt, G, GV, US, CS%continuity_CSp, CS%OBC, pbv) call pass_var(h_tmp, G%Domain, clock=id_clock_pass_init) do k=1,nz ; do j=jsd,jed ; do i=isd,ied CS%h_av(i,j,k) = 0.5*(h(i,j,k) + h_tmp(i,j,k)) enddo ; enddo ; enddo + call set_initialized(uh, "uh", restart_CS) + call set_initialized(vh, "vh", restart_CS) + call set_initialized(CS%h_av, "h2", restart_CS) else - if (.not. query_initialized(CS%h_av,"h2",restart_CS)) then + if (.not. query_initialized(CS%h_av, "h2", restart_CS)) then CS%h_av(:,:,:) = h(:,:,:) + call set_initialized(CS%h_av, "h2", restart_CS) elseif ((GV%m_to_H_restart /= 0.0) .and. (GV%m_to_H_restart /= 1.0)) then H_rescale = 1.0 / GV%m_to_H_restart do k=1,nz ; do j=js,je ; do i=is,ie ; CS%h_av(i,j,k) = H_rescale * CS%h_av(i,j,k) ; enddo ; enddo ; enddo diff --git a/src/core/MOM_open_boundary.F90 b/src/core/MOM_open_boundary.F90 index 7f170f5510..63b9434269 100644 --- a/src/core/MOM_open_boundary.F90 +++ b/src/core/MOM_open_boundary.F90 @@ -3696,6 +3696,7 @@ subroutine update_OBC_segment_data(G, GV, US, OBC, tv, h, Time) real, dimension(:,:,:), allocatable, target :: tmp_buffer ! A buffer for input data [various units] real, dimension(:), allocatable :: h_stack ! Thicknesses at corner points [H ~> m or kg m-2] integer :: is_obc2, js_obc2 + integer :: i_seg_offset, j_seg_offset real :: net_H_src ! 
Total thickness of the incoming flow in the source field [H ~> m or kg m-2] real :: net_H_int ! Total thickness of the incoming flow in the model [H ~> m or kg m-2] real :: scl_fac ! A scaling factor to compensate for differences in total thicknesses [nondim] @@ -3737,6 +3738,8 @@ subroutine update_OBC_segment_data(G, GV, US, OBC, tv, h, Time) ie_obc = min(segment%ie_obc,ied) js_obc = max(segment%js_obc,jsd-1) je_obc = min(segment%je_obc,jed) + i_seg_offset = G%idg_offset - segment%HI%Isgb + j_seg_offset = G%jdg_offset - segment%HI%Jsgb ! Calculate auxiliary fields at staggered locations. ! Segment indices are on q points: @@ -3890,19 +3893,19 @@ subroutine update_OBC_segment_data(G, GV, US, OBC, tv, h, Time) if (segment%field(m)%name == 'V' .or. segment%field(m)%name == 'DVDX' .or. & segment%field(m)%name == 'Vamp' .or. segment%field(m)%name == 'Vphase') then segment%field(m)%buffer_src(is_obc,:,:) = & - tmp_buffer(1,2*(js_obc+G%jdg_offset-segment%HI%Jsgb)+1:2*(je_obc+G%jdg_offset-segment%HI%Jsgb)+1:2,:) + tmp_buffer(1,2*(js_obc+j_seg_offset)+1:2*(je_obc+j_seg_offset)+1:2,:) else segment%field(m)%buffer_src(is_obc,:,:) = & - tmp_buffer(1,2*(js_obc+G%jdg_offset-segment%HI%Jsgb)+1:2*(je_obc+G%jdg_offset-segment%HI%Jsgb):2,:) + tmp_buffer(1,2*(js_obc+j_seg_offset)+1:2*(je_obc+j_seg_offset):2,:) endif else if (segment%field(m)%name == 'U' .or. segment%field(m)%name == 'DUDY' .or. & segment%field(m)%name == 'Uamp' .or. 
segment%field(m)%name == 'Uphase') then segment%field(m)%buffer_src(:,js_obc,:) = & - tmp_buffer(2*(is_obc+G%idg_offset-segment%HI%Isgb)+1:2*(ie_obc+G%idg_offset-segment%HI%Isgb)+1:2,1,:) + tmp_buffer(2*(is_obc+i_seg_offset)+1:2*(ie_obc+i_seg_offset)+1:2,1,:) else segment%field(m)%buffer_src(:,js_obc,:) = & - tmp_buffer(2*(is_obc+G%idg_offset-segment%HI%Isgb)+1:2*(ie_obc+G%idg_offset-segment%HI%Isgb):2,1,:) + tmp_buffer(2*(is_obc+i_seg_offset)+1:2*(ie_obc+i_seg_offset):2,1,:) endif endif else @@ -3910,19 +3913,19 @@ subroutine update_OBC_segment_data(G, GV, US, OBC, tv, h, Time) if (segment%field(m)%name == 'V' .or. segment%field(m)%name == 'DVDX' .or. & segment%field(m)%name == 'Vamp' .or. segment%field(m)%name == 'Vphase') then segment%field(m)%buffer_src(is_obc,:,:) = & - tmp_buffer(1,js_obc+G%jdg_offset-segment%HI%Jsgb+1:je_obc+G%jdg_offset-segment%HI%Jsgb+1,:) + tmp_buffer(1,js_obc+j_seg_offset+1:je_obc+j_seg_offset+1,:) else segment%field(m)%buffer_src(is_obc,:,:) = & - tmp_buffer(1,js_obc+G%jdg_offset-segment%HI%Jsgb+1:je_obc+G%jdg_offset-segment%HI%Jsgb,:) + tmp_buffer(1,js_obc+j_seg_offset+1:je_obc+j_seg_offset,:) endif else if (segment%field(m)%name == 'U' .or. segment%field(m)%name == 'DUDY' .or. & segment%field(m)%name == 'Uamp' .or. segment%field(m)%name == 'Uphase') then segment%field(m)%buffer_src(:,js_obc,:) = & - tmp_buffer(is_obc+G%idg_offset-segment%HI%Isgb+1:ie_obc+G%idg_offset-segment%HI%Isgb+1,1,:) + tmp_buffer(is_obc+i_seg_offset+1:ie_obc+i_seg_offset+1,1,:) else segment%field(m)%buffer_src(:,js_obc,:) = & - tmp_buffer(is_obc+G%idg_offset-segment%HI%Isgb+1:ie_obc+G%idg_offset-segment%HI%Isgb,1,:) + tmp_buffer(is_obc+i_seg_offset+1:ie_obc+i_seg_offset,1,:) endif endif endif @@ -3949,40 +3952,36 @@ subroutine update_OBC_segment_data(G, GV, US, OBC, tv, h, Time) if (segment%is_E_or_W) then if (segment%field(m)%name == 'V' .or. 
segment%field(m)%name == 'DVDX') then segment%field(m)%dz_src(is_obc,:,:) = & - tmp_buffer(1,2*(js_obc+G%jdg_offset-segment%HI%Jsgb)+1:2*(je_obc+G%jdg_offset- & - segment%HI%Jsgb)+1:2,:) + tmp_buffer(1,2*(js_obc+j_seg_offset)+1:2*(je_obc+j_seg_offset)+1:2,:) else segment%field(m)%dz_src(is_obc,:,:) = & - tmp_buffer(1,2*(js_obc+G%jdg_offset-segment%HI%Jsgb)+1:2*(je_obc+G%jdg_offset- & - segment%HI%Jsgb):2,:) + tmp_buffer(1,2*(js_obc+j_seg_offset)+1:2*(je_obc+j_seg_offset):2,:) endif else if (segment%field(m)%name == 'U' .or. segment%field(m)%name == 'DUDY') then segment%field(m)%dz_src(:,js_obc,:) = & - tmp_buffer(2*(is_obc+G%idg_offset-segment%HI%Isgb)+1:2*(ie_obc+G%idg_offset- & - segment%HI%Isgb)+1:2,1,:) + tmp_buffer(2*(is_obc+i_seg_offset)+1:2*(ie_obc+i_seg_offset)+1:2,1,:) else segment%field(m)%dz_src(:,js_obc,:) = & - tmp_buffer(2*(is_obc+G%idg_offset-segment%HI%Isgb)+1:2*(ie_obc+G%idg_offset- & - segment%HI%Isgb):2,1,:) + tmp_buffer(2*(is_obc+i_seg_offset)+1:2*(ie_obc+i_seg_offset):2,1,:) endif endif else if (segment%is_E_or_W) then if (segment%field(m)%name == 'V' .or. segment%field(m)%name == 'DVDX') then segment%field(m)%dz_src(is_obc,:,:) = & - tmp_buffer(1,js_obc+G%jdg_offset-segment%HI%Jsgb+1:je_obc+G%jdg_offset-segment%HI%Jsgb+1,:) + tmp_buffer(1,js_obc+j_seg_offset+1:je_obc+j_seg_offset+1,:) else segment%field(m)%dz_src(is_obc,:,:) = & - tmp_buffer(1,js_obc+G%jdg_offset-segment%HI%Jsgb+1:je_obc+G%jdg_offset-segment%HI%Jsgb,:) + tmp_buffer(1,js_obc+j_seg_offset+1:je_obc+j_seg_offset,:) endif else if (segment%field(m)%name == 'U' .or. 
segment%field(m)%name == 'DUDY') then segment%field(m)%dz_src(:,js_obc,:) = & - tmp_buffer(is_obc+G%idg_offset-segment%HI%Isgb+1:ie_obc+G%idg_offset-segment%HI%Isgb+1,1,:) + tmp_buffer(is_obc+i_seg_offset+1:ie_obc+i_seg_offset+1,1,:) else segment%field(m)%dz_src(:,js_obc,:) = & - tmp_buffer(is_obc+G%idg_offset-segment%HI%Isgb+1:ie_obc+G%idg_offset-segment%HI%Isgb,1,:) + tmp_buffer(is_obc+i_seg_offset+1:ie_obc+i_seg_offset,1,:) endif endif endif diff --git a/src/diagnostics/MOM_wave_structure.F90 b/src/diagnostics/MOM_wave_structure.F90 index d11a7af5ec..0f97b560db 100644 --- a/src/diagnostics/MOM_wave_structure.F90 +++ b/src/diagnostics/MOM_wave_structure.F90 @@ -42,7 +42,8 @@ module MOM_wave_structure real, allocatable, dimension(:,:,:) :: w_strct !< Vertical structure of vertical velocity (normalized) [nondim]. real, allocatable, dimension(:,:,:) :: u_strct - !< Vertical structure of horizontal velocity (normalized) [nondim]. + !< Vertical structure of horizontal velocity (normalized and + !! divided by layer thicknesses) [Z-1 ~> m-1]. real, allocatable, dimension(:,:,:) :: W_profile !< Vertical profile of w_hat(z), where !! w(x,y,z,t) = w_hat(z)*exp(i(kx+ly-freq*t)) is the full time- @@ -141,7 +142,7 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo HxS_here, & !< A layer integrated salinity [S Z ~> ppt m] HxR_here !< A layer integrated density [R Z ~> kg m-2] real :: I_Hnew !< The inverse of a new layer thickness [Z-1 ~> m-1] - real :: drxh_sum !< The sum of density diffrences across interfaces times thicknesses [R Z ~> kg m-2] + real :: drxh_sum !< The sum of density differences across interfaces times thicknesses [R Z ~> kg m-2] real, parameter :: tol1 = 0.0001, tol2 = 0.001 real :: g_Rho0 !< G_Earth/Rho0 in [L2 Z-1 T-2 R-1 ~> m4 s-2 kg-1]. ! 
real :: rescale, I_rescale @@ -152,40 +153,47 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo real :: I_a_int !< inverse of a_int [nondim] real :: f2 !< squared Coriolis frequency [T-2 ~> s-2] real :: Kmag2 !< magnitude of horizontal wave number squared [L-2 ~> m-2] + real :: emag2 ! The sum of the squared magnitudes of the guesses [nondim] + real :: pi_htot ! The gravest vertical wavenumber in this column [Z-1 ~> m-1] + real :: renorm ! A renormalization factor [nondim] logical :: use_EOS !< If true, density is calculated from T & S using an !! equation of state. ! local representations of variables in CS; note, ! not all rows will be filled if layers get merged! real, dimension(SZK_(GV)+1) :: w_strct !< Vertical structure of vertical velocity (normalized) [nondim]. - real, dimension(SZK_(GV)+1) :: u_strct !< Vertical structure of horizontal velocity (normalized) [nondim]. + real, dimension(SZK_(GV)+1) :: u_strct !< Vertical structure of horizontal velocity (normalized and + !! divided by layer thicknesses) [Z-1 ~> m-1]. real, dimension(SZK_(GV)+1) :: W_profile !< Vertical profile of w_hat(z) = W0*w_strct(z) [Z T-1 ~> m s-1]. real, dimension(SZK_(GV)+1) :: Uavg_profile !< Vertical profile of the magnitude of - !! horizontal velocity [L T-1 ~> m s-1]. + !! horizontal velocity [L T-1 ~> m s-1]. real, dimension(SZK_(GV)+1) :: z_int !< Integrated depth [Z ~> m] real, dimension(SZK_(GV)+1) :: N2 !< Squared buoyancy frequency at each interface [T-2 ~> s-2]. real, dimension(SZK_(GV)+1) :: w_strct2 !< squared values [nondim] - real, dimension(SZK_(GV)+1) :: u_strct2 !< squared values [nondim] + real, dimension(SZK_(GV)+1) :: u_strct2 !< squared values [Z-2 ~> m-2] real, dimension(SZK_(GV)) :: dz !< thicknesses of merged layers (same as Hc I hope) [Z ~> m] - ! 
real, dimension(SZK_(GV)+1) :: dWdz_profile !< profile of dW/dz - real :: w2avg !< average of squared vertical velocity structure funtion [Z ~> m] - real :: int_dwdz2 !< Vertical integral of the square of u_strct [Z ~> m] + ! real, dimension(SZK_(GV)+1) :: dWdz_profile !< profile of dW/dz times total depth [Z T-1 ~> m s-1] + real :: w2avg !< average of squared vertical velocity structure function [Z ~> m] + real :: int_dwdz2 !< Vertical integral of the square of u_strct [Z-1 ~> m-1] real :: int_w2 !< Vertical integral of the square of w_strct [Z ~> m] real :: int_N2w2 !< Vertical integral of N2 [Z T-2 ~> m s-2] real :: KE_term !< terms in vertically averaged energy equation [R Z ~> kg m-2] real :: PE_term !< terms in vertically averaged energy equation [R Z ~> kg m-2] real :: W0 !< A vertical velocity magnitude [Z T-1 ~> m s-1] - real :: gp_unscaled !< A version of gprime rescaled to [L T-2 ~> m s-2]. + real :: U_mag !< A horizontal velocity magnitude times the depth of the + !! ocean [Z L T-1 ~> m2 s-1] real, dimension(SZK_(GV)-1) :: lam_z !< product of eigen value and gprime(k); one value for each - !< interface (excluding surface and bottom) - real, dimension(SZK_(GV)-1) :: a_diag, b_diag, c_diag - !< diagonals of tridiagonal matrix; one value for each - !< interface (excluding surface and bottom) + !< interface (excluding surface and bottom) [Z-1 ~> m-1] + real, dimension(SZK_(GV)-1) :: a_diag !< upper diagonal of tridiagonal matrix; one value for each + !< interface (excluding surface and bottom) [Z-1 ~> m-1] + real, dimension(SZK_(GV)-1) :: c_diag !< lower diagonal of tridiagonal matrix; one value for each + !< interface (excluding surface and bottom) [Z-1 ~> m-1] + real, dimension(SZK_(GV)-1) :: b_dom !< Matrix center diagonal offset from a_diag + c_diag; one value + !< for each interface (excluding surface and bottom) [Z-1 ~> m-1] real, dimension(SZK_(GV)-1) :: e_guess !< guess at eigen vector with unit amplitude (for TDMA) [nondim] real, 
dimension(SZK_(GV)-1) :: e_itt !< improved guess at eigen vector (from TDMA) [nondim] - real :: Pi - integer :: kc - integer :: i, j, k, k2, itt, is, ie, js, je, nz, nzm, row, ig, jg, ig_stop, jg_stop + real :: Pi ! 3.1415926535... [nondim] + integer :: i, j, k, k2, kc, itt, is, ie, js, je, nz, nzm, row, ig, jg, ig_stop, jg_stop is = G%isc ; ie = G%iec ; js = G%jsc ; je = G%jec ; nz = GV%ke I_a_int = 1/a_int @@ -409,78 +417,85 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo call MOM_error(FATAL, "wave_structure: mismatch in total depths") endif - ! Note that many of the calcluation from here on revert to using vertical - ! distances in m, not Z. - ! Populate interior rows of tridiagonal matrix; must multiply through by ! gprime to get tridiagonal matrix to the symmetrical form: ! [-1/H(k-1)]e(k-1) + [1/H(k-1)+1/H(k)-lam_z]e(k) + [-1/H(k)]e(k+1) = 0, ! where lam_z = lam*gprime is now a function of depth. - ! Frist, populate interior rows + ! First, populate interior rows - ! init the values in matrix: since number of layers is variable, values need - ! to be reset + ! init the values in matrix: since number of layers is variable, values need to be reset lam_z(:) = 0.0 a_diag(:) = 0.0 - b_diag(:) = 0.0 + b_dom(:) = 0.0 c_diag(:) = 0.0 e_guess(:) = 0.0 e_itt(:) = 0.0 w_strct(:) = 0.0 do K=3,kc-1 row = K-1 ! 
indexing for TD matrix rows - gp_unscaled = gprime(K) - lam_z(row) = lam*gp_unscaled - a_diag(row) = gp_unscaled*(-Igu(K)) - b_diag(row) = gp_unscaled*(Igu(K)+Igl(K)) - lam_z(row) - c_diag(row) = gp_unscaled*(-Igl(K)) + lam_z(row) = lam*gprime(K) + a_diag(row) = gprime(K)*(-Igu(K)) + b_dom(row) = 2.0*gprime(K)*(Igu(K)+Igl(K)) - lam_z(row) + c_diag(row) = gprime(K)*(-Igl(K)) + enddo + if (CS%debug) then ; do row=2,kc-2 if (isnan(lam_z(row)))then ; print *, "Wave_structure: lam_z(row) is NAN" ; endif if (isnan(a_diag(row)))then ; print *, "Wave_structure: a(k) is NAN" ; endif - if (isnan(b_diag(row)))then ; print *, "Wave_structure: b(k) is NAN" ; endif if (isnan(c_diag(row)))then ; print *, "Wave_structure: c(k) is NAN" ; endif - enddo + enddo ; endif ! Populate top row of tridiagonal matrix K=2 ; row = K-1 ; - gp_unscaled = gprime(K) - lam_z(row) = lam*gp_unscaled + lam_z(row) = lam*gprime(K) a_diag(row) = 0.0 - b_diag(row) = gp_unscaled*(Igu(K)+Igl(K)) - lam_z(row) - c_diag(row) = gp_unscaled*(-Igl(K)) + b_dom(row) = gprime(K)*(Igu(K)+2.0*Igl(K)) - lam_z(row) + c_diag(row) = gprime(K)*(-Igl(K)) ! Populate bottom row of tridiagonal matrix K=kc ; row = K-1 - gp_unscaled = gprime(K) - lam_z(row) = lam*gp_unscaled - a_diag(row) = gp_unscaled*(-Igu(K)) - b_diag(row) = gp_unscaled*(Igu(K)+Igl(K)) - lam_z(row) + lam_z(row) = lam*gprime(K) + a_diag(row) = gprime(K)*(-Igu(K)) + b_dom(row) = gprime(K)*(2.0*Igu(K) + Igl(K)) - lam_z(row) c_diag(row) = 0.0 - ! Guess a vector shape to start with (excludes surface and bottom) - e_guess(1:kc-1) = sin((z_int(2:kc)/htot(i,j)) *Pi) - e_guess(1:kc-1) = e_guess(1:kc-1)/sqrt(sum(e_guess(1:kc-1)**2)) + ! Guess a normalized vector shape to start with (excludes surface and bottom) + emag2 = 0.0 + pi_htot = Pi / htot(i,j) + do K=2,kc + e_guess(K-1) = sin(pi_htot * z_int(K)) + emag2 = emag2 + e_guess(K-1)**2 + enddo + renorm = 1.0 / sqrt(emag2) + do K=2,kc ; e_guess(K-1) = renorm*e_guess(K-1) ; enddo ! 
Perform inverse iteration with tri-diag solver do itt=1,max_itt ! this solver becomes unstable very quickly + ! b_diag(1:kc-1) = b_dom(1:kc-1) - (a_diag(1:kc-1) + c_diag(1:kc-1)) !call tridiag_solver(a_diag(1:kc-1),b_diag(1:kc-1),c_diag(1:kc-1), & ! -lam_z(1:kc-1),e_guess(1:kc-1),"TDMA_T",e_itt) - call solve_diag_dominant_tridiag( c_diag(1:kc-1), b_diag(1:kc-1) - (a_diag(1:kc-1)+c_diag(1:kc-1)), & - a_diag(1:kc-1), e_guess(1:kc-1), & - e_itt, kc-1 ) - e_guess(1:kc-1) = e_itt(1:kc-1) / sqrt(sum(e_itt(1:kc-1)**2)) + call solve_diag_dominant_tridiag( c_diag, b_dom, a_diag, e_guess, e_itt, kc-1 ) + ! Renormalize the guesses of the structure.- + emag2 = 0.0 + do K=2,kc ; emag2 = emag2 + e_itt(K-1)**2 ; enddo + renorm = 1.0 / sqrt(emag2) + do K=2,kc ; e_guess(K-1) = renorm*e_itt(K-1) ; enddo + + ! A test should be added here to evaluate convergence. enddo ! itt-loop - w_strct(2:kc) = e_guess(1:kc-1) + do K=2,kc ; w_strct(K) = e_guess(K-1) ; enddo w_strct(1) = 0.0 ! rigid lid at surface w_strct(kc+1) = 0.0 ! zero-flux at bottom ! Check to see if solver worked - ig_stop = 0 ; jg_stop = 0 - if (isnan(sum(w_strct(1:kc+1))))then - print *, "Wave_structure: w_strct has a NAN at ig=", ig, ", jg=", jg - if (iG%iec .or. jG%jec)then - print *, "This is occuring at a halo point." + if (CS%debug) then + ig_stop = 0 ; jg_stop = 0 + if (isnan(sum(w_strct(1:kc+1)))) then + print *, "Wave_structure: w_strct has a NAN at ig=", ig, ", jg=", jg + if (iG%iec .or. jG%jec)then + print *, "This is occuring at a halo point." + endif + ig_stop = ig ; jg_stop = jg endif - ig_stop = ig ; jg_stop = jg endif ! Normalize vertical structure function of w such that @@ -493,7 +508,8 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo w2avg = w2avg + 0.5*(w_strct(K)**2+w_strct(K+1)**2)*dz(k) enddo ! 
correct renormalization: - w_strct(:) = w_strct(:) * sqrt(htot(i,j)*a_int/w2avg) + renorm = sqrt(htot(i,j)*a_int/w2avg) + do K=1,kc+1 ; w_strct(K) = renorm * w_strct(K) ; enddo ! Calculate vertical structure function of u (i.e. dw/dz) do K=2,nzm-1 @@ -510,8 +526,10 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo ! Calculate terms in vertically integrated energy equation int_dwdz2 = 0.0 ; int_w2 = 0.0 ; int_N2w2 = 0.0 - u_strct2(1:nzm) = u_strct(1:nzm)**2 - w_strct2(1:nzm) = w_strct(1:nzm)**2 + do K=1,nzm + u_strct2(K) = u_strct(K)**2 + w_strct2(K) = w_strct(K)**2 + enddo ! vertical integration with Trapezoidal rule do k=1,nzm-1 int_dwdz2 = int_dwdz2 + 0.5*(u_strct2(K)+u_strct2(K+1)) * dz(k) @@ -522,7 +540,7 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo ! Back-calculate amplitude from energy equation if (present(En) .and. (freq**2*Kmag2 > 0.0)) then ! Units here are [R Z ~> kg m-2] - KE_term = 0.25*GV%Rho0*( ((freq**2 + f2) / (freq**2*Kmag2))*int_dwdz2 + int_w2 ) + KE_term = 0.25*GV%Rho0*( ((freq**2 + f2) / (freq**2*Kmag2))*US%L_to_Z**2*int_dwdz2 + int_w2 ) PE_term = 0.25*GV%Rho0*( int_N2w2 / freq**2 ) if (En(i,j) >= 0.0) then W0 = sqrt( En(i,j) / (KE_term + PE_term) ) @@ -532,34 +550,43 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo W0 = 0.0 endif ! Calculate actual vertical velocity profile and derivative - W_profile(:) = W0*w_strct(:) - ! dWdz_profile(:) = W0*u_strct(:) - ! Calculate average magnitude of actual horizontal velocity over a period - Uavg_profile(:) = abs(W0*u_strct(:)) * sqrt((freq**2 + f2) / (2.0*freq**2*Kmag2)) + U_mag = W0 * sqrt((freq**2 + f2) / (2.0*freq**2*Kmag2)) + do K=1,nzm + W_profile(K) = W0*w_strct(K) + ! dWdz_profile(K) = W0*u_strct(K) + ! Calculate average magnitude of actual horizontal velocity over a period + Uavg_profile(K) = abs(U_mag * u_strct(K)) + enddo else - W_profile(:) = 0.0 - ! 
dWdz_profile(:) = 0.0 - Uavg_profile(:) = 0.0 + do K=1,nzm + W_profile(K) = 0.0 + ! dWdz_profile(K) = 0.0 + Uavg_profile(K) = 0.0 + enddo endif ! Store values in control structure - CS%w_strct(i,j,1:nzm) = w_strct(1:nzm) - CS%u_strct(i,j,1:nzm) = u_strct(1:nzm) - CS%W_profile(i,j,1:nzm) = W_profile(1:nzm) - CS%Uavg_profile(i,j,1:nzm)= Uavg_profile(1:nzm) - CS%z_depths(i,j,1:nzm) = z_int(1:nzm) - CS%N2(i,j,1:nzm) = N2(1:nzm) - CS%num_intfaces(i,j) = nzm + do K=1,nzm + CS%w_strct(i,j,K) = w_strct(K) + CS%u_strct(i,j,K) = u_strct(K) + CS%W_profile(i,j,K) = W_profile(K) + CS%Uavg_profile(i,j,K) = Uavg_profile(K) + CS%z_depths(i,j,K) = z_int(K) + CS%N2(i,j,K) = N2(K) + enddo + CS%num_intfaces(i,j) = nzm else ! If not enough layers, default to zero nzm = kc+1 - CS%w_strct(i,j,1:nzm) = 0.0 - CS%u_strct(i,j,1:nzm) = 0.0 - CS%W_profile(i,j,1:nzm) = 0.0 - CS%Uavg_profile(i,j,1:nzm)= 0.0 - CS%z_depths(i,j,1:nzm) = 0.0 ! could use actual values - CS%N2(i,j,1:nzm) = 0.0 ! could use with actual values - CS%num_intfaces(i,j) = nzm + do K=1,nzm + CS%w_strct(i,j,K) = 0.0 + CS%u_strct(i,j,K) = 0.0 + CS%W_profile(i,j,K) = 0.0 + CS%Uavg_profile(i,j,K) = 0.0 + CS%z_depths(i,j,K) = 0.0 ! could use actual values + CS%N2(i,j,K) = 0.0 ! could use with actual values + enddo + CS%num_intfaces(i,j) = nzm endif ! kc >= 3 and kc > ModeNum + 1? endif ! drxh_sum >= 0? !else ! if at test point - delete later @@ -568,14 +595,16 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo endif ! mask2dT > 0.0? else ! if cn=0.0, default to zero - nzm = nz+1! could use actual values - CS%w_strct(i,j,1:nzm) = 0.0 - CS%u_strct(i,j,1:nzm) = 0.0 - CS%W_profile(i,j,1:nzm) = 0.0 - CS%Uavg_profile(i,j,1:nzm)= 0.0 - CS%z_depths(i,j,1:nzm) = 0.0 ! could use actual values - CS%N2(i,j,1:nzm) = 0.0 ! could use with actual values - CS%num_intfaces(i,j) = nzm + nzm = nz+1 ! 
could use actual values + do K=1,nzm + CS%w_strct(i,j,K) = 0.0 + CS%u_strct(i,j,K) = 0.0 + CS%W_profile(i,j,K) = 0.0 + CS%Uavg_profile(i,j,K) = 0.0 + CS%z_depths(i,j,K) = 0.0 ! could use actual values + CS%N2(i,j,K) = 0.0 ! could use with actual values + enddo + CS%num_intfaces(i,j) = nzm endif ; enddo ! if cn>0.0? ; i-loop enddo ! j-loop @@ -586,6 +615,8 @@ subroutine wave_structure(h, tv, G, GV, US, cn, ModeNum, freq, CS, En, full_halo end subroutine wave_structure +! The subroutine tridiag_solver is never used and could perhaps be deleted. + !> Solves a tri-diagonal system Ax=y using either the standard !! Thomas algorithm (TDMA_T) or its more stable variant that invokes the !! "Hallberg substitution" (TDMA_H). @@ -722,8 +753,8 @@ subroutine wave_structure_init(Time, G, GV, param_file, diag, CS) !! diagnostic output. type(wave_structure_CS), intent(inout) :: CS !< Wave structure control struct -! This include declares and sets the variable "version". -#include "version_variable.h" + ! This include declares and sets the variable "version". +# include "version_variable.h" character(len=40) :: mdl = "MOM_wave_structure" ! This module's name. 
integer :: isd, ied, jsd, jed, nz diff --git a/src/framework/MOM_restart.F90 b/src/framework/MOM_restart.F90 index 7081bbd0fb..6eba9be727 100644 --- a/src/framework/MOM_restart.F90 +++ b/src/framework/MOM_restart.F90 @@ -22,7 +22,8 @@ module MOM_restart implicit none ; private public restart_init, restart_end, restore_state, register_restart_field -public save_restart, query_initialized, restart_registry_lock, restart_init_end, vardesc +public save_restart, query_initialized, set_initialized +public restart_registry_lock, restart_init_end, vardesc public restart_files_exist, determine_is_new_run, is_new_run public register_restart_field_as_obsolete, register_restart_pair diff --git a/src/ice_shelf/MOM_ice_shelf.F90 b/src/ice_shelf/MOM_ice_shelf.F90 index 10e72bae16..26c74d73ec 100644 --- a/src/ice_shelf/MOM_ice_shelf.F90 +++ b/src/ice_shelf/MOM_ice_shelf.F90 @@ -34,7 +34,7 @@ module MOM_ice_shelf use MOM_io, only : field_exists, file_exists, MOM_read_data, write_version_number use MOM_io, only : slasher, fieldtype, vardesc, var_desc use MOM_io, only : write_field, close_file, SINGLE_FILE, MULTIPLE -use MOM_restart, only : register_restart_field, query_initialized, save_restart +use MOM_restart, only : register_restart_field, save_restart use MOM_restart, only : restart_init, restore_state, MOM_restart_CS, register_restart_pair use MOM_time_manager, only : time_type, time_type_to_real, real_to_time, operator(>), operator(-) use MOM_transcribe_grid, only : copy_dyngrid_to_MOM_grid, copy_MOM_grid_to_dyngrid diff --git a/src/ice_shelf/MOM_ice_shelf_dynamics.F90 b/src/ice_shelf/MOM_ice_shelf_dynamics.F90 index 4015c5d602..63ccc3d33c 100644 --- a/src/ice_shelf/MOM_ice_shelf_dynamics.F90 +++ b/src/ice_shelf/MOM_ice_shelf_dynamics.F90 @@ -16,8 +16,7 @@ module MOM_ice_shelf_dynamics use MOM_file_parser, only : read_param, get_param, log_param, log_version, param_file_type use MOM_grid, only : MOM_grid_init, ocean_grid_type use MOM_io, only : file_exists, slasher, 
MOM_read_data -use MOM_restart, only : register_restart_field, query_initialized -use MOM_restart, only : MOM_restart_CS +use MOM_restart, only : register_restart_field, MOM_restart_CS use MOM_time_manager, only : time_type, set_time use MOM_unit_scaling, only : unit_scale_type, unit_scaling_init !MJH use MOM_ice_shelf_initialize, only : initialize_ice_shelf_boundary diff --git a/src/initialization/MOM_state_initialization.F90 b/src/initialization/MOM_state_initialization.F90 index cd6ddbeca7..257d25dad0 100644 --- a/src/initialization/MOM_state_initialization.F90 +++ b/src/initialization/MOM_state_initialization.F90 @@ -2138,15 +2138,16 @@ subroutine initialize_sponges_file(G, GV, US, use_temperature, tv, u, v, depth_t endif ! The remaining calls to set_up_sponge_field can be in any order. if ( use_temperature) then - call set_up_ALE_sponge_field(filename, potemp_var, Time, G, GV, US, tv%T, ALE_CSp, scale=US%C_to_degC) - call set_up_ALE_sponge_field(filename, salin_var, Time, G, GV, US, tv%S, ALE_CSp, scale=US%S_to_ppt) + call set_up_ALE_sponge_field(filename, potemp_var, Time, G, GV, US, tv%T, ALE_CSp, scale=US%degC_to_C) + call set_up_ALE_sponge_field(filename, salin_var, Time, G, GV, US, tv%S, ALE_CSp, scale=US%ppt_to_S) endif if (sponge_uv) then filename = trim(inputdir)//trim(state_uv_file) call log_param(param_file, mdl, "INPUTDIR/SPONGE_STATE_UV_FILE", filename) if (.not.file_exists(filename, G%Domain)) & call MOM_error(FATAL, " initialize_sponges: Unable to open "//trim(filename)) - call set_up_ALE_sponge_vel_field(filename, u_var, filename, v_var, Time, G, GV, US, ALE_CSp, u, v) + call set_up_ALE_sponge_vel_field(filename, u_var, filename, v_var, Time, G, GV, US, & + ALE_CSp, u, v, scale=US%m_s_to_L_T) endif endif endif diff --git a/src/parameterizations/vertical/MOM_set_viscosity.F90 b/src/parameterizations/vertical/MOM_set_viscosity.F90 index 22d65110be..7c6d96dede 100644 --- a/src/parameterizations/vertical/MOM_set_viscosity.F90 +++ 
b/src/parameterizations/vertical/MOM_set_viscosity.F90 @@ -741,6 +741,9 @@ subroutine set_viscous_BBL(u, v, h, tv, visc, G, GV, US, CS, pbv) ! bbl_thick. if ((bbl_thick > 0.5*CS%Hbbl) .and. (CS%RiNo_mix)) bbl_thick = 0.5*CS%Hbbl + ! If drag is a body force, bbl_thick is HBBL + if (CS%body_force_drag) bbl_thick = h_bbl_drag(i) + if (CS%Channel_drag) then ! The drag within the bottommost bbl_thick is applied as a part of ! an enhanced bottom viscosity, while above this the drag is applied @@ -1022,7 +1025,7 @@ subroutine set_viscous_BBL(u, v, h, tv, visc, G, GV, US, CS, pbv) visc%Ray_v(i,J,k) = visc%Ray_v(i,J,k) + (CS%cdrag*US%L_to_Z*umag_avg(i)) * h_bbl_fr endif h_sum = h_sum + h_at_vel(i,k) - if (h_sum >= bbl_thick) exit ! The top of this layer is above the drag zone. + if (h_sum >= h_bbl_drag(i)) exit ! The top of this layer is above the drag zone. enddo ! Do not enhance the near-bottom viscosity in this case. Kv_bbl = CS%Kv_BBL_min @@ -2003,7 +2006,8 @@ subroutine set_visc_init(Time, G, GV, US, param_file, diag, visc, CS, restart_CS call get_param(param_file, mdl, "DRAG_AS_BODY_FORCE", CS%body_force_drag, & "If true, the bottom stress is imposed as an explicit body force "//& "applied over a fixed distance from the bottom, rather than as an "//& - "implicit calculation based on an enhanced near-bottom viscosity", & + "implicit calculation based on an enhanced near-bottom viscosity. 
"//& + "The thickness of the bottom boundary layer is HBBL.", & default=.false., do_not_log=.not.CS%bottomdraglaw) call get_param(param_file, mdl, "CHANNEL_DRAG", CS%Channel_drag, & "If true, the bottom drag is exerted directly on each "//& diff --git a/src/tracer/MOM_CFC_cap.F90 b/src/tracer/MOM_CFC_cap.F90 index 8e70ad3ed8..fc7e78e150 100644 --- a/src/tracer/MOM_CFC_cap.F90 +++ b/src/tracer/MOM_CFC_cap.F90 @@ -16,7 +16,7 @@ module MOM_CFC_cap use MOM_io, only : vardesc, var_desc, query_vardesc, stdout use MOM_tracer_registry, only : tracer_type use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_time_manager, only : time_type use time_interp_external_mod, only : init_external_field, time_interp_external @@ -204,9 +204,11 @@ subroutine initialize_CFC_cap(restart, day, G, GV, US, h, diag, OBC, CS) do m=1,2 if (.not.restart .or. (CS%tracers_may_reinit .and. & - .not.query_initialized(CS%CFC_data(m)%conc, CS%CFC_data(m)%name, CS%restart_CSp))) & + .not.query_initialized(CS%CFC_data(m)%conc, CS%CFC_data(m)%name, CS%restart_CSp))) then call init_tracer_CFC(h, CS%CFC_data(m)%conc, CS%CFC_data(m)%name, CS%CFC_data(m)%land_val, & CS%CFC_data(m)%IC_val, G, GV, US, CS) + call set_initialized(CS%CFC_data(m)%conc, CS%CFC_data(m)%name, CS%restart_CSp) + endif ! cmor diagnostics ! 
CFC11 cmor conventions: http://clipc-services.ceda.ac.uk/dreq/u/42625c97b8fe75124a345962c4430982.html diff --git a/src/tracer/MOM_OCMIP2_CFC.F90 b/src/tracer/MOM_OCMIP2_CFC.F90 index 8594febddd..a864ec907f 100644 --- a/src/tracer/MOM_OCMIP2_CFC.F90 +++ b/src/tracer/MOM_OCMIP2_CFC.F90 @@ -15,7 +15,7 @@ module MOM_OCMIP2_CFC use MOM_io, only : file_exists, MOM_read_data, slasher use MOM_io, only : vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type @@ -335,14 +335,18 @@ subroutine initialize_OCMIP2_CFC(restart, day, G, GV, US, h, diag, OBC, CS, & CS%diag => diag if (.not.restart .or. (CS%tracers_may_reinit .and. & - .not.query_initialized(CS%CFC11, CS%CFC11_name, CS%restart_CSp))) & + .not.query_initialized(CS%CFC11, CS%CFC11_name, CS%restart_CSp))) then call init_tracer_CFC(h, CS%CFC11, CS%CFC11_name, CS%CFC11_land_val, & CS%CFC11_IC_val, G, GV, US, CS) + call set_initialized(CS%CFC11, CS%CFC11_name, CS%restart_CSp) + endif if (.not.restart .or. (CS%tracers_may_reinit .and. & - .not.query_initialized(CS%CFC12, CS%CFC12_name, CS%restart_CSp))) & + .not.query_initialized(CS%CFC12, CS%CFC12_name, CS%restart_CSp))) then call init_tracer_CFC(h, CS%CFC12, CS%CFC12_name, CS%CFC12_land_val, & CS%CFC12_IC_val, G, GV, US, CS) + call set_initialized(CS%CFC12, CS%CFC12_name, CS%restart_CSp) + endif if (associated(OBC)) then ! Steal from updated DOME in the fullness of time. 
diff --git a/src/tracer/MOM_generic_tracer.F90 b/src/tracer/MOM_generic_tracer.F90 index f3ab283cbb..6170aee602 100644 --- a/src/tracer/MOM_generic_tracer.F90 +++ b/src/tracer/MOM_generic_tracer.F90 @@ -39,7 +39,7 @@ module MOM_generic_tracer use MOM_hor_index, only : hor_index_type use MOM_io, only : file_exists, MOM_read_data, slasher use MOM_open_boundary, only : ocean_OBC_type - use MOM_restart, only : register_restart_field, query_initialized, MOM_restart_CS + use MOM_restart, only : register_restart_field, query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_area_mean, global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type, set_time @@ -345,6 +345,8 @@ subroutine initialize_MOM_generic_tracer(restart, day, G, GV, US, h, param_file, endif endif + + call set_initialized(tr_ptr, g_tracer_name, CS%restart_CSp) endif !traverse the linked list till hit NULL diff --git a/src/tracer/advection_test_tracer.F90 b/src/tracer/advection_test_tracer.F90 index 441189c0ac..a4e53ae797 100644 --- a/src/tracer/advection_test_tracer.F90 +++ b/src/tracer/advection_test_tracer.F90 @@ -13,7 +13,7 @@ module advection_test_tracer use MOM_hor_index, only : hor_index_type use MOM_io, only : slasher, vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type @@ -235,6 +235,8 @@ subroutine initialize_advection_test_tracer(restart, day, G, GV, h,diag, OBC, CS if (locx**2+locy**2<=1.0) CS%tr(i,j,k,m) = 1.0 if (locx>0.0.and.abs(locy)<0.2) CS%tr(i,j,k,m) = 0.0 enddo ; enddo + + call set_initialized(CS%tr(:,:,:,m), name, CS%restart_CSp) endif ! 
restart enddo diff --git a/src/tracer/boundary_impulse_tracer.F90 b/src/tracer/boundary_impulse_tracer.F90 index a4599a891e..3f8d8e7937 100644 --- a/src/tracer/boundary_impulse_tracer.F90 +++ b/src/tracer/boundary_impulse_tracer.F90 @@ -13,7 +13,7 @@ module boundary_impulse_tracer use MOM_hor_index, only : hor_index_type use MOM_io, only : vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : register_restart_field, query_initialized, MOM_restart_CS +use MOM_restart, only : register_restart_field, query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type @@ -186,6 +186,7 @@ subroutine initialize_boundary_impulse_tracer(restart, day, G, GV, US, h, diag, do k=1,CS%nkml ; do j=jsd,jed ; do i=isd,ied CS%tr(i,j,k,m) = 1.0 enddo ; enddo ; enddo + call set_initialized(CS%tr(:,:,:,m), name, CS%restart_CSp) endif enddo ! Tracer loop diff --git a/src/tracer/ideal_age_example.F90 b/src/tracer/ideal_age_example.F90 index 2fdeaff02f..66c76f0e2c 100644 --- a/src/tracer/ideal_age_example.F90 +++ b/src/tracer/ideal_age_example.F90 @@ -13,7 +13,7 @@ module ideal_age_example use MOM_hor_index, only : hor_index_type use MOM_io, only : file_exists, MOM_read_data, slasher, vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type, time_type_to_real @@ -266,6 +266,7 @@ subroutine initialize_ideal_age_tracer(restart, day, G, GV, US, h, diag, OBC, CS enddo ; enddo ; enddo endif + call set_initialized(CS%tr(:,:,:,m), name, CS%restart_CSp) endif ! restart enddo ! 
Tracer loop diff --git a/src/tracer/nw2_tracers.F90 b/src/tracer/nw2_tracers.F90 index 2ecd2ba6e0..36885d8dc8 100644 --- a/src/tracer/nw2_tracers.F90 +++ b/src/tracer/nw2_tracers.F90 @@ -10,7 +10,7 @@ module nw2_tracers use MOM_grid, only : ocean_grid_type use MOM_hor_index, only : hor_index_type use MOM_io, only : file_exists, MOM_read_data, slasher, vardesc, var_desc -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_time_manager, only : time_type, time_type_to_real use MOM_tracer_registry, only : register_tracer, tracer_registry_type use MOM_tracer_diabatic, only : tracer_vertdiff, applyTracerBoundaryFluxesInOut @@ -162,10 +162,11 @@ subroutine initialize_nw2_tracers(restart, day, G, GV, US, h, tv, diag, CS) ! in which the tracers were not present write(var_name(1:8),'(a6,i2.2)') 'tracer',m if ((.not.restart) .or. & - (.not. query_initialized(CS%tr(:,:,:,m),var_name,CS%restart_CSp))) then + (.not. query_initialized(CS%tr(:,:,:,m), var_name, CS%restart_CSp))) then do k=1,GV%ke ; do j=G%jsc,G%jec ; do i=G%isc,G%iec CS%tr(i,j,k,m) = nw2_tracer_dist(m, G, GV, eta, i, j, k) enddo ; enddo ; enddo + call set_initialized(CS%tr(:,:,:,m), var_name, CS%restart_CSp) endif ! restart enddo ! 
Tracer loop diff --git a/src/tracer/oil_tracer.F90 b/src/tracer/oil_tracer.F90 index 5592b7627a..9b7b630237 100644 --- a/src/tracer/oil_tracer.F90 +++ b/src/tracer/oil_tracer.F90 @@ -14,7 +14,7 @@ module oil_tracer use MOM_io, only : file_exists, MOM_read_data, slasher use MOM_io, only : vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type, time_type_to_real @@ -278,7 +278,7 @@ subroutine initialize_oil_tracer(restart, day, G, GV, US, h, diag, OBC, CS, & endif enddo ; enddo ; enddo endif - + call set_initialized(CS%tr(:,:,:,m), name, CS%restart_CSp) endif ! restart enddo ! Tracer loop diff --git a/src/tracer/pseudo_salt_tracer.F90 b/src/tracer/pseudo_salt_tracer.F90 index 39320db405..9221d76f2c 100644 --- a/src/tracer/pseudo_salt_tracer.F90 +++ b/src/tracer/pseudo_salt_tracer.F90 @@ -15,7 +15,7 @@ module pseudo_salt_tracer use MOM_hor_index, only : hor_index_type use MOM_io, only : vardesc, var_desc, query_vardesc use MOM_open_boundary, only : ocean_OBC_type -use MOM_restart, only : query_initialized, MOM_restart_CS +use MOM_restart, only : query_initialized, set_initialized, MOM_restart_CS use MOM_spatial_means, only : global_mass_int_EFP use MOM_sponge, only : set_up_sponge_field, sponge_CS use MOM_time_manager, only : time_type @@ -148,6 +148,7 @@ subroutine initialize_pseudo_salt_tracer(restart, day, G, GV, US, h, diag, OBC, do k=1,nz ; do j=jsd,jed ; do i=isd,ied CS%ps(i,j,k) = US%S_to_ppt*tv%S(i,j,k) enddo ; enddo ; enddo + call set_initialized(CS%ps, name, CS%restart_CSp) endif if (associated(OBC)) then diff --git a/src/user/MOM_wave_interface.F90 b/src/user/MOM_wave_interface.F90 index da567ddf82..a423ddc8b8 100644 --- a/src/user/MOM_wave_interface.F90 +++ 
b/src/user/MOM_wave_interface.F90 @@ -20,7 +20,7 @@ module MOM_wave_interface use MOM_unit_scaling, only : unit_scale_type use MOM_variables, only : thermo_var_ptrs, surface use MOM_verticalgrid, only : verticalGrid_type -use MOM_restart, only : register_restart_field, MOM_restart_CS, query_initialized +use MOM_restart, only : register_restart_field, MOM_restart_CS implicit none ; private